diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..7a73a41bfdf76d6f793007240d80983a52f15f97 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,2 @@ +{ +} \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 7b95401dc46245ac339fc25059d4a56d90b4cde5..a6532007dabdd84c24d182e77dc5eeb12543346c 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,151 @@ ---- -license: apache-2.0 ---- +--- +license: apache-2.0 +library_name: tempo-pfn +tags: +- time-series-forecasting +- zero-shot +- rnn +- linear-rnn +- synthetic-data +- foundation-model +- automl +arxiv: 2510.25502 +--- + +# TempoPFN: Synthetic Pre-Training of Linear RNNs for Zero-Shot Time Series Forecasting + +[![arXiv](https://img.shields.io/badge/arXiv-2510.25502-b31b1b.svg)](https://arxiv.org/abs/2510.25502) [![License](https://img.shields.io/badge/License-Apache_2.0-green.svg)](https://github.com/automl/TempoPFN/blob/main/LICENSE) + +--- + +**TempoPFN**, introduced in [TempoPFN: Synthetic Pre-Training of Linear RNNs for Zero-Shot Time Series Forecasting](https://arxiv.org/abs/2510.25502), is a univariate time series foundation model pretrained **entirely on synthetic data**. It delivers top-tier zero-shot forecasting accuracy while remaining fully reproducible and free from real-data leakage. + +Built on a **Linear RNN (GatedDeltaProduct)** backbone, TempoPFN performs end-to-end forecasting without patching or windowing. Its design enables fully parallelizable training and inference while maintaining stable temporal state-tracking across long sequences. The GatedDeltaProduct architecture is based on [DeltaProduct](https://arxiv.org/html/2502.10297v3), extended with state-weaving for time series forecasting. For detailed information about the architecture and custom modifications, see [`src/models/gated_deltaproduct/README.md`](src/models/gated_deltaproduct/README.md). + +This repository includes the **pretrained 38M parameter model** (`models/checkpoint_38M.pth`), all training and inference code, and the **complete synthetic data generation pipeline** used for pretraining. + +## ✨ Why TempoPFN? + +* **High Performance, No Real Data:** Achieves top-tier results on **GIFT-Eval, outperforming all existing synthetic-only approaches** and **surpassing the vast majority of models trained on real-world data**. This ensures full reproducibility and eliminates benchmark leakage. +* **Parallel and Efficient:** The linear recurrence design enables full-sequence parallelization. This gives us the best of both worlds: the linear efficiency of an RNN with the training parallelism of a Transformer. +* **Open and Reproducible:** Includes the full synthetic data pipeline, configurations, and scripts to reproduce training from scratch.
+* **State-Tracking Stability:** The GatedDeltaProduct recurrence and *state-weaving* mechanism preserve temporal continuity and information flow across long horizons, improving robustness without non-linear recurrence. + +![TempoPFN Overview](https://iili.io/KlUjfcP.png) + +## ⚙️ Installation + +> **Note on Model Weights:** This repository uses [Git LFS](https://git-lfs.github.com/) to store the model checkpoint (`.pth` file). You **must** have Git LFS installed to clone the repository correctly. +> +> ```bash +> # Install Git LFS (e.g., on Ubuntu) +> sudo apt-get install git-lfs +> git lfs install +> ``` + +1. **Clone the repository:** +```bash + git clone https://huggingface.co/AutoML-org/TempoPFN + cd TempoPFN +``` + +2. **Set up the environment:** +```bash + python -m venv venv && source venv/bin/activate + + # 1. Install PyTorch version matching your CUDA version + # Example for CUDA 12.8: + pip install torch --index-url https://download.pytorch.org/whl/cu128 + + # 2. Install TempoPFN and all other dependencies + pip install -r requirements.txt + export PYTHONPATH=$PWD +``` + +## 🚀 Quick Start: Run the Demo + +**Prerequisites:** +* You must have a **CUDA-capable GPU** with a matching PyTorch version installed. +* You have run `export PYTHONPATH=$PWD` from the repo's root directory (see Installation). + +### 1. Run the Quick Start Script + +Run a demo forecast on a synthetic sine wave. This script will automatically find and load the `models/checkpoint_38M.pth` file included in this repository. +```bash +python examples/quick_start_tempo_pfn.py +``` + +### 2. Run with a Different Checkpoint (Optional) + +If you have trained your own model, you can point the script to it: +```bash +python examples/quick_start_tempo_pfn.py --checkpoint /path/to/your/checkpoint.pth +``` + +### 3. Run the Notebook version +```bash +jupyter notebook examples/quick_start_tempo_pfn.ipynb +``` + +### Hardware & Performance Tips + +**GPU Required:** Inference requires a CUDA-capable GPU. Tested on NVIDIA A100/H100. + +**First Inference May Be Slow:** Initial calls for unseen sequence lengths trigger Triton kernel compilation. Subsequent runs are cached and fast. + +**Triton Caches:** To prevent slowdowns from writing caches to a network filesystem, route caches to a local directory (like `/tmp`) before running: +```bash +LOCAL_CACHE_BASE="${TMPDIR:-/tmp}/tsf-$(date +%s)" +mkdir -p "${LOCAL_CACHE_BASE}/triton" "${LOCAL_CACHE_BASE}/torchinductor" +export TRITON_CACHE_DIR="${LOCAL_CACHE_BASE}/triton" +export TORCHINDUCTOR_CACHE_DIR="${LOCAL_CACHE_BASE}/torchinductor" + +python examples/quick_start_tempo_pfn.py +``` + +## 🚂 Training + +### Single-GPU Training (for debugging) +```bash +torchrun --standalone --nproc_per_node=1 src/training/trainer_dist.py --config ./configs/train.yaml +``` + +### Multi-GPU Training (Single-Node) + +This example uses 8 GPUs. The training script uses PyTorch DistributedDataParallel (DDP). +```bash +torchrun --standalone --nproc_per_node=8 src/training/trainer_dist.py --config ./configs/train.yaml +``` + +### Configuration + +All training and model parameters are controlled via YAML files in `configs/` (architecture, optimizers, paths). + +## 💾 Synthetic Data Generation + +A core contribution of this work is our open-source synthetic data pipeline, located in `src/synthetic_generation/`. It combines diverse generators with a powerful augmentation cascade. 
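+
+For intuition, here is a minimal, self-contained sketch of the kind of prior sampling these generators perform. It uses plain NumPy and a toy sine-wave prior rather than the repository's own generator wrappers (see `examples/generate_synthetic_data.py` and `src/synthetic_generation/` for the actual interfaces); the real generator families are listed below.
+
+```python
+# Toy illustration only: this is NOT the repository's API, just a simple
+# sine-wave prior with random frequency, phase, amplitude, and additive noise.
+import numpy as np
+
+def sample_sine_wave(length: int, rng: np.random.Generator) -> np.ndarray:
+    """Draw one noisy series from a randomized sine-wave prior."""
+    freq = rng.uniform(1.0, 20.0)        # cycles over the full series
+    phase = rng.uniform(0.0, 2 * np.pi)  # random phase offset
+    amplitude = rng.uniform(0.5, 2.0)    # random amplitude
+    t = np.linspace(0.0, 1.0, length)
+    noise = rng.normal(0.0, 0.05, size=length)
+    return amplitude * np.sin(2 * np.pi * freq * t + phase) + noise
+
+rng = np.random.default_rng(2025)
+batch = np.stack([sample_sine_wave(1024, rng) for _ in range(8)])
+print(batch.shape)  # (8, 1024)
+```
+
+The actual pipeline composes many such priors (listed next) and passes the sampled series through the augmentation cascade before they reach the model.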
+ +**Generators Used:** + +* **Adapted Priors:** ForecastPFN, KernelSynth, GaussianProcess (GP), and CauKer (Structural Causal Models). +* **Novel Priors:** SDE (a flexible regime-switching Ornstein-Uhlenbeck process), Sawtooth, StepFunction, Anomaly, Spikes, SineWave, and Audio-Inspired generators (Stochastic Rhythms, Financial Volatility, Network Topology, Multi-Scale Fractals). + +You can easily generate your own data by installing the development dependencies and instantiating a generator wrapper. See `examples/generate_synthetic_data.py` for a minimal script, or inspect the generator code in `src/synthetic_generation/`. + +## 🤝 License + +This project is licensed under the Apache 2.0 License. See the LICENSE file for details. This permissive license allows for both academic and commercial use. + +## 📚 Citation + +If you find TempoPFN useful in your research, please consider citing our paper: +```bibtex +@misc{moroshan2025tempopfn, + title={TempoPFN: Synthetic Pre-training of Linear RNNs for Zero-Shot Time Series Forecasting}, + author={Vladyslav Moroshan and Julien Siems and Arber Zela and Timur Carstensen and Frank Hutter}, + year={2025}, + eprint={2510.25502}, + archivePrefix={arXiv}, + primaryClass={cs.LG} +} +``` \ No newline at end of file diff --git a/configs/example.yaml b/configs/example.yaml new file mode 100644 index 0000000000000000000000000000000000000000..667220e5c73a08e209d19a331c31cd6cc007b8de --- /dev/null +++ b/configs/example.yaml @@ -0,0 +1,119 @@ +train_data_path: null # Replace with the path to root of the training data directory with subdirectories for each generator (e.g. gp, kernel, etc.) +model_path: ./models # Path where the model will be saved +model_name: TempoPFN +continue_training: false +checkpoint_path: null # Replace with the path to the checkpoint file +seed: 2025 +wandb: true # whether to log to wandb +wandb_project_name: TempoPFNTraining +wandb_entity: university-of-freiburg-2024 +wandb_plots: false + +batch_size: 40 +num_training_iterations: 1000000 # 1M +validation_batch_size: 64 +num_validation_batches: 1 +num_workers: 4 +gradient_accumulation_enabled: true +accumulation_steps: 5 # Number of batches to accumulate before updating (effective batch size = batch_size * accumulation_steps) +log_interval: 2048 +save_every: 100000 + +generator_proportions: + forecast_pfn: 1.0 + gp: 1.0 + kernel: 1.0 + sawtooth: 1.0 + sinewave: 1.0 + step: 1.0 + anomaly: 1.0 + spike: 1.0 + cauker_univariate: 1.0 + ou_process: 3.0 + audio_financial_volatility: 0.1 + audio_multi_scale_fractal: 0.1 + audio_network_topology: 0.5 + audio_stochastic_rhythm: 0.5 + augmented_per_sample_2048: 2.0 + augmented_temp_batch_2048: 2.0 + +# Learning Rate Scheduler Configuration +lr_scheduler: cosine # Options: "warmup_stable_decay", "cosine_with_warmup", "cosine_with_restarts", "cosine" + +# Learning Rate Parameters +peak_lr: 0.0002 # 2e-4 - Peak learning rate +min_lr_ratio: 0.01 # Minimum LR as fraction of peak LR + +# WSD Scheduler Specific Parameters +warmup_ratio: 0.003 # 0.3% of total steps for warmup +stable_ratio: 0.90 # 90% of total steps at stable learning rate +decay_type: cosine # Type of decay: "cosine" or "linear" + +# Alternative Scheduler Parameters (if using different schedulers) +num_cycles: 0.5 # For cosine_with_warmup: 0.5 = half cosine wave +num_restart_cycles: 4 # For cosine_with_restarts: number of restart cycles + +# Optimizer Configuration +weight_decay: 0.01 # Weight decay for AdamW +beta1: 0.9 # Adam beta1 parameter +beta2: 0.98 # Adam beta2 parameter 
(optimized for transformers) +optimizer_eps: 1e-6 # Adam epsilon + +# Training Stability +gradient_clip_val: 100.0 +scaler: custom_robust + +gift_eval: + evaluate_on_gift_eval: false + max_context_length: 3072 + create_plots: false + max_plots: 5 + dataset_storage_path: null # Replace with the path to the dataset storage path + +data_augmentation: + nan_augmentation: true + scaler_augmentation: false + length_shortening: true + nan_stats_path: ./data/nan_stats.json + +augmentation_probabilities: + scaler_augmentation: 0.5 + +TimeSeriesModel: + # Core architecture + embed_size: 512 + num_encoder_layers: 10 + + # Scaling and preprocessing + scaler: custom_robust + epsilon: 0.00001 + scaler_clamp_value: null + handle_constants: false + + # Time features + K_max: 25 + time_feature_config: + use_enhanced_features: true + use_holiday_features: false + use_index_features: true + include_seasonality_info: true + + drop_enc_allow: false + encoding_dropout: 0.0 + + # Encoder configuration + encoder_config: + attn_mode: chunk + num_heads: 4 + expand_v: 1.0 + use_short_conv: true + conv_size: 32 + allow_neg_eigval: true + hidden_ratio: 1.0 + use_gate: true + use_forget_gate: true + num_householder: 4 + weaving: true + + loss_type: 'quantile' + quantiles: [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9] \ No newline at end of file diff --git a/data/dataset_properties.json b/data/dataset_properties.json new file mode 100644 index 0000000000000000000000000000000000000000..e53b8565792bfb4071eaff8c74304ab5ff63cbed --- /dev/null +++ b/data/dataset_properties.json @@ -0,0 +1,152 @@ +{ + "m4_yearly": { + "domain": "Econ/Fin", + "frequency": "A", + "num_variates": 1 + }, + "m4_quarterly": { + "domain": "Econ/Fin", + "frequency": "Q", + "num_variates": 1 + }, + "m4_monthly": { + "domain": "Econ/Fin", + "frequency": "M", + "num_variates": 1 + }, + "m4_weekly": { + "domain": "Econ/Fin", + "frequency": "W", + "num_variates": 1 + }, + "m4_daily": { + "domain": "Econ/Fin", + "frequency": "D", + "num_variates": 1 + }, + "m4_hourly": { + "domain": "Econ/Fin", + "frequency": "H", + "num_variates": 1 + }, + "electricity": { + "domain": "Energy", + "frequency": "W", + "num_variates": 1 + }, + "ett1": { + "domain": "Energy", + "frequency": "W", + "num_variates": 7 + }, + "ett2": { + "domain": "Energy", + "frequency": "W", + "num_variates": 7 + }, + "solar": { + "domain": "Energy", + "frequency": "W", + "num_variates": 1 + }, + "hospital": { + "domain": "Healthcare", + "frequency": "M", + "num_variates": 1 + }, + "covid_deaths": { + "domain": "Healthcare", + "frequency": "D", + "num_variates": 1 + }, + "us_births": { + "domain": "Healthcare", + "frequency": "M", + "num_variates": 1 + }, + "saugeen": { + "domain": "Nature", + "frequency": "M", + "num_variates": 1 + }, + "temperature_rain": { + "domain": "Nature", + "frequency": "D", + "num_variates": 1 + }, + "kdd_cup_2018": { + "domain": "Nature", + "frequency": "D", + "num_variates": 1 + }, + "jena_weather": { + "domain": "Nature", + "frequency": "D", + "num_variates": 21 + }, + "car_parts": { + "domain": "Sales", + "frequency": "M", + "num_variates": 1 + }, + "restaurant": { + "domain": "Sales", + "frequency": "D", + "num_variates": 1 + }, + "hierarchical_sales": { + "domain": "Sales", + "frequency": "W-WED", + "num_variates": 1 + }, + "loop_seattle": { + "domain": "Transport", + "frequency": "D", + "num_variates": 1 + }, + "sz_taxi": { + "domain": "Transport", + "frequency": "H", + "num_variates": 1 + }, + "m_dense": { + "domain": "Transport", + "frequency": "D", + 
"num_variates": 1 + }, + "bitbrains_fast_storage": { + "domain": "Web/CloudOps", + "frequency": "H", + "num_variates": 2 + }, + "bitbrains_rnd": { + "domain": "Web/CloudOps", + "frequency": "H", + "num_variates": 2 + }, + "bizitobs_application": { + "domain": "Web/CloudOps", + "frequency": "10S", + "num_variates": 2 + }, + "bizitobs_service": { + "domain": "Web/CloudOps", + "frequency": "10S", + "num_variates": 2 + }, + "bizitobs_l2c": { + "domain": "Web/CloudOps", + "frequency": "H", + "num_variates": 7 + }, + "dd_benchmark_short": { + "domain": "Web/Observability", + "frequency": "Short", + "num_variates": 32 + }, + "dd_benchmark_long": { + "domain": "Web/Observability", + "frequency": "Long", + "num_variates": 32 + } +} \ No newline at end of file diff --git a/data/nan_stats.json b/data/nan_stats.json new file mode 100644 index 0000000000000000000000000000000000000000..e1db00019a2453d0ff9bf95ccb5c2f1eb65e5b44 --- /dev/null +++ b/data/nan_stats.json @@ -0,0 +1 @@ +{"p_series_has_nan": 0.24642624405529442, "nan_ratio_distribution": [0.002717391304347826, 0.0007763975155279503, 0.0006469979296066253, 0.002976190476190476, 0.0006469979296066253, 0.14945652173913043, 0.0009057971014492754, 0.002976190476190476, 0.003105590062111801, 0.003105590062111801, 0.003105590062111801, 0.003105590062111801, 0.002846790890269151, 0.0014233954451345755, 0.00038819875776397513, 0.002717391304347826, 0.002976190476190476, 0.003105590062111801, 0.0010351966873706005, 0.0005175983436853002, 0.0007763975155279503, 0.0007763975155279503, 0.003105590062111801, 0.00038819875776397513, 0.0006469979296066253, 0.0009057971014492754, 0.002846790890269151, 0.0007763975155279503, 0.0036231884057971015, 0.14945652173913043, 0.002717391304347826, 0.0007763975155279503, 0.00038819875776397513, 0.0007763975155279503, 0.0033643892339544515, 0.0010351966873706005, 0.002458592132505176, 0.003105590062111801, 0.0033643892339544515, 0.1482919254658385, 0.002846790890269151, 0.0010351966873706005, 0.002846790890269151, 0.002976190476190476, 0.002976190476190476, 0.0005175983436853002, 0.0011645962732919255, 0.0032349896480331265, 0.003105590062111801, 0.0006469979296066253, 0.002717391304347826, 0.00038819875776397513, 0.0032349896480331265, 0.0032349896480331265, 0.002976190476190476, 0.002976190476190476, 0.0005175983436853002, 0.379787784679089, 0.6717132505175983, 0.002976190476190476, 0.002717391304347826, 0.0033643892339544515, 0.9989648033126294, 0.0032349896480331265, 0.0006469979296066253, 0.0032349896480331265, 0.003105590062111801, 0.002717391304347826, 0.002976190476190476, 0.002846790890269151, 0.00038819875776397513, 0.002717391304347826, 0.37085921325051757, 0.00012939958592132506, 0.0033643892339544515, 0.8501552795031055, 0.0005175983436853002, 0.0009057971014492754, 0.0036231884057971015, 0.984860248447205, 0.0009057971014492754, 1.0, 0.002976190476190476, 0.0011645962732919255, 0.002976190476190476, 0.5905797101449275, 0.8910455486542443, 0.002976190476190476, 0.8501552795031055, 0.0036231884057971015, 0.0032349896480331265, 0.0014233954451345755, 0.002587991718426501, 0.002846790890269151, 0.0032349896480331265, 0.003105590062111801, 0.0007763975155279503, 0.0036231884057971015, 0.9742494824016563, 0.002976190476190476, 0.0015527950310559005, 0.002846790890269151, 0.0034937888198757765, 0.002846790890269151, 0.0033643892339544515, 0.003105590062111801, 1.0, 0.0005175983436853002, 0.0033643892339544515, 0.0033643892339544515, 0.002717391304347826, 0.0032349896480331265, 0.0011645962732919255, 
0.003105590062111801, 0.6282349896480331, 0.0033643892339544515, 0.0005175983436853002, 0.0033643892339544515, 0.003105590062111801, 0.14997412008281574, 0.0033643892339544515, 0.002717391304347826, 0.003105590062111801, 0.0007763975155279503, 0.002717391304347826, 0.0036231884057971015, 0.0034937888198757765, 0.0032349896480331265, 0.0010351966873706005, 0.0006469979296066253, 0.0032349896480331265, 0.6714544513457557, 0.0009057971014492754, 0.003105590062111801, 0.002846790890269151, 0.002587991718426501, 0.0034937888198757765, 0.0010351966873706005, 0.002976190476190476, 0.002976190476190476, 0.0011645962732919255, 0.002329192546583851, 0.0033643892339544515, 0.003105590062111801, 0.0036231884057971015, 0.9949534161490683, 0.00038819875776397513, 0.002846790890269151, 0.0033643892339544515, 0.002846790890269151, 0.002846790890269151, 0.002717391304347826, 0.002329192546583851, 0.0005175983436853002, 0.0032349896480331265, 0.0034937888198757765, 0.003105590062111801, 0.002199792960662526, 0.15087991718426502, 0.002329192546583851, 0.002717391304347826, 0.00038819875776397513, 0.0007763975155279503, 0.0033643892339544515, 0.002976190476190476, 0.0005175983436853002, 0.003105590062111801, 0.003105590062111801, 0.0007763975155279503, 0.1486801242236025, 0.002976190476190476, 0.0033643892339544515, 0.0005175983436853002, 0.0010351966873706005, 0.9946946169772257, 0.0032349896480331265, 0.0006469979296066253, 0.0007763975155279503, 0.00038819875776397513, 0.0006469979296066253, 0.002846790890269151, 0.0006469979296066253, 0.0005175983436853002, 0.002458592132505176, 0.002846790890269151, 0.0005175983436853002, 0.002976190476190476, 0.002717391304347826, 0.00038819875776397513, 0.0014233954451345755, 0.8935041407867494, 0.002717391304347826, 0.0032349896480331265, 0.002976190476190476, 0.00038819875776397513, 0.0006469979296066253, 0.003105590062111801, 0.003105590062111801, 1.0, 0.0006469979296066253, 0.002846790890269151, 0.002717391304347826, 0.002587991718426501, 0.003105590062111801, 0.003105590062111801, 0.0009057971014492754, 0.003105590062111801, 0.002717391304347826, 0.0033643892339544515, 0.003105590062111801, 0.002717391304347826, 0.0006469979296066253, 0.0010351966873706005, 0.002717391304347826, 0.0032349896480331265, 0.00038819875776397513, 0.0033643892339544515, 0.0009057971014492754, 0.0006469979296066253, 0.0006469979296066253, 0.0009057971014492754, 0.0033643892339544515, 0.0005175983436853002, 0.003105590062111801, 0.00038819875776397513, 0.002846790890269151, 0.00012939958592132506, 0.0034937888198757765, 0.00038819875776397513, 0.0007763975155279503, 0.0005175983436853002, 0.002976190476190476, 0.0009057971014492754, 0.0037525879917184265, 0.0006469979296066253, 0.0032349896480331265, 0.002846790890269151, 0.0032349896480331265, 0.0005175983436853002, 0.003105590062111801, 0.003105590062111801, 0.0006469979296066253, 0.8501552795031055, 0.002846790890269151, 0.0033643892339544515, 0.0033643892339544515, 0.3734472049689441, 0.0007763975155279503, 0.0005175983436853002, 0.0007763975155279503, 0.002846790890269151, 0.0006469979296066253, 0.002846790890269151, 0.0033643892339544515, 0.0006469979296066253, 0.0032349896480331265, 0.002846790890269151, 0.0007763975155279503, 0.00038819875776397513, 0.0032349896480331265, 0.003105590062111801, 0.0032349896480331265, 0.0005175983436853002, 0.002199792960662526, 0.003105590062111801, 0.003105590062111801, 0.5976966873706004, 0.002976190476190476, 0.0010351966873706005, 0.003105590062111801, 0.0006469979296066253, 
0.002846790890269151, 0.0033643892339544515, 0.002717391304347826, 0.002846790890269151, 0.002846790890269151, 0.0032349896480331265, 0.5187629399585921, 0.002717391304347826, 0.0010351966873706005, 0.0032349896480331265, 0.0005175983436853002, 0.00038819875776397513, 0.002976190476190476, 0.047489648033126296, 0.0016821946169772257, 0.0033643892339544515, 0.5578416149068323, 0.002717391304347826, 0.003105590062111801, 0.0007763975155279503, 0.002717391304347826, 0.0032349896480331265, 0.002846790890269151, 0.9967650103519669, 0.0006469979296066253, 0.04283126293995859, 0.0005175983436853002, 0.002329192546583851, 0.0019409937888198758, 0.0006469979296066253, 0.002846790890269151, 0.002846790890269151, 0.002976190476190476, 0.00038819875776397513, 0.0007763975155279503, 0.002717391304347826, 0.0034937888198757765, 0.002976190476190476, 0.0036231884057971015, 0.002587991718426501, 0.002717391304347826, 0.0006469979296066253, 0.0010351966873706005, 0.002717391304347826, 0.00038819875776397513, 0.0006469979296066253, 0.003105590062111801, 0.003105590062111801, 0.8907867494824017, 0.003105590062111801, 0.0005175983436853002, 0.0005175983436853002, 0.003105590062111801, 0.0032349896480331265, 0.599508281573499, 0.0009057971014492754, 0.0005175983436853002, 0.002976190476190476, 0.0006469979296066253, 0.00038819875776397513, 0.0006469979296066253, 0.0006469979296066253, 0.0034937888198757765, 0.0033643892339544515, 0.0033643892339544515, 0.002846790890269151, 0.00038819875776397513, 0.0032349896480331265, 0.0009057971014492754, 0.0006469979296066253, 0.1503623188405797, 0.003105590062111801, 0.003105590062111801, 0.002458592132505176, 0.0005175983436853002, 0.0006469979296066253, 0.00038819875776397513, 0.0009057971014492754, 0.0006469979296066253, 0.003105590062111801, 0.0038819875776397515, 0.0033643892339544515, 0.003105590062111801, 0.003105590062111801, 0.0034937888198757765, 0.00038819875776397513, 0.00038819875776397513, 0.002199792960662526, 0.0010351966873706005, 0.0032349896480331265, 0.002717391304347826, 0.0006469979296066253, 0.0034937888198757765, 0.0005175983436853002, 0.0032349896480331265, 0.003105590062111801, 0.0037525879917184265, 0.0032349896480331265, 0.002846790890269151, 0.0005175983436853002, 0.0032349896480331265, 0.0034937888198757765, 0.0033643892339544515, 0.0007763975155279503, 0.002976190476190476, 0.003105590062111801, 0.0037525879917184265, 0.0032349896480331265, 0.0005175983436853002, 0.0036231884057971015, 1.0, 0.0033643892339544515, 0.0036231884057971015, 0.0005175983436853002, 0.0032349896480331265, 0.0009057971014492754, 0.0032349896480331265, 0.002717391304347826, 0.0006469979296066253, 0.003105590062111801, 0.0006469979296066253, 0.0034937888198757765, 0.0005175983436853002, 0.002717391304347826, 0.003105590062111801, 0.0033643892339544515, 0.0009057971014492754, 0.0036231884057971015, 0.003105590062111801, 0.002846790890269151, 0.0010351966873706005, 0.002846790890269151, 0.0010351966873706005, 0.002458592132505176, 0.0005175983436853002, 0.002717391304347826, 0.0006469979296066253, 0.0034937888198757765, 0.0006469979296066253, 0.002717391304347826, 0.002976190476190476, 0.002717391304347826, 0.0033643892339544515, 0.5974378881987578, 0.00038819875776397513, 0.0006469979296066253, 0.0007763975155279503, 0.002717391304347826, 0.0002587991718426501, 0.0007763975155279503, 0.00038819875776397513, 0.0007763975155279503, 0.002846790890269151, 0.0033643892339544515, 0.002976190476190476, 0.003105590062111801, 0.003105590062111801, 0.003105590062111801, 
0.3718944099378882, 0.0033643892339544515, 0.0036231884057971015, 0.0007763975155279503, 0.003105590062111801, 0.0009057971014492754, 0.0033643892339544515, 0.002717391304347826, 0.0033643892339544515, 0.0032349896480331265, 0.0007763975155279503, 0.002976190476190476, 0.002199792960662526, 0.002976190476190476, 0.0005175983436853002, 0.003105590062111801, 0.6335403726708074, 0.630564182194617, 0.0034937888198757765, 0.0034937888198757765, 0.0033643892339544515, 0.0007763975155279503, 0.002976190476190476, 0.0012939958592132505, 0.0007763975155279503, 0.0033643892339544515, 0.0033643892339544515, 0.0010351966873706005, 0.00038819875776397513, 0.00038819875776397513, 0.003105590062111801, 0.0011645962732919255, 0.002976190476190476, 0.0006469979296066253, 0.0036231884057971015, 0.002976190476190476, 0.002976190476190476, 0.0034937888198757765, 0.0036231884057971015, 0.002976190476190476, 0.0034937888198757765, 0.003105590062111801, 0.002846790890269151, 0.00038819875776397513, 0.002717391304347826, 0.0006469979296066253, 0.003105590062111801, 0.0032349896480331265, 0.0005175983436853002, 0.002976190476190476, 0.0032349896480331265, 0.0033643892339544515, 0.0010351966873706005, 0.00038819875776397513, 0.0033643892339544515, 0.0006469979296066253, 0.0033643892339544515, 0.00038819875776397513, 0.0007763975155279503, 0.002717391304347826, 0.0005175983436853002, 0.003105590062111801, 0.0033643892339544515, 0.0034937888198757765, 0.0034937888198757765, 0.0033643892339544515, 0.0032349896480331265, 0.597567287784679, 0.003105590062111801, 0.0033643892339544515, 0.0006469979296066253, 0.002976190476190476, 0.003105590062111801, 0.003105590062111801, 0.0032349896480331265, 0.15049171842650103, 0.002846790890269151, 0.8558488612836439, 0.0032349896480331265, 0.0011645962732919255, 0.0032349896480331265, 0.0034937888198757765, 0.8910455486542443, 0.0034937888198757765, 0.0007763975155279503, 0.002846790890269151, 0.002976190476190476, 0.0009057971014492754, 0.0034937888198757765, 0.002976190476190476, 0.0006469979296066253, 0.0037525879917184265, 0.0005175983436853002, 0.003105590062111801, 0.0005175983436853002, 0.0038819875776397515, 0.003105590062111801, 0.00038819875776397513, 0.003105590062111801, 0.003105590062111801, 0.0032349896480331265, 0.003105590062111801, 0.0009057971014492754, 0.002458592132505176, 0.00038819875776397513, 0.0009057971014492754, 0.0007763975155279503, 0.003105590062111801, 0.0036231884057971015, 0.0014233954451345755, 0.0033643892339544515, 0.0032349896480331265, 0.002976190476190476, 0.0006469979296066253, 0.0032349896480331265, 0.003105590062111801, 0.002717391304347826, 0.002717391304347826, 0.0006469979296066253, 0.0032349896480331265, 0.8907867494824017, 0.002717391304347826, 0.2644927536231884, 0.14751552795031056, 0.002976190476190476, 0.00012939958592132506, 0.003105590062111801, 0.003105590062111801, 0.0006469979296066253, 0.002846790890269151, 0.6274585921325052, 0.0005175983436853002, 0.0033643892339544515, 0.0005175983436853002, 0.0005175983436853002, 0.0006469979296066253, 0.00038819875776397513, 0.003105590062111801, 0.0032349896480331265, 0.11063664596273291, 0.0032349896480331265, 0.0036231884057971015, 0.0032349896480331265, 0.0006469979296066253, 0.0007763975155279503, 0.002976190476190476, 0.7110507246376812, 0.002846790890269151, 0.0032349896480331265, 0.003105590062111801, 0.0034937888198757765, 0.002846790890269151, 0.002717391304347826, 0.003105590062111801, 0.002717391304347826, 0.8903985507246377, 0.0006469979296066253, 0.15010351966873706, 
0.0006469979296066253, 0.0034937888198757765, 0.0033643892339544515, 0.002717391304347826, 0.003105590062111801, 0.0006469979296066253, 0.0007763975155279503, 0.0036231884057971015, 0.003105590062111801, 0.15100931677018634, 0.003105590062111801, 0.00038819875776397513, 0.003105590062111801, 0.002846790890269151, 0.0009057971014492754, 0.002846790890269151, 0.0006469979296066253, 0.0036231884057971015, 0.0032349896480331265, 0.0007763975155279503, 0.002458592132505176, 0.002717391304347826, 0.0006469979296066253, 0.0010351966873706005, 0.15049171842650103, 0.003105590062111801, 0.15062111801242237, 0.6004140786749482, 0.0005175983436853002, 0.0006469979296066253, 0.003105590062111801, 0.002976190476190476, 0.0032349896480331265, 0.0036231884057971015, 0.0032349896480331265, 0.0005175983436853002, 0.0034937888198757765, 0.0034937888198757765, 0.0034937888198757765, 0.0032349896480331265, 0.8968685300207039, 0.0034937888198757765, 0.0011645962732919255, 0.26268115942028986, 0.00038819875776397513, 0.0005175983436853002, 0.0007763975155279503, 0.002976190476190476, 0.002976190476190476, 0.6313405797101449, 0.8925983436853002, 0.002976190476190476, 0.0006469979296066253, 0.003105590062111801, 0.003105590062111801, 0.0006469979296066253, 0.0006469979296066253, 0.0034937888198757765, 0.0036231884057971015, 0.002846790890269151, 0.002976190476190476, 0.003105590062111801, 0.002846790890269151, 0.002976190476190476, 0.0005175983436853002, 0.0036231884057971015, 0.0033643892339544515, 0.0033643892339544515, 0.00038819875776397513, 0.003105590062111801, 0.0034937888198757765, 0.0034937888198757765, 0.0005175983436853002, 0.0005175983436853002, 0.0006469979296066253, 0.0005175983436853002, 0.0006469979296066253, 0.002458592132505176, 0.29528985507246375, 0.00038819875776397513, 0.00038819875776397513, 0.0010351966873706005, 0.0032349896480331265, 0.0033643892339544515, 0.0033643892339544515, 0.0036231884057971015, 0.002587991718426501, 0.0006469979296066253, 0.0032349896480331265, 0.002976190476190476, 0.0032349896480331265, 0.0005175983436853002, 0.0006469979296066253, 0.0034937888198757765, 0.003105590062111801, 0.002976190476190476, 0.0005175983436853002, 0.002976190476190476, 0.0033643892339544515, 0.0036231884057971015, 0.0032349896480331265, 0.0032349896480331265, 0.00038819875776397513, 0.00038819875776397513, 0.003105590062111801, 0.0006469979296066253, 0.002329192546583851, 0.0007763975155279503, 0.0011645962732919255, 0.003105590062111801, 0.002976190476190476, 0.0036231884057971015, 0.0010351966873706005, 0.0016821946169772257, 0.0006469979296066253, 0.0036231884057971015, 0.0033643892339544515, 0.0009057971014492754, 0.0032349896480331265, 0.002717391304347826, 0.002846790890269151, 0.0032349896480331265, 0.003105590062111801, 0.002458592132505176, 0.002976190476190476, 0.0034937888198757765, 0.00038819875776397513, 0.0010351966873706005, 0.9970238095238095, 0.9989648033126294, 0.0006469979296066253, 0.1503623188405797, 0.002846790890269151, 0.002717391304347826, 0.0032349896480331265, 0.0005175983436853002, 0.00038819875776397513, 0.002587991718426501, 0.00038819875776397513, 0.0009057971014492754, 0.002587991718426501, 0.0012939958592132505, 0.0007763975155279503, 0.15075051759834368, 0.0010351966873706005, 0.0009057971014492754, 0.0033643892339544515, 0.002717391304347826, 0.0032349896480331265, 0.002976190476190476, 0.002976190476190476, 0.003105590062111801, 0.7510351966873706, 0.0034937888198757765, 0.0036231884057971015, 0.0032349896480331265, 0.0010351966873706005, 
0.002976190476190476, 0.0006469979296066253, 0.0007763975155279503, 0.0010351966873706005, 0.002976190476190476, 0.0006469979296066253, 0.002846790890269151, 0.0032349896480331265, 0.33203933747412007, 0.0005175983436853002, 0.003105590062111801, 0.0007763975155279503, 0.003105590062111801, 0.0032349896480331265, 0.0006469979296066253, 0.4508281573498965, 0.0032349896480331265, 0.0033643892339544515, 0.26384575569358176, 0.002717391304347826, 0.002717391304347826, 0.002846790890269151, 0.002717391304347826, 0.0006469979296066253, 0.002846790890269151, 0.002717391304347826, 0.0034937888198757765, 0.002976190476190476, 0.0032349896480331265, 0.0006469979296066253, 0.002458592132505176, 0.0006469979296066253, 0.003105590062111801, 0.0032349896480331265, 0.002587991718426501, 0.003105590062111801, 0.0011645962732919255, 0.0032349896480331265, 0.003105590062111801, 0.002976190476190476, 0.0034937888198757765, 0.0006469979296066253, 0.002846790890269151, 0.002717391304347826, 0.0007763975155279503, 0.002329192546583851, 0.0006469979296066253, 0.002976190476190476, 0.002976190476190476, 0.7507763975155279, 0.00038819875776397513, 0.0033643892339544515, 0.0010351966873706005, 0.15062111801242237, 0.002976190476190476, 0.00038819875776397513, 0.00038819875776397513, 0.002846790890269151, 0.0007763975155279503, 0.00038819875776397513, 0.0009057971014492754, 0.0034937888198757765, 0.0032349896480331265, 0.0033643892339544515, 0.0032349896480331265, 0.8581780538302277, 0.0010351966873706005, 0.0006469979296066253, 0.00038819875776397513, 0.0010351966873706005, 0.00038819875776397513, 0.0007763975155279503, 0.0033643892339544515, 0.042443064182194616, 0.0034937888198757765, 0.0007763975155279503, 0.002717391304347826, 0.0010351966873706005, 0.002846790890269151, 0.0033643892339544515, 0.00038819875776397513, 0.002846790890269151, 0.002846790890269151, 0.7391304347826086, 0.0009057971014492754, 0.0034937888198757765, 0.0036231884057971015, 0.0032349896480331265, 0.002717391304347826, 0.003105590062111801, 0.8514492753623188, 0.00038819875776397513, 0.0032349896480331265, 0.002587991718426501, 0.002976190476190476, 0.003105590062111801, 0.002846790890269151, 0.002846790890269151, 0.002846790890269151, 0.0007763975155279503, 0.002976190476190476, 0.002070393374741201, 0.002458592132505176, 0.0032349896480331265, 0.0033643892339544515, 0.003105590062111801, 0.0011645962732919255, 0.0032349896480331265, 0.003105590062111801, 0.038302277432712216, 0.0005175983436853002, 0.003105590062111801, 0.0037525879917184265, 0.002976190476190476, 0.0007763975155279503, 0.0006469979296066253, 0.002717391304347826, 0.002846790890269151, 0.0011645962732919255, 0.0032349896480331265, 0.002458592132505176, 0.0007763975155279503, 0.002976190476190476, 0.0006469979296066253, 0.003105590062111801, 0.002717391304347826, 0.0032349896480331265, 0.14997412008281574, 0.003105590062111801, 0.002976190476190476, 0.0032349896480331265, 0.0034937888198757765, 0.0006469979296066253, 0.003105590062111801, 0.0033643892339544515, 0.002846790890269151, 0.0033643892339544515, 0.0032349896480331265, 0.002458592132505176, 0.0034937888198757765, 0.003105590062111801, 0.0009057971014492754, 0.0006469979296066253, 0.003105590062111801, 0.003105590062111801, 0.0032349896480331265, 0.0005175983436853002, 0.003105590062111801, 0.0009057971014492754, 0.6000258799171843, 0.0033643892339544515, 0.0034937888198757765, 0.002976190476190476, 0.00038819875776397513, 0.0033643892339544515, 0.003105590062111801, 0.8924689440993789, 0.0007763975155279503, 
0.2629399585921325, 0.0009057971014492754, 0.0032349896480331265, 0.00038819875776397513, 0.0007763975155279503, 0.00038819875776397513, 0.003105590062111801, 0.002587991718426501, 0.0006469979296066253, 0.002976190476190476, 0.0032349896480331265, 0.003105590062111801, 0.0010351966873706005, 0.0036231884057971015, 0.002717391304347826, 0.44539337474120083, 0.0007763975155279503, 0.0033643892339544515, 0.0036231884057971015, 0.0007763975155279503, 0.0005175983436853002, 0.0005175983436853002, 0.002717391304347826, 0.0006469979296066253, 0.0033643892339544515, 0.002717391304347826, 0.003105590062111801, 0.002329192546583851, 0.0033643892339544515, 0.0036231884057971015, 0.0032349896480331265, 0.003105590062111801, 0.002717391304347826, 0.0005175983436853002, 0.0032349896480331265, 0.0005175983436853002, 0.0006469979296066253, 0.00038819875776397513, 0.00038819875776397513, 0.0010351966873706005, 0.002846790890269151, 0.002587991718426501, 0.0036231884057971015, 0.002846790890269151, 0.0005175983436853002, 0.0033643892339544515, 0.00038819875776397513, 0.0006469979296066253, 0.002846790890269151, 0.0011645962732919255, 0.297360248447205, 0.0032349896480331265, 0.00038819875776397513, 0.0005175983436853002, 0.003105590062111801, 0.0034937888198757765, 0.003105590062111801, 0.002717391304347826, 0.0036231884057971015, 0.0007763975155279503, 0.002846790890269151, 0.0033643892339544515, 0.0006469979296066253, 0.0006469979296066253, 0.0032349896480331265, 0.8935041407867494, 0.0006469979296066253, 0.0011645962732919255, 0.8501552795031055, 0.002976190476190476, 0.00038819875776397513, 0.003105590062111801, 0.0033643892339544515, 0.0032349896480331265, 0.0033643892339544515, 0.002846790890269151, 0.0007763975155279503, 0.0006469979296066253, 0.14945652173913043, 0.0037525879917184265, 0.002717391304347826, 0.002976190476190476, 0.0005175983436853002, 0.002976190476190476, 0.0006469979296066253, 0.002717391304347826, 0.002717391304347826, 0.0005175983436853002, 0.002587991718426501, 0.002717391304347826, 0.0033643892339544515, 0.04930124223602485, 0.0007763975155279503, 0.0005175983436853002, 0.14932712215320912, 0.002976190476190476, 0.0005175983436853002, 0.002976190476190476, 0.002717391304347826, 0.04283126293995859, 0.002717391304347826, 0.0034937888198757765, 0.002846790890269151, 0.002717391304347826, 0.0032349896480331265, 0.0007763975155279503, 0.002846790890269151, 0.0032349896480331265, 0.0005175983436853002, 0.002587991718426501, 0.0005175983436853002, 0.0006469979296066253, 0.0005175983436853002, 0.003105590062111801, 0.0009057971014492754, 0.002070393374741201, 0.0006469979296066253, 0.0032349896480331265, 0.0006469979296066253, 0.0007763975155279503, 0.003105590062111801, 0.0032349896480331265, 0.002717391304347826, 0.9657091097308489, 0.0034937888198757765, 0.0010351966873706005, 0.003105590062111801, 0.003105590062111801, 0.002976190476190476, 0.002717391304347826, 0.00038819875776397513, 0.003105590062111801, 0.0010351966873706005, 0.0032349896480331265, 0.003105590062111801, 0.0032349896480331265, 0.003105590062111801, 0.002976190476190476, 0.002717391304347826, 0.0005175983436853002, 0.003105590062111801, 0.002717391304347826, 0.003105590062111801, 0.003105590062111801, 0.0005175983436853002, 0.003105590062111801, 0.0034937888198757765, 0.002976190476190476, 0.10856625258799171, 0.002329192546583851, 0.0005175983436853002, 0.002976190476190476, 0.0011645962732919255, 1.0, 0.0032349896480331265, 0.0006469979296066253, 0.0033643892339544515, 0.0036231884057971015, 
0.0006469979296066253, 0.0034937888198757765, 0.0014233954451345755, 0.002976190476190476, 0.0007763975155279503, 0.002846790890269151, 0.003105590062111801, 0.0032349896480331265, 0.0034937888198757765, 0.003105590062111801, 0.002846790890269151, 0.0033643892339544515, 0.0007763975155279503, 0.002717391304347826, 0.0009057971014492754, 0.003105590062111801, 0.0032349896480331265, 0.002976190476190476, 0.0033643892339544515, 0.002587991718426501, 0.0010351966873706005, 0.0007763975155279503, 0.0034937888198757765, 0.9993530020703933, 0.002846790890269151, 0.002199792960662526, 0.0005175983436853002, 0.003105590062111801, 0.002976190476190476, 0.0006469979296066253, 0.0036231884057971015, 0.0006469979296066253, 0.0032349896480331265, 0.0010351966873706005, 0.002717391304347826, 0.003105590062111801, 0.0006469979296066253, 0.00038819875776397513, 0.0034937888198757765, 0.0005175983436853002, 0.0005175983436853002, 0.002458592132505176, 0.0033643892339544515, 0.0007763975155279503, 0.002846790890269151, 0.0005175983436853002, 0.0034937888198757765, 0.0005175983436853002, 0.9981884057971014, 0.03558488612836439, 0.0007763975155279503, 0.002717391304347826, 0.0006469979296066253, 0.0033643892339544515, 0.00038819875776397513, 0.002846790890269151, 0.0033643892339544515, 0.002976190476190476, 0.0006469979296066253, 0.003105590062111801, 0.9939182194616977, 0.0033643892339544515, 0.0009057971014492754, 0.00038819875776397513, 0.0032349896480331265, 0.8504140786749482, 0.003105590062111801, 0.002846790890269151, 0.5971790890269151, 0.0012939958592132505, 0.0036231884057971015, 0.7506469979296067, 0.002846790890269151, 0.002587991718426501, 0.0033643892339544515, 0.0009057971014492754, 0.0007763975155279503, 0.002587991718426501, 0.0033643892339544515, 0.002717391304347826, 0.0007763975155279503, 0.003105590062111801, 0.0009057971014492754, 0.0007763975155279503, 0.00038819875776397513, 0.0032349896480331265, 0.0033643892339544515, 0.0007763975155279503, 0.00012939958592132506, 0.002976190476190476, 0.0036231884057971015, 0.002976190476190476, 0.1503623188405797, 0.0006469979296066253, 0.0032349896480331265, 0.0007763975155279503, 0.0009057971014492754, 0.0032349896480331265, 0.0033643892339544515, 0.0005175983436853002, 0.0034937888198757765, 0.0033643892339544515, 0.0032349896480331265, 0.002717391304347826, 0.7505175983436853, 0.002717391304347826, 0.1502329192546584, 0.003105590062111801, 0.0005175983436853002, 0.0032349896480331265, 0.002717391304347826, 0.5909679089026915, 0.6278467908902692, 0.002846790890269151, 0.0010351966873706005, 0.0007763975155279503, 0.0007763975155279503, 0.003105590062111801, 0.0032349896480331265, 0.16291161178509533, 0.16291161178509533, 0.1611785095320624, 0.42114384748700173, 0.6308492201039861, 0.9965337954939342, 0.41421143847487, 0.9480069324090121, 0.9809358752166378, 1.0, 0.6603119584055459, 0.9930675909878682, 0.9480069324090121, 0.9688041594454073, 1.0, 0.7019064124783362, 0.16464471403812825, 0.6308492201039861, 0.9930675909878682, 0.16464471403812825, 0.16291161178509533, 0.9930675909878682, 0.9948006932409013, 1.0, 0.9480069324090121, 0.41421143847487, 0.6672443674176777, 0.5771230502599654, 0.05025996533795494, 0.6204506065857885, 0.9948006932409013, 0.045060658578856154, 0.9930675909878682, 0.5511265164644714, 0.16464471403812825, 1.0, 0.6672443674176777, 0.4124783362218371, 0.5892547660311959, 0.7019064124783362, 0.6672443674176777, 0.16464471403812825, 0.9532062391681109, 0.9930675909878682, 0.9930675909878682, 0.29116117850953205, 
0.1611785095320624, 0.7001733102253033, 0.0034662045060658577, 0.6759098786828422, 0.9930675909878682, 0.16464471403812825, 0.16464471403812825, 0.16464471403812825, 0.16464471403812825, 0.5511265164644714, 1.0, 0.29116117850953205, 0.58578856152513, 0.9948006932409013, 0.3292894280762565, 0.9948006932409013, 0.9982668977469671, 0.16464471403812825, 0.16464471403812825, 0.7209705372616985, 0.3708838821490468, 0.3830155979202773, 0.29116117850953205, 0.7192374350086655, 0.04852686308492201, 0.8405545927209706, 0.045060658578856154, 0.707105719237435, 0.949740034662045, 0.03986135181975736, 0.16464471403812825, 0.5511265164644714, 0.8769497400346621, 0.28942807625649913, 0.49393414211438474, 0.3292894280762565, 0.9948006932409013, 0.9480069324090121, 0.16291161178509533, 0.05199306759098787, 0.16464471403812825, 0.045060658578856154, 0.9601386481802426, 0.11785095320623917, 1.0, 0.9982668977469671, 0.9948006932409013, 0.036395147313691506, 0.9913344887348353, 0.9480069324090121, 0.6672443674176777, 0.7209705372616985, 0.16464471403812825, 0.7192374350086655, 0.16464471403812825, 0.6603119584055459, 0.7019064124783362, 0.6036490683229814, 0.04891304347826087, 0.04891304347826087, 0.04904244306418219, 0.05020703933747412, 0.04917184265010352, 0.04891304347826087, 0.04994824016563147, 0.04891304347826087, 0.04930124223602485, 0.05033643892339545, 0.04891304347826087, 0.8870341614906833, 0.04891304347826087, 0.04891304347826087, 0.04994824016563147, 0.04904244306418219, 0.04917184265010352, 0.04891304347826087, 0.04904244306418219, 0.0012939958592132505, 0.05020703933747412, 0.04917184265010352, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04904244306418219, 0.04994824016563147, 0.05007763975155279, 0.05033643892339545, 0.04891304347826087, 0.6045548654244306, 0.04891304347826087, 0.04891304347826087, 0.04994824016563147, 0.04891304347826087, 0.04891304347826087, 0.04904244306418219, 0.04891304347826087, 0.04891304347826087, 0.04994824016563147, 0.00012939958592132506, 0.04917184265010352, 0.04917184265010352, 0.04994824016563147, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.0014233954451345755, 0.04994824016563147, 0.04917184265010352, 0.05020703933747412, 0.04930124223602485, 0.04917184265010352, 0.04891304347826087, 0.04994824016563147, 0.04994824016563147, 0.04994824016563147, 0.05033643892339545, 0.10804865424430642, 1.0, 0.04891304347826087, 0.05033643892339545, 0.04904244306418219, 0.04917184265010352, 0.04904244306418219, 0.04994824016563147, 0.04917184265010352, 0.04917184265010352, 0.0002587991718426501, 0.04891304347826087, 0.05046583850931677, 0.04904244306418219, 0.04917184265010352, 0.04891304347826087, 0.04891304347826087, 0.04994824016563147, 0.04917184265010352, 0.04994824016563147, 0.04994824016563147, 0.04891304347826087, 0.04930124223602485, 0.05007763975155279, 0.04917184265010352, 0.04994824016563147, 0.04891304347826087, 0.05046583850931677, 0.04891304347826087, 0.05020703933747412, 0.05007763975155279, 0.05007763975155279, 1.0, 0.04891304347826087, 0.05007763975155279, 0.04994824016563147, 0.04994824016563147, 0.04917184265010352, 0.0012939958592132505, 0.04994824016563147, 0.04917184265010352, 0.04891304347826087, 0.04891304347826087, 1.0, 0.5918737060041408, 0.05020703933747412, 0.05007763975155279, 0.05098343685300207, 0.0012939958592132505, 1.0, 0.04994824016563147, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04994824016563147, 0.04891304347826087, 0.05020703933747412, 0.05007763975155279, 
0.5934265010351967, 0.04891304347826087, 0.04891304347826087, 1.0, 0.04891304347826087, 0.04917184265010352, 0.04891304347826087, 0.04994824016563147, 0.5938146997929606, 0.04904244306418219, 0.05033643892339545, 0.4495341614906832, 0.04994824016563147, 0.04891304347826087, 0.537008281573499, 0.05020703933747412, 0.05033643892339545, 0.05033643892339545, 0.5914855072463768, 0.05007763975155279, 0.6041666666666666, 0.05020703933747412, 0.05007763975155279, 0.05033643892339545, 0.05046583850931677, 0.05020703933747412, 0.05046583850931677, 0.04904244306418219, 0.04994824016563147, 0.05007763975155279, 0.04891304347826087, 0.04930124223602485, 0.8070652173913043, 0.604684265010352, 0.04917184265010352, 0.3848343685300207, 0.04917184265010352, 0.04891304347826087, 0.44306418219461696, 0.04891304347826087, 0.04994824016563147, 0.05020703933747412, 0.04891304347826087, 0.0012939958592132505, 0.04994824016563147, 0.04904244306418219, 0.05007763975155279, 0.6037784679089027, 0.04917184265010352, 0.04930124223602485, 0.05033643892339545, 0.04917184265010352, 1.0, 0.04891304347826087, 0.5932971014492754, 0.04917184265010352, 0.0012939958592132505, 0.04930124223602485, 0.04891304347826087, 0.04891304347826087, 0.04994824016563147, 0.05007763975155279, 0.05020703933747412, 0.04930124223602485, 0.04904244306418219, 0.10390786749482402, 0.04917184265010352, 1.0, 0.6039078674948241, 0.04904244306418219, 0.5936853002070394, 0.22179089026915114, 0.6040372670807453, 0.05033643892339545, 0.04930124223602485, 0.04917184265010352, 0.04917184265010352, 0.04994824016563147, 0.05007763975155279, 0.04891304347826087, 0.04994824016563147, 1.0, 0.04994824016563147, 0.05007763975155279, 0.04904244306418219, 0.04994824016563147, 0.04994824016563147, 0.04891304347826087, 0.05033643892339545, 0.797360248447205, 0.04994824016563147, 0.04891304347826087, 0.05033643892339545, 0.0012939958592132505, 0.04917184265010352, 0.05020703933747412, 0.04891304347826087, 0.05020703933747412, 0.6309523809523809, 0.04917184265010352, 0.04891304347826087, 0.05046583850931677, 0.04930124223602485, 0.6044254658385093, 0.05007763975155279, 0.04904244306418219, 0.04891304347826087, 0.6309523809523809, 0.05033643892339545, 0.05033643892339545, 0.050724637681159424, 0.05020703933747412, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.6037784679089027, 0.05007763975155279, 0.04994824016563147, 0.05007763975155279, 0.04930124223602485, 0.05046583850931677, 0.04917184265010352, 0.6044254658385093, 0.0012939958592132505, 0.05007763975155279, 0.4346532091097309, 0.05046583850931677, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.8070652173913043, 0.04994824016563147, 0.04891304347826087, 0.04917184265010352, 0.04994824016563147, 0.05033643892339545, 0.04930124223602485, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04917184265010352, 0.05033643892339545, 0.0012939958592132505, 0.04917184265010352, 0.04891304347826087, 0.04891304347826087, 0.593944099378882, 0.04994824016563147, 0.04891304347826087, 0.05098343685300207, 0.0012939958592132505, 0.05007763975155279, 0.04891304347826087, 0.6037784679089027, 0.05007763975155279, 0.04891304347826087, 0.04917184265010352, 0.04994824016563147, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.05007763975155279, 0.8070652173913043, 0.8748706004140787, 0.6044254658385093, 0.8070652173913043, 0.00038819875776397513, 0.05098343685300207, 0.8493788819875776, 
0.05046583850931677, 0.04917184265010352, 0.05020703933747412, 0.04917184265010352, 0.05007763975155279, 0.04891304347826087, 0.05020703933747412, 0.5932971014492754, 0.8429089026915114, 0.0012939958592132505, 0.04994824016563147, 0.0009057971014492754, 0.05020703933747412, 0.6039078674948241, 0.00012939958592132506, 0.04904244306418219, 0.05007763975155279, 0.05098343685300207, 0.04994824016563147, 0.04904244306418219, 1.0, 0.05020703933747412, 0.04891304347826087, 0.04891304347826087, 0.5936853002070394, 0.04994824016563147, 0.05007763975155279, 0.593944099378882, 0.04891304347826087, 0.04891304347826087, 0.593167701863354, 0.05007763975155279, 0.04917184265010352, 0.05007763975155279, 0.04891304347826087, 0.04917184265010352, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 1.0, 0.04891304347826087, 0.05033643892339545, 0.04994824016563147, 0.10144927536231885, 0.05033643892339545, 0.5916149068322981, 0.04917184265010352, 0.05007763975155279, 0.04891304347826087, 0.05007763975155279, 0.04904244306418219, 0.0012939958592132505, 0.04917184265010352, 0.04994824016563147, 0.0002587991718426501, 0.04917184265010352, 0.8834109730848861, 0.05007763975155279, 0.0002587991718426501, 0.04891304347826087, 0.04917184265010352, 0.0014233954451345755, 0.04904244306418219, 0.04917184265010352, 0.04994824016563147, 0.04930124223602485, 0.05033643892339545, 0.05007763975155279, 0.04994824016563147, 0.05046583850931677, 0.05020703933747412, 0.05020703933747412, 0.04917184265010352, 1.0, 0.04891304347826087, 0.05033643892339545, 0.0005175983436853002, 0.05020703933747412, 0.0002587991718426501, 0.05020703933747412, 0.05007763975155279, 0.04891304347826087, 0.05046583850931677, 0.04994824016563147, 0.604684265010352, 0.04994824016563147, 0.05033643892339545, 0.05007763975155279, 0.8378623188405797, 0.05033643892339545, 0.04891304347826087, 0.04904244306418219, 0.04891304347826087, 0.0012939958592132505, 0.04891304347826087, 0.04994824016563147, 0.05033643892339545, 0.0005175983436853002, 0.04930124223602485, 0.05033643892339545, 0.04891304347826087, 0.8426501035196687, 0.05007763975155279, 0.05033643892339545, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.05033643892339545, 0.04904244306418219, 0.05033643892339545, 0.05033643892339545, 0.04930124223602485, 0.4442287784679089, 0.050595238095238096, 0.8865165631469979, 0.05007763975155279, 0.04904244306418219, 0.05007763975155279, 0.04994824016563147, 0.04994824016563147, 0.05007763975155279, 0.05007763975155279, 0.5929089026915114, 0.05046583850931677, 0.04917184265010352, 0.04994824016563147, 0.0002587991718426501, 0.05007763975155279, 0.04891304347826087, 0.04904244306418219, 0.04891304347826087, 0.04917184265010352, 0.04904244306418219, 0.04994824016563147, 0.04994824016563147, 0.04891304347826087, 0.04994824016563147, 1.0, 0.0006469979296066253, 0.04891304347826087, 0.0012939958592132505, 0.04994824016563147, 1.0, 0.04891304347826087, 0.05020703933747412, 0.05033643892339545, 0.05033643892339545, 0.04904244306418219, 0.05007763975155279, 0.0012939958592132505, 0.050595238095238096, 0.04917184265010352, 0.04904244306418219, 0.0006469979296066253, 0.6036490683229814, 1.0, 0.0012939958592132505, 0.04930124223602485, 1.0, 0.04904244306418219, 0.04891304347826087, 0.04891304347826087, 0.3504140786749482, 0.04904244306418219, 0.04891304347826087, 0.8804347826086957, 0.050595238095238096, 0.04917184265010352, 0.05033643892339545, 0.05007763975155279, 0.05020703933747412, 0.05007763975155279, 
0.04994824016563147, 1.0, 0.05020703933747412, 0.04917184265010352, 1.0, 0.04891304347826087, 0.04891304347826087, 0.04891304347826087, 0.6040372670807453, 0.05098343685300207, 0.05020703933747412, 0.04891304347826087, 0.04917184265010352, 0.04994824016563147, 0.05020703933747412, 0.04917184265010352, 0.04891304347826087, 0.8429089026915114, 0.04904244306418219, 0.04994824016563147, 0.04904244306418219, 0.05033643892339545, 0.04917184265010352, 0.05033643892339545, 0.05033643892339545, 0.050724637681159424, 0.04904244306418219, 0.8070652173913043, 0.04994824016563147, 0.04994824016563147, 0.0002587991718426501, 0.05046583850931677, 0.04994824016563147, 0.04891304347826087, 0.04891304347826087, 0.05007763975155279, 0.04891304347826087, 0.6701388888888888, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.9895833333333334, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.671875, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.11631944444444445, 1.0, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 1.0, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 1.0, 0.6579861111111112, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 1.0, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.6597222222222222, 0.050347222222222224, 0.050347222222222224, 1.0, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.6597222222222222, 0.050347222222222224, 0.052083333333333336, 0.4982638888888889, 0.052083333333333336, 0.050347222222222224, 0.5989583333333334, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.65625, 0.052083333333333336, 0.671875, 0.052083333333333336, 
0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.8993055555555556, 0.671875, 0.050347222222222224, 0.4253472222222222, 0.050347222222222224, 0.050347222222222224, 0.4913194444444444, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.6701388888888888, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 1.0, 0.050347222222222224, 0.6597222222222222, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.1111111111111111, 0.050347222222222224, 1.0, 0.6701388888888888, 0.050347222222222224, 0.6597222222222222, 0.24305555555555555, 0.671875, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 1.0, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.8888888888888888, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.703125, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.671875, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.703125, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.6701388888888888, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.671875, 0.052083333333333336, 0.484375, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.8993055555555556, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.6597222222222222, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.6701388888888888, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.8993055555555556, 0.9756944444444444, 0.671875, 0.8993055555555556, 0.052083333333333336, 0.9479166666666666, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.6597222222222222, 0.9409722222222222, 0.052083333333333336, 0.052083333333333336, 0.6701388888888888, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 1.0, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.6597222222222222, 0.052083333333333336, 0.052083333333333336, 0.6597222222222222, 0.050347222222222224, 0.050347222222222224, 
0.6597222222222222, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 1.0, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.1076388888888889, 0.052083333333333336, 0.6579861111111112, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.9861111111111112, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 1.0, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.671875, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.9357638888888888, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.9409722222222222, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.4930555555555556, 0.052083333333333336, 0.9895833333333334, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.6579861111111112, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 1.0, 0.050347222222222224, 0.052083333333333336, 1.0, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.6701388888888888, 1.0, 0.050347222222222224, 1.0, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.3854166666666667, 0.050347222222222224, 0.050347222222222224, 0.9826388888888888, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 1.0, 0.052083333333333336, 0.050347222222222224, 1.0, 0.050347222222222224, 0.050347222222222224, 0.050347222222222224, 0.671875, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.9409722222222222, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.8993055555555556, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.052083333333333336, 0.050347222222222224, 0.050347222222222224, 0.052083333333333336, 0.050347222222222224, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 
0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5555555555555556, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5555555555555556, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5555555555555556, 0.5555555555555556, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5555555555555556, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5185185185185185, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5555555555555556, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5185185185185185, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5185185185185185, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.5555555555555556, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.48148148148148145, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 
0.25163736642537055, 0.25163736642537055, 0.6698264966103642, 0.25163736642537055, 0.25163736642537055, 0.6105509594392738, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.31331150178099504, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.5926189245087901, 0.25163736642537055, 0.47266028955532574, 0.38580087326209356, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.6938986556359876, 0.25163736642537055, 0.6802108468344249, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.5249267493967598, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.7645639434677697, 0.7645639434677697, 0.7645639434677697, 0.7886935539469149, 0.7645639434677697, 0.7645639434677697, 0.7845570492933471, 0.7645639434677697, 0.25163736642537055, 0.7645639434677697, 0.7886935539469149, 0.7645639434677697, 0.25163736642537055, 0.25163736642537055, 0.7645639434677697, 0.7645639434677697, 0.7645639434677697, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.3771185223486154, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.8050097667470987, 0.5039641502930025, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.4589078478685511, 0.25163736642537055, 0.2562406641388027, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.5039641502930025, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.7790417097552568, 0.03618005285533724, 0.67454469723084, 0.3709065839365736, 0.5439503619441571, 0.3067907618062737, 0.3874526025508445, 0.8921061702861083, 0.7473285074112375, 0.7990348155808342, 0.3902102723198897, 0.6218545329196828, 0.6890440078133977, 0.6239227852464667, 0.4667356084108928, 0.06618407445708377, 0.7328507411237505, 0.7397449155463633, 0.4095139607032058, 0.6701137538779731, 0.4133344823624038, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.7666321957945536, 0.25163736642537055, 
0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.25163736642537055, 0.5039641502930025, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.7580015612802498, 0.2849336455893833, 0.2849336455893833, 0.6908665105386417, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.3544106167056987, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.6705698672911787, 0.2849336455893833, 0.5347384855581577, 0.43637782982045276, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.7853239656518345, 0.2849336455893833, 0.7697111631537861, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.5940671350507416, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.8657298985167837, 0.8657298985167837, 0.8657298985167837, 0.8930523028883685, 0.8657298985167837, 0.8657298985167837, 0.8883684621389539, 0.8657298985167837, 0.2849336455893833, 0.8657298985167837, 0.8930523028883685, 0.8657298985167837, 0.2849336455893833, 0.2849336455893833, 0.8657298985167837, 0.8657298985167837, 0.8657298985167837, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.4270101483216237, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.9110070257611241, 0.570647931303669, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.5191256830601093, 0.2849336455893833, 0.2896174863387978, 0.2849336455893833, 
0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.570647931303669, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.8821233411397346, 0.04059328649492584, 0.7634660421545667, 0.419984387197502, 0.6159250585480094, 0.3473848555815769, 0.43871975019516, 1.0, 0.8462138953942233, 0.9047619047619048, 0.4418423106947697, 0.7041373926619828, 0.7798594847775175, 0.7064793130366901, 0.5284933645589384, 0.07494145199063232, 0.8298204527712725, 0.8376268540202967, 0.4637002341920375, 0.7587822014051522, 0.4676034348165496, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.868071818891491, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.2849336455893833, 0.570647931303669, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.6846957951609114, 0.2572233967582805, 0.2572233967582805, 0.6240897345548508, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.32026661968522435, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.6057669720460418, 0.2572233967582805, 0.4831454075640122, 0.39435048155978386, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.7093023255813954, 0.2572233967582805, 0.6952959830866807, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.5365867982147052, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.7815362931642001, 0.7815362931642001, 0.7815362931642001, 0.8062015503875969, 0.7815362931642001, 
0.7815362931642001, 0.8019732205778718, 0.7815362931642001, 0.2572233967582805, 0.7815362931642001, 0.8062015503875969, 0.7815362931642001, 0.2572233967582805, 0.2572233967582805, 0.7815362931642001, 0.7815362931642001, 0.7815362931642001, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.3854827343199436, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.8228799624148462, 0.5151515151515151, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.4690803382663848, 0.2572233967582805, 0.26192154099130843, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.5151515151515151, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.7963354474982381, 0.03696852243363871, 0.689511392999765, 0.37914023960535587, 0.5560253699788583, 0.31360112755461594, 0.3960535588442565, 0.9119097956307258, 0.7639182522903453, 0.8167723749119098, 0.3988724453840733, 0.6356589147286822, 0.7043399107352596, 0.6377730796335448, 0.4770965468639887, 0.06765327695560254, 0.7491190979563073, 0.7561663143058492, 0.4186046511627907, 0.6849894291754757, 0.42250998355649516, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.7836504580690627, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.2572233967582805, 0.5151515151515151, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.7840909090909091, 0.29545454545454547, 0.29545454545454547, 0.7159090909090909, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.36363636363636365, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.6931818181818182, 0.29545454545454547, 0.5511363636363636, 0.44886363636363635, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.8125, 0.29545454545454547, 0.7954545454545454, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 
0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.6136363636363636, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.8977272727272727, 0.8977272727272727, 0.8977272727272727, 0.9261363636363636, 0.8977272727272727, 0.8977272727272727, 0.9204545454545454, 0.8977272727272727, 0.29545454545454547, 0.8977272727272727, 0.9261363636363636, 0.8977272727272727, 0.29545454545454547, 0.29545454545454547, 0.8977272727272727, 0.8977272727272727, 0.8977272727272727, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.4431818181818182, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.9431818181818182, 0.5909090909090909, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.5397727272727273, 0.29545454545454547, 0.30113636363636365, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.5909090909090909, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.9147727272727273, 0.03977272727272727, 0.7897727272727273, 0.4318181818181818, 0.6363636363636364, 0.35795454545454547, 0.45454545454545453, 1.0, 0.875, 0.9375, 0.45454545454545453, 0.7272727272727273, 0.8068181818181818, 0.7329545454545454, 0.5454545454545454, 0.07386363636363637, 0.8579545454545454, 0.8693181818181818, 0.4772727272727273, 0.7840909090909091, 0.48295454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.8977272727272727, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.29545454545454547, 0.5909090909090909, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 
0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.013249211356466877, 0.00017409470752089137, 0.0001278118609406953, 0.04657534246575343, 0.08493150684931507, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.04657534246575343, 0.0547945205479452, 0.043835616438356165, 0.1095890410958904, 0.06027397260273973, 0.043835616438356165, 0.04657534246575343, 0.07671232876712329, 0.04657534246575343, 0.10410958904109589, 0.04657534246575343, 0.049315068493150684, 0.04657534246575343, 0.043835616438356165, 0.043835616438356165, 0.12602739726027398, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.07397260273972603, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.06575342465753424, 0.16986301369863013, 0.06301369863013699, 0.12054794520547946, 0.07397260273972603, 0.06301369863013699, 0.043835616438356165, 0.06027397260273973, 0.043835616438356165, 0.10410958904109589, 0.049315068493150684, 0.043835616438356165, 0.2493150684931507, 0.2876712328767123, 0.2493150684931507, 0.30684931506849317, 0.2493150684931507, 0.2493150684931507, 0.043835616438356165, 0.06027397260273973, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.052054794520547946, 0.04657534246575343, 0.057534246575342465, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.0547945205479452, 0.0547945205479452, 0.10136986301369863, 0.049315068493150684, 0.043835616438356165, 0.04657534246575343, 0.07123287671232877, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.06027397260273973, 
0.043835616438356165, 0.1178082191780822, 0.043835616438356165, 0.043835616438356165, 0.27123287671232876, 0.3150684931506849, 0.27123287671232876, 0.3287671232876712, 0.27123287671232876, 0.27123287671232876, 0.043835616438356165, 0.06575342465753424, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.0684931506849315, 0.043835616438356165, 0.10684931506849316, 0.06301369863013699, 0.043835616438356165, 0.07671232876712329, 0.136986301369863, 0.0684931506849315, 0.12602739726027398, 0.0684931506849315, 0.0684931506849315, 0.25205479452054796, 0.2821917808219178, 0.2493150684931507, 0.30684931506849317, 0.2493150684931507, 0.2602739726027397, 0.04657534246575343, 0.07397260273972603, 0.049315068493150684, 0.10684931506849316, 0.049315068493150684, 0.06301369863013699, 0.043835616438356165, 0.057534246575342465, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.04657534246575343, 0.09041095890410959, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.10684931506849316, 0.043835616438356165, 0.10136986301369863, 0.04657534246575343, 0.043835616438356165, 0.043835616438356165, 0.07123287671232877, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.052054794520547946, 0.04657534246575343, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.14520547945205478, 0.16986301369863013, 0.14520547945205478, 0.20273972602739726, 0.14520547945205478, 0.14520547945205478, 0.043835616438356165, 0.057534246575342465, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.049315068493150684, 0.043835616438356165, 0.06301369863013699, 0.043835616438356165, 0.10684931506849316, 0.043835616438356165, 0.043835616438356165, 0.043835616438356165, 0.09041095890410959, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.052054794520547946, 0.07671232876712329, 0.0547945205479452, 0.1095890410958904, 0.052054794520547946, 0.052054794520547946, 0.04657534246575343, 0.052054794520547946, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.04657534246575343, 0.1178082191780822, 0.043835616438356165, 0.10136986301369863, 0.043835616438356165, 0.043835616438356165, 0.06301369863013699, 0.18082191780821918, 0.0684931506849315, 0.12054794520547946, 0.06301369863013699, 0.06301369863013699, 0.06027397260273973, 0.12876712328767123, 0.06027397260273973, 0.12054794520547946, 0.06027397260273973, 0.06027397260273973, 0.049315068493150684, 0.07945205479452055, 0.04657534246575343, 0.10410958904109589, 0.04657534246575343, 0.04657534246575343, 0.1589041095890411, 0.19452054794520549, 0.1506849315068493, 0.20821917808219179, 0.1506849315068493, 0.1506849315068493, 0.01643835616438356, 0.049315068493150684, 0.07397260273972603, 0.03287671232876712, 0.019178082191780823, 0.09863013698630137, 0.07397260273972603, 0.019178082191780823, 0.00821917808219178, 0.12602739726027398, 0.12054794520547946, 0.010958904109589041, 0.12876712328767123, 0.00821917808219178, 0.01643835616438356, 0.01643835616438356, 0.1589041095890411, 0.1506849315068493, 0.12054794520547946, 0.13424657534246576, 0.5232876712328767, 0.5232876712328767, 0.06027397260273973, 0.06027397260273973, 0.2410958904109589, 0.049315068493150684, 0.010958904109589041, 0.11506849315068493, 0.030136986301369864, 0.021917808219178082, 0.5013698630136987, 
0.5013698630136987, 0.5013698630136987, 0.7041095890410959, 0.684931506849315, 0.6767123287671233, 0.038356164383561646, 0.29863013698630136, 0.0027397260273972603, 0.09863013698630137, 0.005479452054794521, 0.03287671232876712, 0.0136986301369863, 0.5123287671232877, 0.0273972602739726, 0.0136986301369863, 0.0684931506849315, 0.9698630136986301, 0.00980392156862745, 0.09817997977755308, 0.269464105156724, 0.09625884732052578, 0.1685540950455005, 0.11678463094034378, 0.09706774519716886, 0.10657229524772498, 0.21870576339737108, 0.09514661274014155, 0.17482305358948433, 0.10839231547017189, 0.09676440849342771, 0.13023255813953488, 0.32204246713852375, 0.1269969666329626, 0.19878665318503538, 0.12790697674418605, 0.12770475227502529, 0.11233569261880688, 0.19656218402426692, 0.10596562184024266, 0.19019211324570273, 0.10606673407482306, 0.10353892821031345, 0.08766430738119313, 0.2885743174924166, 0.0923154701718908, 0.16258847320525785, 0.09605662285136501, 0.09059656218402427, 0.14560161779575329, 0.5027300303336704, 0.1487360970677452, 0.2089989888776542, 0.15783619817997976, 0.14499494438827099, 0.1122345803842265, 0.24833164812942365, 0.1122345803842265, 0.1845298281092012, 0.12406471183013144, 0.11183013144590495, 0.29251769464105154, 0.41638018200202226, 0.2800808897876643, 0.34196157735085947, 0.28099089989888776, 0.27360970677451973, 0.10394337714863498, 0.2242669362992922, 0.09504550050556117, 0.16764408493427704, 0.11375126390293225, 0.10586450960566228, 0.10748230535894843, 0.25611729019211327, 0.09757330637007078, 0.16905965621840244, 0.0968655207280081, 0.0968655207280081, 0.1102123356926188, 0.2462082912032356, 0.1186046511627907, 0.17664307381193126, 0.11627906976744186, 0.10606673407482306, 0.1032355915065723, 0.2448938321536906, 0.10010111223458039, 0.17148634984833164, 0.10020222446916077, 0.10040444893832154, 0.11769464105156724, 0.2985844287158746, 0.12467138523761376, 0.19908998988877655, 0.1320525783619818, 0.12224469160768453, 0.31061678463094033, 0.44883720930232557, 0.2985844287158746, 0.3666329625884732, 0.30293225480283115, 0.2961577350859454, 0.1057633973710819, 0.23609706774519718, 0.09838220424671386, 0.17148634984833164, 0.10606673407482306, 0.09908998988877654, 0.10475227502527806, 0.29625884732052576, 0.09989888776541962, 0.1750252780586451, 0.11183013144590495, 0.09919110212335693, 0.1565217391304348, 0.45551061678463095, 0.14580384226491405, 0.21536905965621841, 0.14479271991911022, 0.14509605662285135, 0.30920121334681494, 0.4043478260869565, 0.2788675429726997, 0.34438827098078867, 0.2782608695652174, 0.2896865520728008, 0.11577350859453994, 0.2851365015166835, 0.11638018200202224, 0.18827098078867543, 0.11820020222446916, 0.1313447927199191, 0.1070778564206269, 0.2582406471183013, 0.10525783619817998, 0.1751263902932255, 0.10849342770475227, 0.10495449949443883, 0.11445904954499495, 0.4166835187057634, 0.10343781597573307, 0.1763397371081901, 0.10606673407482306, 0.11638018200202224, 0.11354903943377148, 0.3571284125379171, 0.11445904954499495, 0.1789686552072801, 0.1358948432760364, 0.11122345803842265, 0.09595551061678463, 0.30798786653185034, 0.09959555106167846, 0.17087967644084934, 0.1025278058645096, 0.09878665318503539, 0.11031344792719919, 0.2487360970677452, 0.11617795753286148, 0.18149646107178968, 0.11739130434782609, 0.11385237613751265, 0.20394337714863497, 0.3223458038422649, 0.19261880687563196, 0.26006066734074823, 0.19585439838220425, 0.1902932254802831, 0.09625884732052578, 0.26238624873609706, 0.09959555106167846, 
0.1692618806875632, 0.1, 0.10424671385237613, 0.12204246713852376, 0.2763397371081901, 0.11648129423660263, 0.18806875631951467, 0.12446916076845298, 0.11587462082912033, 0.12598584428715875, 0.33943377148634984, 0.12457027300303336, 0.1897876643073812, 0.1448938321536906, 0.1230535894843276, 0.11102123356926188, 0.2801820020222447, 0.10242669362992922, 0.1705763397371082, 0.10040444893832154, 0.09878665318503539, 0.10394337714863498, 0.2961577350859454, 0.09453993933265925, 0.16804853387259858, 0.09848331648129424, 0.09383215369059657, 0.12770475227502529, 0.3936299292214358, 0.11850353892821032, 0.18766430738119314, 0.1351870576339737, 0.11708796764408494, 0.12062689585439838, 0.4243680485338726, 0.12831142568250758, 0.18988877654196157, 0.12143579373104145, 0.11931243680485339, 0.14560161779575329, 0.4220424671385238, 0.13508594539939334, 0.2076845298281092, 0.13559150657229524, 0.13083923154701718, 0.10990899898887765, 0.2626895854398382, 0.10293225480283114, 0.17148634984833164, 0.10505561172901921, 0.1025278058645096, 0.34944388270980786, 0.533670374115268, 0.32790697674418606, 0.39767441860465114, 0.32679474216380183, 0.32588473205257834, 0.029459241323648102, 0.0743543179983858, 0.0924132364810331, 0.06255044390637611, 0.05306698950766747, 0.012005649717514125, 0.18099273607748184, 0.1120863599677159, 0.02360774818401937, 0.03853914447134786, 0.02784503631961259, 0.13518966908797417, 0.13105326876513318, 0.0035310734463276836, 0.014830508474576272, 0.12227602905569007, 0.012711864406779662, 0.007163034705407587, 0.02320419693301049, 0.025221953188054883, 0.2762308313155771, 0.17453591606133978, 0.11894673123486683, 0.14336158192090395, 0.01059322033898305, 0.003934624697336562, 0.4643866020984665, 0.4643866020984665, 0.1157183212267958, 0.11481033091202583, 0.005246166263115416, 0.2630145278450363, 0.07213478611783697, 0.018361581920903956, 0.12651331719128328, 0.05135189669087974, 0.03188054882970137, 0.5627522195318806, 0.5627522195318806, 0.5635593220338984, 0.7496973365617433, 0.7353712671509282, 0.716908797417272, 0.05357142857142857, 0.29499596448748994, 0.017453591606133977, 0.010189669087974173, 0.09584342211460856, 0.01513317191283293, 0.06830104923325263, 0.04640839386602098, 0.009887005649717515, 0.5771791767554479, 0.006255044390637611, 0.044592413236481034, 0.026432606941081516, 0.09301856335754641, 0.9743744955609362, 0.005548829701372074, 0.034722222222222224, 0.1724137931034483, 0.3115942028985507, 0.012711864406779662, 0.011019283746556474, 0.20722891566265061, 0.12560386473429952, 0.42, 0.0211864406779661, 0.029914529914529916, 0.025423728813559324, 0.2754237288135593, 0.18795180722891566, 0.033734939759036145, 0.2584745762711864, 0.3559322033898305, 0.0211864406779661, 0.3617021276595745, 0.024752475247524754, 0.19491525423728814, 0.011961722488038277, 0.08373205741626795, 0.1467065868263473, 0.2709832134292566, 0.31196581196581197, 0.1440677966101695, 0.05084745762711865, 0.17067307692307693, 0.2245762711864407, 0.09322033898305085, 0.09592326139088729, 0.05084745762711865, 0.211864406779661, 0.1906779661016949, 0.1228813559322034, 0.059322033898305086, 0.06779661016949153, 0.09322033898305085, 0.0211864406779661, 0.22033898305084745, 0.1228813559322034, 0.028846153846153848, 0.21610169491525424, 0.17714285714285713, 0.10602409638554217, 0.13559322033898305, 0.0211864406779661, 0.029661016949152543, 0.16101694915254236, 0.14042553191489363, 0.5602836879432624, 0.46411483253588515, 0.28085106382978725, 0.03389830508474576, 0.15853658536585366, 0.0211864406779661, 
0.2385542168674699, 0.13559322033898305, 0.13135593220338984, 0.1864406779661017, 0.11904761904761904, 0.0211864406779661, 0.463519313304721, 0.01694915254237288, 0.043701799485861184, 0.12289156626506025, 0.0211864406779661, 0.13135593220338984, 0.6551724137931034, 0.043859649122807015, 0.1493975903614458, 0.038135593220338986, 0.12295081967213115, 0.01015228426395939, 0.038135593220338986, 0.036057692307692304, 0.08888888888888889, 0.1652542372881356, 0.012711864406779662, 0.09134615384615384, 0.13043478260869565, 0.01927710843373494, 0.038135593220338986, 0.17590361445783131, 0.07203389830508475, 0.03117505995203837, 0.056962025316455694, 0.2222222222222222, 0.011990407673860911, 0.18072289156626506, 0.029661016949152543, 0.011990407673860911, 0.006134969325153374, 0.0847457627118644, 0.13983050847457626, 0.1271186440677966, 0.17266187050359713, 0.145933014354067, 0.05508474576271186, 0.13983050847457626, 0.0963855421686747, 0.008620689655172414, 0.8571428571428571, 0.03597122302158273, 0.1016949152542373, 0.024154589371980676, 0.1271186440677966, 0.20481927710843373, 0.2076271186440678, 0.10843373493975904, 0.13559322033898305, 0.09156626506024096, 0.00423728813559322, 0.023980815347721823, 0.033816425120772944, 0.03389830508474576, 0.17372881355932204, 0.005747126436781609, 0.09322033898305085, 0.08050847457627118, 0.14285714285714285, 0.1059322033898305, 0.05084745762711865, 0.3075060532687651, 0.3106796116504854, 0.21610169491525424, 0.19082125603864733, 0.05542168674698795, 0.11440677966101695, 0.12530120481927712, 0.08050847457627118, 0.14216867469879518, 0.11016949152542373, 0.09745762711864407, 0.046610169491525424, 0.0635593220338983, 0.03827751196172249, 0.3728813559322034, 0.1694915254237288, 0.10628019323671498, 0.178743961352657, 0.019138755980861243, 0.01694915254237288, 0.13983050847457626, 0.17372881355932204, 0.025423728813559324, 0.1694915254237288, 0.05741626794258373, 0.2692307692307692, 0.14698795180722893, 0.1271186440677966, 0.15012106537530268, 0.2542372881355932, 0.1483050847457627, 0.4139194139194139, 0.15677966101694915, 0.24074074074074073, 0.26570048309178745, 0.14903846153846154, 0.17796610169491525, 0.17149758454106281, 0.14285714285714285, 0.19491525423728814, 0.16626506024096385, 0.18220338983050846, 0.1864406779661017, 0.2542372881355932, 0.11961722488038277, 0.025423728813559324, 0.14878048780487804, 0.07004830917874397, 0.18220338983050846, 0.15942028985507245, 0.029661016949152543, 0.07894736842105263, 0.00423728813559322, 0.2584541062801932, 0.21610169491525424, 0.23728813559322035, 0.07627118644067797, 0.16666666666666666, 0.08050847457627118, 0.25217391304347825, 0.38106796116504854, 0.15677966101694915, 0.021739130434782608, 0.11016949152542373, 0.11325301204819277, 0.019184652278177457, 0.0211864406779661, 0.02158273381294964, 0.32340425531914896, 0.01694915254237288, 0.06779661016949153, 0.1493975903614458, 0.1927710843373494, 0.1271186440677966, 0.15319148936170213, 0.09745762711864407, 0.0847457627118644, 0.0425531914893617, 0.02891566265060241, 0.21030042918454936, 0.029661016949152543, 0.3276595744680851, 0.03389830508474576, 0.32051282051282054, 0.24281984334203655, 0.03389830508474576, 0.0423728813559322, 0.3391304347826087, 0.09808612440191387, 0.014354066985645933, 0.04142011834319527, 0.27228915662650605, 0.14698795180722893, 0.0211864406779661, 0.011990407673860911, 0.15254237288135594, 0.06265060240963856, 0.18401937046004843, 0.211864406779661, 0.23728813559322035, 0.1642512077294686, 0.07964601769911504, 0.07203389830508475, 
0.26382978723404255, 0.05084745762711865, 0.17372881355932204, 0.046610169491525424, 0.01694915254237288, 0.1927710843373494, 0.012711864406779662, 0.18220338983050846, 0.13135593220338984, 0.15903614457831325, 0.2025862068965517, 0.016867469879518072, 0.20574162679425836, 0.2179176755447942, 0.06779661016949153, 0.1652542372881356, 0.02891566265060241, 0.1440677966101695, 0.18840579710144928, 0.25, 0.046610169491525424, 0.2912621359223301, 0.12560386473429952, 0.16595744680851063, 0.2288135593220339, 0.13636363636363635, 0.19915254237288135, 0.07203389830508475, 0.5454545454545454, 0.2033898305084746, 0.025423728813559324, 0.15942028985507245, 0.4957627118644068, 0.2857142857142857, 0.13135593220338984, 0.18220338983050846, 0.21610169491525424, 0.0847457627118644, 0.059322033898305086, 0.06779661016949153, 0.059322033898305086, 0.059322033898305086, 0.4127659574468085, 0.2584745762711864, 0.14832535885167464, 0.17796610169491525, 0.0847457627118644, 0.03365384615384615, 0.15311004784688995, 0.05995203836930456, 0.012711864406779662, 0.3940677966101695, 0.19915254237288135, 0.2288135593220339, 0.21610169491525424, 0.1694915254237288, 0.37922705314009664, 0.2288135593220339, 0.24152542372881355, 0.2536231884057971, 0.2330508474576271, 0.1694915254237288, 0.13135593220338984, 0.3177966101694915, 0.3702127659574468, 0.29577464788732394, 0.05508474576271186, 0.05084745762711865, 0.15677966101694915, 0.21610169491525424, 0.211864406779661, 0.00847457627118644, 0.21610169491525424, 0.3717948717948718, 0.13734939759036144, 0.16908212560386474, 0.2891566265060241, 0.016786570743405275, 0.01694915254237288, 0.26811594202898553, 0.046610169491525424, 0.012711864406779662, 0.28019323671497587, 0.13701923076923078, 0.014388489208633094, 0.01694915254237288, 0.1569620253164557, 0.22033898305084745, 0.1906779661016949, 0.16101694915254236, 0.19518072289156627, 0.09200968523002422, 0.1440677966101695, 0.01694915254237288, 0.09745762711864407, 0.20531400966183574, 0.3008474576271186, 0.28502415458937197, 0.3140096618357488, 0.1483050847457627, 0.05339805825242718, 0.0635593220338983, 0.10071942446043165, 0.1859903381642512, 0.2330508474576271, 0.012711864406779662, 0.19491525423728814, 0.3300970873786408, 0.23429951690821257, 0.2076271186440678, 0.25, 0.38256658595641646, 0.16101694915254236, 0.014354066985645933, 0.05508474576271186, 0.13559322033898305, 0.0024096385542168677, 0.0211864406779661, 0.3722627737226277, 0.0211864406779661, 0.014598540145985401, 0.13135593220338984, 0.07729468599033816, 0.2711864406779661, 0.20531400966183574, 0.2076271186440678, 0.03389830508474576, 0.08232445520581114, 0.3050847457627119, 0.21610169491525424, 0.1855421686746988, 0.2245762711864407, 0.029661016949152543, 0.1574468085106383, 0.1016949152542373, 0.3005050505050505, 0.15942028985507245, 0.014457831325301205, 0.05783132530120482, 0.2033898305084746, 0.1566265060240964, 0.1435523114355231, 0.13768115942028986, 0.025423728813559324, 0.10843373493975904, 0.2936170212765957, 0.21610169491525424, 0.2542372881355932, 0.32439024390243903, 0.19915254237288135, 0.09420289855072464, 0.024096385542168676, 0.13559322033898305, 0.06779661016949153, 0.30303030303030304, 0.24152542372881355, 0.007177033492822967, 0.025423728813559324, 0.1652542372881356, 0.22033898305084745, 0.18220338983050846, 0.1440677966101695, 0.3220338983050847, 0.05508474576271186, 0.1783132530120482, 0.08373205741626795, 0.04807692307692308, 0.00847457627118644, 0.0635593220338983, 0.1510791366906475, 0.42735042735042733, 0.0851063829787234, 
0.11004784688995216, 0.029661016949152543, 0.30638297872340425, 0.06698564593301436, 0.2288135593220339, 0.3008474576271186, 0.22705314009661837, 0.26570048309178745, 0.1906779661016949, 0.059322033898305086, 0.046610169491525424, 0.1652542372881356, 0.16183574879227053, 0.15254237288135594, 0.05084745762711865, 0.1864406779661017, 0.13526570048309178, 0.30917874396135264, 0.20681265206812652, 0.22033898305084745, 0.2542372881355932, 0.00847457627118644, 0.033734939759036145, 0.1570048309178744, 0.11016949152542373, 0.1694915254237288, 0.11961722488038277, 0.2711864406779661, 0.1694915254237288, 0.05339805825242718, 0.2584745762711864, 0.17372881355932204, 0.30193236714975846, 0.11352657004830918, 0.12289156626506025, 0.00423728813559322, 0.19915254237288135, 0.19915254237288135, 0.1059322033898305, 0.19915254237288135, 0.16183574879227053, 0.13983050847457626, 0.2076271186440678, 0.25, 0.0635593220338983, 0.11864406779661017, 0.2457627118644068, 0.16101694915254236, 0.0423728813559322, 0.2033898305084746, 0.028985507246376812, 0.08133971291866028, 0.3, 0.13559322033898305, 0.21204819277108433, 0.025423728813559324, 0.2, 0.012048192771084338, 0.145933014354067, 0.09322033898305085, 0.2754237288135593, 0.038135593220338986, 0.012077294685990338, 0.21204819277108433, 0.18220338983050846, 0.01694915254237288, 0.288135593220339, 0.18840579710144928, 0.1906779661016949, 0.047961630695443645, 0.2288135593220339, 0.11864406779661017, 0.19491525423728814, 0.2796610169491525, 0.19491525423728814, 0.23132530120481928, 0.2584745762711864, 0.17647058823529413, 0.038135593220338986, 0.4, 0.050239234449760764, 0.21256038647342995, 0.41445783132530123, 0.4533898305084746, 0.04578313253012048, 0.038135593220338986, 0.029661016949152543, 0.012711864406779662, 0.2804878048780488, 0.1059322033898305, 0.11510791366906475, 0.3793103448275862, 0.18220338983050846, 0.13559322033898305, 0.03597122302158273, 0.3253588516746411, 0.14457831325301204, 0.00423728813559322, 0.07627118644067797, 0.05084745762711865, 0.012711864406779662, 0.2413793103448276, 0.00847457627118644, 0.13135593220338984, 0.014388489208633094, 0.06971153846153846, 0.03614457831325301, 0.13559322033898305, 0.2096317280453258, 0.16908212560386474, 0.038135593220338986, 0.1497584541062802, 0.1906779661016949, 0.0211864406779661, 0.25, 0.3927710843373494, 0.25, 0.0847457627118644, 0.2923728813559322, 0.20722891566265061, 0.01674641148325359, 0.0673076923076923, 0.19951923076923078, 0.0635593220338983, 0.025423728813559324, 0.029661016949152543, 0.029661016949152543, 0.04600484261501211, 0.011560693641618497, 0.13559322033898305, 0.15180722891566265, 0.17149758454106281, 0.13559322033898305, 0.17391304347826086, 0.038135593220338986, 0.16101694915254236, 0.1864406779661017, 0.3702127659574468, 0.09322033898305085, 0.19491525423728814, 0.01694915254237288, 0.06779661016949153, 0.1059322033898305, 0.021686746987951807, 0.13559322033898305, 0.09420289855072464, 0.16786570743405277, 0.20531400966183574, 0.023923444976076555, 0.05084745762711865, 0.00847457627118644, 0.2033898305084746, 0.15180722891566265, 0.1652542372881356, 0.23557692307692307, 0.0966183574879227, 0.13349514563106796, 0.012048192771084338, 0.038135593220338986, 0.007177033492822967, 0.15384615384615385, 0.1271186440677966, 0.07451923076923077, 0.016867469879518072, 0.05084745762711865, 0.15903614457831325, 0.1694915254237288, 0.2851063829787234, 0.20772946859903382, 0.15319148936170213, 0.2709832134292566, 0.13135593220338984, 0.11864406779661017, 0.18220338983050846, 
0.0023923444976076554, 0.038135593220338986, 0.0423728813559322, 0.009592326139088728, 0.01694915254237288, 0.1059322033898305, 0.029661016949152543, 0.17372881355932204, 0.5127118644067796, 0.16028708133971292, 0.04578313253012048, 0.03588516746411483, 0.2936170212765957, 0.2711864406779661, 0.17349397590361446, 0.2385542168674699, 0.34541062801932365, 0.08050847457627118, 0.06521739130434782, 0.07627118644067797, 0.3432203389830508, 0.40294840294840295, 0.06779661016949153, 0.1652542372881356, 0.029661016949152543, 0.25, 0.0211864406779661, 0.2627118644067797, 0.050239234449760764, 0.16326530612244897, 0.13135593220338984, 0.3695652173913043, 0.19854721549636803, 0.1076555023923445, 0.06779661016949153, 0.10551558752997602, 0.06779661016949153, 0.26570048309178745, 0.14251207729468598, 0.0211864406779661, 0.17796610169491525, 0.19491525423728814, 0.012711864406779662, 0.13983050847457626, 0.1826086956521739, 0.1059322033898305, 0.029661016949152543, 0.2076271186440678, 0.03389830508474576, 0.24152542372881355, 0.0847457627118644, 0.19617224880382775, 0.15865384615384615, 0.1228813559322034, 0.17391304347826086, 0.00423728813559322, 0.02891566265060241, 0.23002421307506055, 0.00423728813559322, 0.4658119658119658, 0.050724637681159424, 0.2033898305084746, 0.37872340425531914, 0.20425531914893616, 0.31896551724137934, 0.03110047846889952, 0.0211864406779661, 0.15254237288135594, 0.2106537530266344, 0.2723404255319149, 0.3220338983050847, 0.009569377990430622, 0.3404255319148936, 0.19915254237288135, 0.13701923076923078, 0.3191489361702128, 0.1016949152542373, 0.004784688995215311, 0.16101694915254236, 0.09322033898305085, 0.13875598086124402, 0.0211864406779661, 0.15217391304347827, 0.11352657004830918, 0.2330508474576271, 0.007211538461538462, 0.038135593220338986, 0.014457831325301205, 0.00423728813559322, 0.21980676328502416, 0.1483050847457627, 0.23728813559322035, 0.2385542168674699, 0.30917874396135264, 0.1652542372881356, 0.18220338983050846, 0.014388489208633094, 0.012711864406779662, 0.2754237288135593, 0.15776699029126215, 0.09322033898305085, 0.03132530120481928, 0.10551558752997602, 0.029661016949152543, 0.004784688995215311, 0.24152542372881355, 0.012711864406779662, 0.012711864406779662, 0.2288135593220339, 0.033816425120772944, 0.01201923076923077, 0.05741626794258373, 0.4279661016949153, 0.012106537530266344, 0.15421686746987953, 0.06779661016949153, 0.16101694915254236, 0.3659574468085106, 0.05508474576271186, 0.0635593220338983, 0.2929782082324455, 0.10311750599520383, 0.045563549160671464, 0.1652542372881356, 0.2578616352201258, 0.012711864406779662, 0.025423728813559324, 0.18220338983050846, 0.19491525423728814, 0.21927710843373494, 0.00847457627118644, 0.40487804878048783, 0.1440677966101695, 0.10653753026634383, 0.1016949152542373, 0.03389830508474576, 0.1271186440677966, 0.326271186440678, 0.09322033898305085, 0.01694915254237288, 0.13043478260869565, 0.045454545454545456, 0.10945273631840796, 0.18723404255319148, 0.025423728813559324, 0.043373493975903614, 0.15677966101694915, 0.012711864406779662, 0.4110169491525424, 0.19915254237288135, 0.07203389830508475, 0.19310344827586207, 0.009925558312655087, 0.1829787234042553, 0.2711864406779661, 0.08695652173913043, 0.17307692307692307, 0.19915254237288135, 0.19491525423728814, 0.1570048309178744, 0.11016949152542373, 0.1271186440677966, 0.1108433734939759, 0.002398081534772182, 0.19491525423728814, 0.11440677966101695, 0.025423728813559324, 0.16101694915254236, 0.1108433734939759, 0.15254237288135594, 0.5, 
0.15942028985507245, 0.14798206278026907, 0.06569343065693431, 0.1864406779661017, 0.13975903614457832, 0.20048309178743962, 0.046610169491525424, 0.3389830508474576, 0.2680851063829787, 0.0211864406779661, 0.20100502512562815, 0.11864406779661017, 0.4703389830508475, 0.09090909090909091, 0.17149758454106281, 0.1473429951690821, 0.1864406779661017, 0.23728813559322035, 0.4576271186440678, 0.24087591240875914, 0.011961722488038277, 0.1440677966101695, 0.11440677966101695, 0.03389830508474576, 0.1059322033898305, 0.21014492753623187, 0.04066985645933014, 0.16101694915254236, 0.17372881355932204, 0.1642512077294686, 0.1906779661016949, 0.01694915254237288, 0.012048192771084338, 0.059322033898305086, 0.22033898305084745, 0.16105769230769232, 0.00423728813559322, 0.05980861244019139, 0.10411622276029056, 0.00847457627118644, 0.2146341463414634, 0.15677966101694915, 0.2542372881355932, 0.19743589743589743, 0.16101694915254236, 0.12771084337349398, 0.27951807228915665, 0.19915254237288135, 0.11864406779661017, 0.18055555555555555, 0.012711864406779662, 0.024752475247524754, 0.09315068493150686, 0.12835820895522387, 0.22033898305084745, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.06776859504132231, 0.06776859504132231, 0.06776859504132231, 0.06776859504132231, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.04462809917355372, 0.04462809917355372, 0.04462809917355372, 0.04462809917355372, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 0.05950413223140496, 
[Data omitted: a large comma-separated array of repeated floating-point values, predominantly 0.05950413223140496 and 0.03471074380165289, interspersed with 0.1768595041322314, 0.1669421487603306, 0.06115702479338843, 0.01818181818181818, 0.011570247933884297, 0.008264462809917356, 0.006611570247933884, 0.0049586776859504135, 0.003305785123966942, and 0.001652892561983471.]
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.01818181818181818, 0.01818181818181818, 0.01818181818181818, 0.01818181818181818, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.07768595041322314, 0.07768595041322314, 0.07768595041322314, 0.07768595041322314, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.024793388429752067, 0.024793388429752067, 0.024793388429752067, 0.024793388429752067, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 
0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.019834710743801654, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.02975206611570248, 0.02975206611570248, 0.02975206611570248, 0.02975206611570248, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03636363636363636, 0.019834710743801654, 0.03636363636363636, 0.03636363636363636, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.008264462809917356, 0.006611570247933884, 0.008264462809917356, 0.008264462809917356, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 
0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03471074380165289, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.03305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.04297520661157025, 0.04132231404958678, 0.04297520661157025, 0.04297520661157025, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01818181818181818, 0.01818181818181818, 0.01818181818181818, 0.01818181818181818, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.023140495867768594, 0.01818181818181818, 0.023140495867768594, 0.023140495867768594, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.009917355371900827, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.0049586776859504135, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.008264462809917356, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 
0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.01652892561983471, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.05619834710743802, 0.05619834710743802, 0.05619834710743802, 0.05619834710743802, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.006611570247933884, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.013223140495867768, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.02809917355371901, 0.021487603305785124, 0.02809917355371901, 0.02809917355371901, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0049586776859504135, 0.003305785123966942, 0.0049586776859504135, 0.0049586776859504135, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.023140495867768594, 0.001652892561983471, 0.023140495867768594, 0.023140495867768594, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.08925619834710743, 0.08925619834710743, 0.08925619834710743, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.047933884297520664, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04628099173553719, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.0512396694214876, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 0.04297520661157025, 
[Data block: a long wrapped array of fractional values. The first portion consists almost entirely of 0.04297520661157025, interspersed with occasional values of 0.04628099173553719, 0.047933884297520664, 0.0512396694214876, 0.001652892561983471, 0.003305785123966942, 0.006611570247933884, 0.008264462809917356, and 0.013223140495867768; the final portion consists almost entirely of 0.01487603305785124, interspersed with 0.021487603305785124, 0.019834710743801654, 0.011570247933884297, 0.0049586776859504135, and 0.07107438016528926. Full array omitted for readability.]
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.003305785123966942, 0.003305785123966942, 0.003305785123966942, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.011570247933884297, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.3090909090909091, 0.0049586776859504135, 0.3090909090909091, 0.3090909090909091, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 
0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.021487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.01487603305785124, 0.001652892561983471, 0.001652892561983471, 0.001652892561983471, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 
0.03471074380165289, 0.03471074380165289, 0.03471074380165289, 0.03471074380165289], "nan_length_distribution": {"1": 526636, "4": 4493, "2": 71450, "3": 25422, "1129": 4, "6": 1761, "1131": 2, "1118": 4, "2914": 2, "5191": 2, "3409": 1, "7720": 2, "4311": 1, "2860": 2, "6557": 6, "4129": 1, "7611": 1, "3482": 1, "15456": 22, "4564": 2, "6870": 2, "6556": 6, "2639": 1, "7529": 1, "4890": 1, "4855": 4, "1133": 4, "5189": 2, "4253": 1, "7689": 1, "3436": 1, "1143": 2, "4569": 1, "7687": 1, "3118": 1, "6891": 4, "2863": 2, "4619": 2, "3991": 2, "342": 2, "4296": 2, "4572": 1, "7703": 1, "3131": 1, "307": 2, "6871": 2, "4629": 2, "1135": 4, "4617": 2, "2851": 2, "4894": 2, "4618": 2, "1137": 4, "6602": 2, "6873": 2, "6872": 2, "2018": 4, "4849": 2, "844": 2, "305": 3, "5495": 1, "5190": 1, "6869": 2, "1140": 4, "1142": 2, "1138": 5, "2872": 1, "4630": 1, "1758": 1, "6917": 2, "2009": 2, "4878": 2, "6886": 2, "2277": 4, "4573": 1, "7705": 1, "3132": 1, "2861": 1, "4859": 1, "4906": 2, "5804": 1, "898": 2, "2562": 2, "3475": 2, "4908": 1, "5802": 1, "894": 2, "1153": 2, "6632": 2, "5712": 2, "6568": 2, "273": 2, "4634": 2, "3135": 1, "6896": 1, "3761": 1, "2006": 2, "3420": 2, "1130": 3, "355": 3, "1132": 2, "306": 3, "7463": 2, "816": 2, "2839": 1, "7723": 1, "4884": 1, "4599": 1, "7714": 1, "3115": 1, "251": 6, "7681": 1, "7375": 1, "4615": 2, "4903": 1, "5801": 1, "1141": 2, "5800": 1, "4567": 2, "4852": 2, "94": 14, "93": 6, "243": 2, "364": 6, "284": 1, "575": 1, "291": 1, "239": 5, "547": 11, "344": 1, "566": 4, "222": 1, "1154": 6, "381": 7, "573": 12, "220": 1, "559": 2, "339": 2, "405": 10, "95": 32, "218": 1, "192": 2, "574": 9, "385": 8, "333": 2, "29": 462, "358": 2, "193": 4, "26": 52, "318": 7, "238": 2, "340": 2, "550": 2, "168": 190, "404": 2, "390": 1, "240": 1, "78": 4, "338": 2, "190": 4, "576": 2, "337": 1, "409": 4, "416": 2, "7": 2117, "214": 2, "221": 2, "415": 2, "28": 32, "485": 2, "408": 2, "548": 2, "23": 63, "261": 1, "506": 1, "245": 1, "167": 3, "285": 3, "30": 460, "554": 2, "68": 4, "237": 2, "383": 1, "191": 3, "21": 98, "572": 1, "546": 3, "4281": 6, "377": 434, "367": 430, "10": 942, "6848": 2, "4288": 4, "8": 707, "457": 2, "4190": 2, "4202": 2, "4205": 4, "3089": 2, "4146": 2, "4187": 2, "4285": 2, "6227": 10, "2596": 2, "3039": 2, "4201": 4, "424": 2, "4283": 4, "4204": 2, "1334": 2, "4284": 4, "6155": 2, "4869": 4, "4286": 4, "4282": 4, "3357": 2, "6754": 2, "4287": 2, "6507": 4, "4203": 2, "4206": 2, "4200": 2, "406": 2, "4188": 2, "6820": 2, "4289": 2, "6468": 2, "6505": 2, "3048": 2, "6844": 2, "4198": 2, "2327": 2, "6797": 2, "356": 14, "570": 4, "357": 18, "38": 32, "1152": 17, "349": 6, "350": 18, "257": 2, "345": 2, "348": 2, "518": 10, "216": 2, "253": 2, "35": 37, "111": 3, "512": 2, "279": 2, "562": 3, "542": 6, "33": 26, "568": 2, "539": 2, "254": 2, "13": 336, "15": 144, "14": 164, "35040": 160, "93272": 1, "85018": 1, "43628": 1, "82521": 1, "65817": 1, "53722": 1, "96624": 1, "94718": 1, "73095": 1, "106464": 11, "109824": 2, "109248": 1, "52513": 1, "112096": 1, "70176": 3, "63902": 1, "35681": 1, "108480": 1, "5038": 1, "93929": 1, "51648": 1, "75744": 1, "42720": 1, "53952": 1, "124224": 1, "104064": 1, "111264": 1, "54336": 1, "86592": 1, "95948": 1, "86880": 1, "64992": 1, "9216": 1, "102048": 1, "103008": 1, "57024": 1, "93312": 1, "57556": 1, "106752": 1, "365": 160, "971": 1, "885": 1, "454": 1, "859": 1, "685": 1, "1006": 1, "986": 1, "761": 1, "1109": 11, "1144": 2, "1167": 1, "731": 3, "665": 1, "371": 1, "52": 172, "978": 1, "538": 
1, "789": 1, "445": 1, "1281": 1, "1084": 1, "1159": 1, "902": 1, "999": 1, "905": 1, "677": 1, "96": 2, "1063": 1, "1073": 1, "594": 3, "972": 2, "599": 1, "1112": 1, "8760": 160, "23318": 1, "21254": 1, "10907": 1, "20630": 1, "16454": 1, "13430": 1, "24156": 1, "23679": 1, "18274": 1, "26616": 11, "27456": 2, "27312": 1, "13128": 1, "28024": 1, "17544": 3, "15975": 1, "8920": 1, "27120": 1, "1259": 1, "23482": 1, "12912": 1, "18936": 1, "10680": 1, "13488": 1, "31056": 1, "26016": 1, "27816": 1, "13584": 1, "21648": 1, "23987": 1, "21720": 1, "16248": 1, "2304": 1, "25512": 1, "25752": 1, "14256": 1, "23328": 1, "14389": 1, "26688": 1, "138": 5, "126": 1, "64": 3, "122": 3, "97": 4, "79": 2, "143": 1, "140": 1, "108": 2, "158": 12, "163": 3, "162": 2, "166": 2, "104": 3, "53": 9, "161": 2, "139": 1, "76": 2, "112": 1, "63": 2, "80": 9, "176": 7, "154": 1, "165": 35, "128": 1, "142": 1, "129": 3, "151": 1, "153": 3, "84": 4, "85": 8, "9": 923, "24": 84, "81": 9, "31": 25, "88": 8, "5": 2338, "70": 6, "75": 9, "17": 126, "19": 101, "32": 18, "37": 208, "18": 308, "12": 487, "34": 21, "43": 13, "39": 27, "11": 285, "20": 109, "183": 4, "247": 9, "83": 9, "25": 52, "354": 1, "16": 388, "246": 151, "44": 17, "91": 5, "22": 101, "46": 18, "613": 32, "42": 20, "116": 3, "40": 13, "92": 1, "50": 13, "58": 7, "45": 14, "55": 5, "614": 1, "60": 3, "236": 1, "74": 4, "27": 22, "59": 7, "172": 1, "48": 12, "90": 3, "51": 7, "89": 3, "41": 11, "57": 4, "54": 13, "36": 18, "199": 1, "66": 13, "61": 7, "125": 1, "1981": 6, "102": 1, "49": 10, "114": 1, "56": 11, "103": 1, "77": 9, "47": 22, "120": 1, "72": 2, "248": 5, "169": 3, "73": 2, "772": 1, "249": 1, "2148": 5, "117": 2, "65": 4, "86": 2, "2278": 1, "268": 1, "255": 1, "171": 1, "1723": 1, "1824": 1, "1823": 4, "144": 3, "189": 1, "69": 4, "62": 2, "67": 6, "174": 2, "175": 1, "878": 6, "100": 3, "211": 1, "71": 7, "232": 6, "123": 3, "145": 1, "441": 4, "87": 3, "478": 1, "808": 1, "272": 1, "99": 1, "2053": 6, "173": 1, "106": 3, "187": 1, "146": 2, "653": 1, "124": 2, "353": 1, "397": 1, "758": 1, "676": 1, "101": 2, "196": 2, "118": 1, "812": 1, "437": 1, "159": 3, "1956": 1, "1306": 1, "1056": 1, "720": 1, "149": 1, "119": 1, "4598": 2, "107": 3, "134": 2, "205": 2, "224": 1, "133": 1, "590": 1, "267": 2, "132": 1, "964": 1, "105": 4, "202": 1, "5567": 4, "177": 1, "7098": 2, "203": 1, "6856": 1, "206": 1, "2019": 1, "219": 1, "131": 1, "282": 1, "635": 1, "135": 1, "178": 1, "115": 2, "201": 1, "443": 1, "9658": 1}} \ No newline at end of file diff --git a/examples/generate_synthetic_data.py b/examples/generate_synthetic_data.py new file mode 100644 index 0000000000000000000000000000000000000000..a3d09249ad5ed2d07829809b1eebbd7978e6423e --- /dev/null +++ b/examples/generate_synthetic_data.py @@ -0,0 +1,204 @@ +import logging +import os +from typing import List, Optional + +import torch + +from src.data.containers import BatchTimeSeriesContainer +from src.data.utils import sample_future_length +from src.plotting.plot_timeseries import plot_from_container +from src.synthetic_generation.anomalies.anomaly_generator_wrapper import ( + AnomalyGeneratorWrapper, +) +from src.synthetic_generation.cauker.cauker_generator_wrapper import ( + CauKerGeneratorWrapper, +) +from src.synthetic_generation.forecast_pfn_prior.forecast_pfn_generator_wrapper import ( + ForecastPFNGeneratorWrapper, +) +from src.synthetic_generation.generator_params import ( + AnomalyGeneratorParams, + CauKerGeneratorParams, + FinancialVolatilityAudioParams, + 
ForecastPFNGeneratorParams, + GPGeneratorParams, + KernelGeneratorParams, + MultiScaleFractalAudioParams, + NetworkTopologyAudioParams, + OrnsteinUhlenbeckProcessGeneratorParams, + SawToothGeneratorParams, + SineWaveGeneratorParams, + SpikesGeneratorParams, + StepGeneratorParams, + StochasticRhythmAudioParams, +) +from src.synthetic_generation.gp_prior.gp_generator_wrapper import GPGeneratorWrapper +from src.synthetic_generation.kernel_synth.kernel_generator_wrapper import ( + KernelGeneratorWrapper, +) +from src.synthetic_generation.ornstein_uhlenbeck_process.ou_generator_wrapper import ( + OrnsteinUhlenbeckProcessGeneratorWrapper, +) +from src.synthetic_generation.sawtooth.sawtooth_generator_wrapper import ( + SawToothGeneratorWrapper, +) +from src.synthetic_generation.sine_waves.sine_wave_generator_wrapper import ( + SineWaveGeneratorWrapper, +) +from src.synthetic_generation.spikes.spikes_generator_wrapper import ( + SpikesGeneratorWrapper, +) +from src.synthetic_generation.steps.step_generator_wrapper import StepGeneratorWrapper + +PYO_AVAILABLE = True +try: + import pyo # requires portaudio to be installed +except (ImportError, OSError): + PYO_AVAILABLE = False +else: + from src.synthetic_generation.audio_generators.financial_volatility_wrapper import ( + FinancialVolatilityAudioWrapper, + ) + from src.synthetic_generation.audio_generators.multi_scale_fractal_wrapper import ( + MultiScaleFractalAudioWrapper, + ) + from src.synthetic_generation.audio_generators.network_topology_wrapper import ( + NetworkTopologyAudioWrapper, + ) + from src.synthetic_generation.audio_generators.stochastic_rhythm_wrapper import ( + StochasticRhythmAudioWrapper, + ) + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger(__name__) + + +def visualize_batch_sample( + generator, + batch_size: int = 8, + output_dir: str = "outputs/plots", + sample_idx: Optional[int] = None, + prefix: str = "", + seed: Optional[int] = None, +) -> None: + os.makedirs(output_dir, exist_ok=True) + name = generator.__class__.__name__ + logger.info(f"[{name}] Generating batch of size {batch_size}") + + batch = generator.generate_batch(batch_size=batch_size, seed=seed) + values = torch.from_numpy(batch.values) + if values.ndim == 2: + values = values.unsqueeze(-1) + + future_length = sample_future_length(range="gift_eval") + history_values = values[:, :-future_length, :] + future_values = values[:, -future_length:, :] + + container = BatchTimeSeriesContainer( + history_values=history_values, + future_values=future_values, + start=batch.start, + frequency=batch.frequency, + ) + + indices = [sample_idx] if sample_idx is not None else range(batch_size) + for i in indices: + filename = ( + f"{prefix}_{name.lower().replace('generatorwrapper', '')}_sample_{i}.png" + ) + output_file = os.path.join(output_dir, filename) + title = f"{prefix.capitalize()} {name.replace('GeneratorWrapper', '')} Synthetic Series (Sample {i})" + plot_from_container( + container, sample_idx=i, output_file=output_file, show=False, title=title + ) + logger.info(f"[{name}] Saved plot to {output_file}") + + +def generator_factory(global_seed: int, total_length: int) -> List: + generators = [ + KernelGeneratorWrapper( + KernelGeneratorParams(global_seed=global_seed, length=total_length) + ), + GPGeneratorWrapper( + GPGeneratorParams(global_seed=global_seed, length=total_length) + ), + ForecastPFNGeneratorWrapper( + ForecastPFNGeneratorParams(global_seed=global_seed, length=total_length) + ), + 
SineWaveGeneratorWrapper( + SineWaveGeneratorParams(global_seed=global_seed, length=total_length) + ), + SawToothGeneratorWrapper( + SawToothGeneratorParams(global_seed=global_seed, length=total_length) + ), + StepGeneratorWrapper( + StepGeneratorParams(global_seed=global_seed, length=total_length) + ), + AnomalyGeneratorWrapper( + AnomalyGeneratorParams(global_seed=global_seed, length=total_length) + ), + SpikesGeneratorWrapper( + SpikesGeneratorParams(global_seed=global_seed, length=total_length) + ), + CauKerGeneratorWrapper( + CauKerGeneratorParams( + global_seed=global_seed, length=total_length, num_channels=5 + ) + ), + OrnsteinUhlenbeckProcessGeneratorWrapper( + OrnsteinUhlenbeckProcessGeneratorParams( + global_seed=global_seed, length=total_length + ) + ), + ] + + if PYO_AVAILABLE: + generators.extend( + [ + StochasticRhythmAudioWrapper( + StochasticRhythmAudioParams( + global_seed=global_seed, length=total_length + ) + ), + FinancialVolatilityAudioWrapper( + FinancialVolatilityAudioParams( + global_seed=global_seed, length=total_length + ) + ), + MultiScaleFractalAudioWrapper( + MultiScaleFractalAudioParams( + global_seed=global_seed, length=total_length + ) + ), + NetworkTopologyAudioWrapper( + NetworkTopologyAudioParams( + global_seed=global_seed, length=total_length + ) + ), + ] + ) + else: + logger.warning("Audio generators skipped (pyo not available)") + + return generators + + +if __name__ == "__main__": + batch_size = 2 + total_length = 2048 + output_dir = "outputs/plots" + global_seed = 2025 + + logger.info(f"Saving plots to {output_dir}") + + for gen in generator_factory(global_seed, total_length): + prefix = "multivariate" if getattr(gen.params, "num_channels", 1) > 1 else "" + visualize_batch_sample( + gen, + batch_size=batch_size, + output_dir=output_dir, + prefix=prefix, + seed=global_seed, + ) diff --git a/examples/gift_eval/gift_eval_runner.py b/examples/gift_eval/gift_eval_runner.py new file mode 100755 index 0000000000000000000000000000000000000000..fdee50eca14338c2e3c762a1a96d7ec1e57726e3 --- /dev/null +++ b/examples/gift_eval/gift_eval_runner.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python +""" +GIFT-Eval Runner Script + +This script evaluates the Time Series model on GIFT-Eval datasets using the `src/gift_eval` pipeline. + +- Uses `src/gift_eval/data.py` for dataset handling. +- Uses `src/gift_eval/predictor.TimeSeriesPredictor` for inference. +- Loads a model from a checkpoint. +- Writes per-dataset CSV metrics to `output_dir` without creating plots. 
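+
+Example invocation (illustrative; the flags are defined in `main()` below, and the
+dataset storage path is a placeholder for your local GIFT-Eval download):
+
+    python examples/gift_eval/gift_eval_runner.py \
+        --datasets m4_weekly \
+        --dataset_storage_path /path/to/gift_eval \
+        --output_dir gift_eval_results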
+""" + +import argparse +import logging +from pathlib import Path +from typing import List, Optional + +from examples.utils import download_checkpoint_if_needed +from src.gift_eval.constants import ALL_DATASETS +from src.gift_eval.evaluate import evaluate_datasets +from src.gift_eval.predictor import TimeSeriesPredictor +from src.gift_eval.results import aggregate_results, write_results_to_disk + + +# Configure logging +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logging.getLogger("matplotlib").setLevel(logging.WARNING) +logging.getLogger("matplotlib.font_manager").setLevel(logging.WARNING) +logger = logging.getLogger("gift_eval_runner") + + +def _expand_datasets_arg(datasets_arg: List[str] | str) -> List[str]: + """Expand dataset argument to list of dataset names.""" + if isinstance(datasets_arg, str): + if datasets_arg == "all": + return list(ALL_DATASETS) + datasets_list = [datasets_arg] + else: + datasets_list = datasets_arg + if datasets_list and datasets_list[0] == "all": + return list(ALL_DATASETS) + + for ds in datasets_list: + if ds not in ALL_DATASETS: + raise ValueError(f"Invalid dataset: {ds}. Use one of {ALL_DATASETS}") + return datasets_list + + +def run_evaluation( + predictor: TimeSeriesPredictor, + datasets_arg: List[str] | str, + terms_arg: List[str], + dataset_storage_path: str, + max_windows_arg: Optional[int], + batch_size_arg: int, + max_context_length_arg: Optional[int], + output_dir_arg: str, + model_name_arg: str, + after_each_dataset_flush: bool = True, +) -> None: + """Run evaluation on specified datasets.""" + datasets_to_run = _expand_datasets_arg(datasets_arg) + results_root = Path(output_dir_arg) + + for ds_name in datasets_to_run: + items = evaluate_datasets( + predictor=predictor, + dataset=ds_name, + dataset_storage_path=dataset_storage_path, + terms=terms_arg, + max_windows=max_windows_arg, + batch_size=batch_size_arg, + max_context_length=max_context_length_arg, + create_plots=False, + max_plots_per_dataset=0, + ) + write_results_to_disk( + items=items, + dataset_name=ds_name, + output_dir=results_root, + model_name=model_name_arg, + create_plots=False, + ) + if after_each_dataset_flush: + logger.info("Flushed results for %s", ds_name) + + +def main(): + """Main execution function.""" + parser = argparse.ArgumentParser( + description="GIFT-Eval Runner: Evaluate TimeSeriesModel on GIFT-Eval datasets" + ) + + # Model configuration + parser.add_argument( + "--model_path", + type=str, + default=None, + help="Path to model checkpoint. 
If not provided, will download from checkpoint_url.",
+    )
+    parser.add_argument(
+        "--config_path",
+        type=str,
+        default="configs/example.yaml",
+        help="Path to model config YAML (default: configs/example.yaml)",
+    )
+    parser.add_argument(
+        "--checkpoint_url",
+        type=str,
+        default="https://www.dropbox.com/scl/fi/mqsni5lehooyaw93y3uzq/checkpoint_38M.pth?rlkey=3uyehvmtted02xkha24zgpzb6&st=seevsbkn&dl=0",
+        help="URL to download checkpoint from if model_path is not provided",
+    )
+    parser.add_argument(
+        "--download_dir",
+        type=str,
+        default="models",
+        help="Directory to download checkpoint to (default: models)",
+    )
+
+    # Dataset configuration
+    parser.add_argument(
+        "--datasets",
+        type=str,
+        nargs="+",
+        default=["all"],
+        help='List of dataset names or ["all"] (default: all)',
+    )
+    parser.add_argument(
+        "--terms",
+        type=str,
+        nargs="+",
+        default=["short", "medium", "long"],
+        help="Prediction terms to evaluate (default: short medium long)",
+    )
+    parser.add_argument(
+        "--dataset_storage_path",
+        type=str,
+        default="/work/dlclarge2/moroshav-GiftEvalPretrain/gift_eval",
+        # required=True,
+        help="Path to the root of the gift eval datasets storage directory",
+    )
+    parser.add_argument(
+        "--max_windows",
+        type=int,
+        default=20,
+        help="Maximum number of windows to use for evaluation (default: 20)",
+    )
+
+    # Inference configuration
+    parser.add_argument(
+        "--batch_size",
+        type=int,
+        default=64,
+        help="Batch size for inference (default: 64)",
+    )
+    parser.add_argument(
+        "--max_context_length",
+        type=int,
+        default=3072,
+        help="Maximum context length (default: 3072)",
+    )
+
+    # Output configuration
+    parser.add_argument(
+        "--output_dir",
+        type=str,
+        default="gift_eval_results",
+        help="Output directory for results (default: gift_eval_results)",
+    )
+    parser.add_argument(
+        "--model_name",
+        type=str,
+        default="TempoPFN",
+        help="Model name identifier for results (default: TempoPFN)",
+    )
+    parser.add_argument(
+        "--no_flush",
+        action="store_true",
+        help="Disable flushing results after each dataset",
+    )
+
+    args = parser.parse_args()
+
+    # Resolve paths
+    config_path = Path(args.config_path)
+    download_dir = Path(args.download_dir)
+    output_dir = Path(args.output_dir)
+
+    # Determine model path
+    resolved_model_path = None
+    if args.model_path:
+        resolved_model_path = args.model_path
+    elif args.checkpoint_url:
+        resolved_model_path = download_checkpoint_if_needed(
+            args.checkpoint_url, target_dir=download_dir
+        )
+
+    if not resolved_model_path:
+        raise FileNotFoundError(
+            "No model checkpoint provided. Set --model_path or --checkpoint_url."
+ ) + + if not config_path.exists(): + raise FileNotFoundError(f"Config not found: {config_path}") + + logger.info("Loading predictor from checkpoint: %s", resolved_model_path) + predictor = TimeSeriesPredictor.from_paths( + model_path=resolved_model_path, + config_path=str(config_path), + ds_prediction_length=1, # placeholder; set per dataset + ds_freq="D", # placeholder; set per dataset + batch_size=args.batch_size, + max_context_length=args.max_context_length, + ) + + logger.info("Starting evaluation...") + logger.info(" Datasets: %s", args.datasets) + logger.info(" Terms: %s", args.terms) + logger.info(" Output directory: %s", output_dir) + + # Run evaluation + run_evaluation( + predictor=predictor, + datasets_arg=args.datasets, + terms_arg=args.terms, + dataset_storage_path=args.dataset_storage_path, + max_windows_arg=args.max_windows, + batch_size_arg=args.batch_size, + max_context_length_arg=args.max_context_length, + output_dir_arg=str(output_dir), + model_name_arg=args.model_name, + after_each_dataset_flush=not args.no_flush, + ) + + logger.info("Evaluation complete. See results under: %s", output_dir) + + # Aggregate all results into a single CSV file + logger.info("Aggregating results from all datasets...") + combined_df = aggregate_results(result_root_dir=output_dir) + + if combined_df is not None: + logger.info("Successfully created aggregated results file: %s/all_results.csv", output_dir) + else: + logger.warning("No results to aggregate. Check that evaluation completed successfully.") + + +if __name__ == "__main__": + main() + diff --git a/examples/gift_eval/gift_eval_submission.ipynb b/examples/gift_eval/gift_eval_submission.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ba4d22c9b22fd9874862515882e88b3460196d09 --- /dev/null +++ b/examples/gift_eval/gift_eval_submission.ipynb @@ -0,0 +1,1439 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e8a9f0b1", + "metadata": {}, + "source": [ + "# Running TempoPFN on GIFT-Eval Benchmark\n", + "\n", + "This notebook evaluates the **TempoPFN** model on the GIFT-Eval benchmark. \n", + "\n", + "Make sure you download the gift-eval benchmark and set the `GIFT_EVAL_DATASET_STORAGE_PATH` environment variable correctly before running this notebook." + ] + }, + { + "cell_type": "markdown", + "id": "f1d2e3c4", + "metadata": {}, + "source": [ + "## 1. Setup and Dependencies\n", + "\n", + "First, install the required packages. \n", + "\n", + "**Note:** This notebook assumes that the core `TempoPFN` model code (e.g., `src.models.model`, `src.data.containers`) and dependencies are installed as a Python package or are otherwise available in the `PYTHONPATH`." + ] + }, + { + "cell_type": "markdown", + "id": "b9c8d7e6", + "metadata": {}, + "source": [ + "## 2. Imports\n", + "\n", + "Import all necessary libraries. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c7d8e9f0", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "import logging\n", + "import os\n", + "import math\n", + "import csv\n", + "import glob\n", + "import argparse\n", + "import warnings\n", + "import yaml\n", + "from pathlib import Path\n", + "from typing import List, Optional, Dict, Tuple, Union, Iterator, Iterable, Any\n", + "from functools import cached_property\n", + "from enum import Enum\n", + "from dataclasses import dataclass\n", + "\n", + "import pandas as pd\n", + "import numpy as np\n", + "import torch\n", + "from torch.nn.parallel import DistributedDataParallel as DDP\n", + "from dotenv import load_dotenv\n", + "\n", + "# GluonTS and Data Handling\n", + "import datasets\n", + "import pyarrow.compute as pc\n", + "from gluonts.dataset import DataEntry\n", + "from gluonts.dataset.common import ProcessDataEntry\n", + "from gluonts.dataset.split import TestData, TrainingDataset, split\n", + "from gluonts.itertools import Map\n", + "from gluonts.time_feature import norm_freq_str, get_seasonality\n", + "from gluonts.transform import Transformation\n", + "from pandas.tseries.frequencies import to_offset\n", + "from toolz import compose\n", + "\n", + "# GluonTS Evaluation\n", + "from gluonts.ev.metrics import (\n", + " MAE,\n", + " MAPE,\n", + " MASE,\n", + " MSE,\n", + " MSIS,\n", + " ND,\n", + " NRMSE,\n", + " RMSE,\n", + " SMAPE,\n", + " MeanWeightedSumQuantileLoss,\n", + ")\n", + "from gluonts.model.evaluation import evaluate_model\n", + "from gluonts.model.forecast import QuantileForecast\n", + "from gluonts.model.predictor import Predictor\n", + "\n", + "# Plotting and Warnings\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from linear_operator.utils.cholesky import NumericalWarning\n", + "\n", + "# --- TempoPFN Core Model Imports ---\n", + "# These are assumed to be installed or in the PYTHONPATH\n", + "from src.data.containers import BatchTimeSeriesContainer\n", + "from src.data.frequency import parse_frequency\n", + "from src.data.scalers import RobustScaler\n", + "from src.models.model import TimeSeriesModel\n", + "from src.utils.utils import device\n", + "\n", + "# --- Setup Logging ---\n", + "logging.basicConfig(level=logging.INFO, format=\"%(asctime)s - %(levelname)s - %(message)s\")\n", + "logging.getLogger(\"matplotlib\").setLevel(logging.WARNING)\n", + "logging.getLogger(\"matplotlib.font_manager\").setLevel(logging.WARNING)\n", + "logging.getLogger(\"PIL\").setLevel(logging.WARNING)\n", + "logger = logging.getLogger(\"gift_eval_runner\")\n", + "\n", + "# Filter out specific gluonts warnings\n", + "class WarningFilter(logging.Filter):\n", + " def __init__(self, text_to_filter: str) -> None:\n", + " super().__init__()\n", + " self.text_to_filter = text_to_filter\n", + "\n", + " def filter(self, record: logging.LogRecord) -> bool:\n", + " return self.text_to_filter not in record.getMessage()\n", + "\n", + "gts_logger = logging.getLogger(\"gluonts.model.forecast\")\n", + "gts_logger.addFilter(\n", + " WarningFilter(\"The mean prediction is not stored in the forecast data\")\n", + ")\n", + "\n", + "# Filter out numerical warnings\n", + "warnings.filterwarnings(\"ignore\", category=NumericalWarning)\n", + "warnings.filterwarnings(\"ignore\", category=FutureWarning)\n", + "warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n", + "\n", + "# Load environment variables (e.g., GIFT_EVAL_DATASET_STORAGE_PATH)\n", + "load_dotenv()" + ] + }, + 
{ + "cell_type": "markdown", + "id": "d6e7f8a1", + "metadata": {}, + "source": [ + "## 3. Constants and Configuration\n", + "\n", + "Define dataset lists, metrics, and other constants following GIFT-Eval standards." + ] + }, + { + "cell_type": "markdown", + "id": "g4h5j6k7", + "metadata": {}, + "source": [ + "### 3.1. Constants " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "h5j6k7l8", + "metadata": {}, + "outputs": [], + "source": [ + "# Environment setup\n", + "os.environ[\"CUBLAS_WORKSPACE_CONFIG\"] = \":4096:8\"\n", + "\n", + "# Use absolute path relative to the project root\n", + "_MODULE_DIR = Path.cwd().parent.parent # Assumes notebook is in `examples/gift_eval/`\n", + "DATASET_PROPERTIES_PATH = _MODULE_DIR / \"data\" / \"dataset_properties.json\"\n", + "\n", + "try:\n", + " with open(DATASET_PROPERTIES_PATH, \"r\") as f:\n", + " DATASET_PROPERTIES = json.load(f)\n", + "except Exception as exc: # pragma: no cover - logging path\n", + " DATASET_PROPERTIES = {}\n", + " logger.warning(\n", + " \"Could not load dataset properties from %s: %s. Domain and num_variates will fall back to defaults.\",\n", + " DATASET_PROPERTIES_PATH,\n", + " exc,\n", + " )\n", + "\n", + "# Datasets\n", + "SHORT_DATASETS = (\n", + " \"m4_yearly\",\n", + " \"m4_quarterly\",\n", + " \"m4_monthly\",\n", + " \"m4_weekly\",\n", + " \"m4_daily\",\n", + " \"m4_hourly\",\n", + " \"electricity/15T\",\n", + " \"electricity/H\",\n", + " \"electricity/D\",\n", + " \"electricity/W\",\n", + " \"solar/10T\",\n", + " \"solar/H\",\n", + " \"solar/D\",\n", + " \"solar/W\",\n", + " \"hospital\",\n", + " \"covid_deaths\",\n", + " \"us_births/D\",\n", + " \"us_births/M\",\n", + " \"us_births/W\",\n", + " \"saugeenday/D\",\n", + " \"saugeenday/M\",\n", + " \"saugeenday/W\",\n", + " \"temperature_rain_with_missing\",\n", + " \"kdd_cup_2018_with_missing/H\",\n", + " \"kdd_cup_2018_with_missing/D\",\n", + " \"car_parts_with_missing\",\n", + " \"restaurant\",\n", + " \"hierarchical_sales/D\",\n", + " \"hierarchical_sales/W\",\n", + " \"LOOP_SEATTLE/5T\",\n", + " \"LOOP_SEATTLE/H\",\n", + " \"LOOP_SEATTLE/D\",\n", + " \"SZ_TAXI/15T\",\n", + " \"SZ_TAXI/H\",\n", + " \"M_DENSE/H\",\n", + " \"M_DENSE/D\",\n", + " \"ett1/15T\",\n", + " \"ett1/H\",\n", + " \"ett1/D\",\n", + " \"ett1/W\",\n", + " \"ett2/15T\",\n", + " \"ett2/H\",\n", + " \"ett2/D\",\n", + " \"ett2/W\",\n", + " \"jena_weather/10T\",\n", + " \"jena_weather/H\",\n", + " \"jena_weather/D\",\n", + " \"bitbrains_fast_storage/5T\",\n", + " \"bitbrains_fast_storage/H\",\n", + " \"bitbrains_rnd/5T\",\n", + " \"bitbrains_rnd/H\",\n", + " \"bizitobs_application\",\n", + " \"bizitobs_service\",\n", + " \"bizitobs_l2c/5T\",\n", + " \"bizitobs_l2c/H\",\n", + ")\n", + "\n", + "MED_LONG_DATASETS = (\n", + " \"electricity/15T\",\n", + " \"electricity/H\",\n", + " \"solar/10T\",\n", + " \"solar/H\",\n", + " \"kdd_cup_2018_with_missing/H\",\n", + " \"LOOP_SEATTLE/5T\",\n", + " \"LOOP_SEATTLE/H\",\n", + " \"SZ_TAXI/15T\",\n", + " \"M_DENSE/H\",\n", + " \"ett1/15T\",\n", + " \"ett1/H\",\n", + " \"ett2/15T\",\n", + " \"ett2/H\",\n", + " \"jena_weather/10T\",\n", + " \"jena_weather/H\",\n", + " \"bitbrains_fast_storage/5T\",\n", + " \"bitbrains_rnd/5T\",\n", + " \"bizitobs_application\",\n", + " \"bizitobs_service\",\n", + " \"bizitobs_l2c/5T\",\n", + " \"bizitobs_l2c/H\",\n", + ")\n", + "\n", + "# Preserve insertion order\n", + "ALL_DATASETS = list(dict.fromkeys(SHORT_DATASETS + MED_LONG_DATASETS))\n", + "\n", + "# Evaluation terms\n", + "TERMS = (\"short\", \"medium\", 
\"long\")\n", + "\n", + "# Pretty names mapping\n", + "PRETTY_NAMES = {\n", + " \"saugeenday\": \"saugeen\",\n", + " \"temperature_rain_with_missing\": \"temperature_rain\",\n", + " \"kdd_cup_2018_with_missing\": \"kdd_cup_2018\",\n", + " \"car_parts_with_missing\": \"car_parts\",\n", + "}\n", + "\n", + "# Metrics\n", + "METRICS = (\n", + " MSE(forecast_type=\"mean\"),\n", + " MSE(forecast_type=0.5),\n", + " MAE(),\n", + " MASE(),\n", + " MAPE(),\n", + " SMAPE(),\n", + " MSIS(),\n", + " RMSE(),\n", + " NRMSE(),\n", + " ND(),\n", + " MeanWeightedSumQuantileLoss(\n", + " quantile_levels=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]\n", + " ),\n", + ")\n", + "\n", + "# Standard metric names for CSV header\n", + "STANDARD_METRIC_NAMES = (\n", + " \"MSE[mean]\",\n", + " \"MSE[0.5]\",\n", + " \"MAE[0.5]\",\n", + " \"MASE[0.5]\",\n", + " \"MAPE[0.5]\",\n", + " \"sMAPE[0.5]\",\n", + " \"MSIS\",\n", + " \"RMSE[mean]\",\n", + " \"NRMSE[mean]\",\n", + " \"ND[0.5]\",\n", + " \"mean_weighted_sum_quantile_loss\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "i7j8k9l0", + "metadata": {}, + "source": [ + "### 3.2. Core Data Structures " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "j8k9l0m1", + "metadata": {}, + "outputs": [], + "source": [ + "@dataclass\n", + "class DatasetMetadata:\n", + " \"\"\"Structured description of a dataset/term combination.\"\"\"\n", + "\n", + " full_name: str\n", + " key: str\n", + " freq: str\n", + " term: str\n", + " season_length: int\n", + " target_dim: int\n", + " to_univariate: bool\n", + " prediction_length: int\n", + " windows: int\n", + "\n", + "\n", + "@dataclass\n", + "class EvaluationItem:\n", + " \"\"\"Container for evaluation results and optional figures.\"\"\"\n", + "\n", + " dataset_metadata: DatasetMetadata\n", + " metrics: Dict\n", + " figures: List[Tuple[object, str]]\n", + "\n", + "\n", + "DatasetSelection = Union[List[str], Tuple[str, ...], str]\n", + "\n", + "\n", + "def expand_datasets_arg(datasets: DatasetSelection) -> List[str]:\n", + " \"\"\"Normalize dataset selection strings to explicit lists.\"\"\"\n", + "\n", + " if isinstance(datasets, str):\n", + " dataset_list = [datasets]\n", + " else:\n", + " dataset_list = list(datasets)\n", + "\n", + " if not dataset_list:\n", + " return []\n", + "\n", + " if dataset_list[0] == \"all\":\n", + " return list(ALL_DATASETS)\n", + "\n", + " for dataset in dataset_list:\n", + " if dataset not in ALL_DATASETS:\n", + " raise ValueError(f\"Invalid dataset: {dataset}. Use one of {ALL_DATASETS}\")\n", + "\n", + " return dataset_list" + ] + }, + { + "cell_type": "markdown", + "id": "k9l0m1n2", + "metadata": {}, + "source": [ + "### 3.3. GIFT-Eval Dataset Class (`data.py`)\n", + "\n", + "The `Dataset` class handles loading and preprocessing GIFT-Eval benchmark datasets. This implementation is adapted from the official GIFT-Eval repository." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "l0m1n2o3", + "metadata": {}, + "outputs": [], + "source": [ + "TEST_SPLIT = 0.1\n", + "MAX_WINDOW = 20\n", + "\n", + "M4_PRED_LENGTH_MAP = {\n", + " \"A\": 6,\n", + " \"Q\": 8,\n", + " \"M\": 18,\n", + " \"W\": 13,\n", + " \"D\": 14,\n", + " \"H\": 48,\n", + " \"h\": 48,\n", + " \"Y\": 6,\n", + "}\n", + "\n", + "PRED_LENGTH_MAP = {\n", + " \"M\": 12,\n", + " \"W\": 8,\n", + " \"D\": 30,\n", + " \"H\": 48,\n", + " \"h\": 48,\n", + " \"T\": 48,\n", + " \"S\": 60,\n", + " \"s\": 60,\n", + " \"min\": 48,\n", + "}\n", + "\n", + "TFB_PRED_LENGTH_MAP = {\n", + " \"A\": 6,\n", + " \"Y\": 6,\n", + " \"H\": 48,\n", + " \"h\": 48,\n", + " \"Q\": 8,\n", + " \"D\": 14,\n", + " \"M\": 18,\n", + " \"W\": 13,\n", + " \"U\": 8,\n", + " \"T\": 8,\n", + " \"min\": 8,\n", + " \"us\": 8,\n", + "}\n", + "\n", + "\n", + "class Term(Enum):\n", + " SHORT = \"short\"\n", + " MEDIUM = \"medium\"\n", + " LONG = \"long\"\n", + "\n", + " @property\n", + " def multiplier(self) -> int:\n", + " if self == Term.SHORT:\n", + " return 1\n", + " elif self == Term.MEDIUM:\n", + " return 10\n", + " elif self == Term.LONG:\n", + " return 15\n", + "\n", + "\n", + "def itemize_start(data_entry: DataEntry) -> DataEntry:\n", + " data_entry[\"start\"] = data_entry[\"start\"].item()\n", + " return data_entry\n", + "\n", + "\n", + "class MultivariateToUnivariate(Transformation):\n", + " def __init__(self, field):\n", + " self.field = field\n", + "\n", + " def __call__(\n", + " self, data_it: Iterable[DataEntry], is_train: bool = False\n", + " ) -> Iterator:\n", + " for data_entry in data_it:\n", + " item_id = data_entry[\"item_id\"]\n", + " val_ls = list(data_entry[self.field])\n", + " for id, val in enumerate(val_ls):\n", + " univariate_entry = data_entry.copy()\n", + " univariate_entry[self.field] = val\n", + " univariate_entry[\"item_id\"] = item_id + \"_dim\" + str(id)\n", + " yield univariate_entry\n", + "\n", + "\n", + "class Dataset:\n", + " def __init__(\n", + " self,\n", + " name: str,\n", + " term: Term | str = Term.SHORT,\n", + " to_univariate: bool = False,\n", + " storage_path: str = None,\n", + " max_windows: Optional[int] = None,\n", + " ):\n", + " storage_path = Path(storage_path)\n", + " self.hf_dataset = datasets.load_from_disk(str(storage_path / name)).with_format(\n", + " \"numpy\"\n", + " )\n", + " process = ProcessDataEntry(\n", + " self.freq,\n", + " one_dim_target=self.target_dim == 1,\n", + " )\n", + "\n", + " self.gluonts_dataset = Map(compose(process, itemize_start), self.hf_dataset)\n", + " if to_univariate:\n", + " self.gluonts_dataset = MultivariateToUnivariate(\"target\").apply(\n", + " self.gluonts_dataset\n", + " )\n", + "\n", + " self.term = Term(term)\n", + " self.name = name\n", + " self.max_windows = max_windows if max_windows is not None else MAX_WINDOW\n", + "\n", + " @cached_property\n", + " def prediction_length(self) -> int:\n", + " freq = norm_freq_str(to_offset(self.freq).name)\n", + " if freq.endswith(\"E\"):\n", + " freq = freq[:-1]\n", + " pred_len = (\n", + " M4_PRED_LENGTH_MAP[freq] if \"m4\" in self.name else PRED_LENGTH_MAP[freq]\n", + " )\n", + " return self.term.multiplier * pred_len\n", + "\n", + " @cached_property\n", + " def freq(self) -> str:\n", + " return self.hf_dataset[0][\"freq\"]\n", + "\n", + " @cached_property\n", + " def target_dim(self) -> int:\n", + " return (\n", + " target.shape[0]\n", + " if len((target := self.hf_dataset[0][\"target\"]).shape) > 1\n", + " else 1\n", + " )\n", + "\n", + " 
@cached_property\n", + " def past_feat_dynamic_real_dim(self) -> int:\n", + " if \"past_feat_dynamic_real\" not in self.hf_dataset[0]:\n", + " return 0\n", + " elif (\n", + " len(\n", + " (\n", + " past_feat_dynamic_real := self.hf_dataset[0][\n", + " \"past_feat_dynamic_real\"\n", + " ]\n", + " ).shape\n", + " )\n", + " > 1\n", + " ):\n", + " return past_feat_dynamic_real.shape[0]\n", + " else:\n", + " return 1\n", + "\n", + " @cached_property\n", + " def windows(self) -> int:\n", + " if \"m4\" in self.name:\n", + " return 1\n", + " w = math.ceil(TEST_SPLIT * self._min_series_length / self.prediction_length)\n", + " return min(max(1, w), self.max_windows)\n", + "\n", + " @cached_property\n", + " def _min_series_length(self) -> int:\n", + " if self.hf_dataset[0][\"target\"].ndim > 1:\n", + " lengths = pc.list_value_length(\n", + " pc.list_flatten(\n", + " pc.list_slice(self.hf_dataset.data.column(\"target\"), 0, 1)\n", + " )\n", + " )\n", + " else:\n", + " lengths = pc.list_value_length(self.hf_dataset.data.column(\"target\"))\n", + " return min(lengths.to_numpy())\n", + "\n", + " @cached_property\n", + " def sum_series_length(self) -> int:\n", + " if self.hf_dataset[0][\"target\"].ndim > 1:\n", + " lengths = pc.list_value_length(\n", + " pc.list_flatten(self.hf_dataset.data.column(\"target\"))\n", + " )\n", + " else:\n", + " lengths = pc.list_value_length(self.hf_dataset.data.column(\"target\"))\n", + " return sum(lengths.to_numpy())\n", + "\n", + " @property\n", + " def training_dataset(self) -> TrainingDataset:\n", + " training_dataset, _ = split(\n", + " self.gluonts_dataset, offset=-self.prediction_length * (self.windows + 1)\n", + " )\n", + " return training_dataset\n", + "\n", + " @property\n", + " def validation_dataset(self) -> TrainingDataset:\n", + " validation_dataset, _ = split(\n", + " self.gluonts_dataset, offset=-self.prediction_length * self.windows\n", + " )\n", + " return validation_dataset\n", + "\n", + " @property\n", + " def test_data(self) -> TestData:\n", + " _, test_template = split(\n", + " self.gluonts_dataset, offset=-self.prediction_length * self.windows\n", + " )\n", + " test_data = test_template.generate_instances(\n", + " prediction_length=self.prediction_length,\n", + " windows=self.windows,\n", + " distance=self.prediction_length,\n", + " )\n", + " return test_data" + ] + }, + { + "cell_type": "markdown", + "id": "m1n2o3p4", + "metadata": {}, + "source": [ + "### 3.4. Predictor Wrapper (`predictor.py`)\n", + "\n", + "This is the model-specific `TimeSeriesPredictor` class for `TempoPFN`. It wraps the core `TimeSeriesModel` and adapts it to the `gluonts`-style `Predictor` interface, which expects a `.predict()` method." 
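+    ,
+    "\n\nConceptually, the call pattern is simply `forecasts = predictor.predict(dataset.test_data.input)`, yielding one `QuantileForecast` per test window; in this notebook that call is made for us by `evaluate_model` in Section 3.6 (this line is only an interface sketch, not an extra code path)."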
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "n2o3p4q5", + "metadata": {}, + "outputs": [], + "source": [ + "class TimeSeriesPredictor(Predictor):\n", + " \"\"\"Unified predictor for TimeSeriesModel supporting flexible construction.\"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " model: TimeSeriesModel,\n", + " config: dict,\n", + " ds_prediction_length: int,\n", + " ds_freq: str,\n", + " batch_size: int = 32,\n", + " max_context_length: Optional[int] = None,\n", + " debug: bool = False,\n", + " ) -> None:\n", + " # Dataset-specific context (can be updated per dataset/term)\n", + " self.ds_prediction_length = ds_prediction_length\n", + " self.ds_freq = ds_freq\n", + " self.batch_size = batch_size\n", + " self.max_context_length = max_context_length\n", + " self.debug = debug\n", + "\n", + " # Persistent model/config (unwrap DDP if needed)\n", + " self.model = model.module if isinstance(model, DDP) else model\n", + " self.model.eval()\n", + " self.config = config\n", + "\n", + " # Initialize scaler (using same type as model)\n", + " scaler_type = self.config.get(\"TimeSeriesModel\", {}).get(\n", + " \"scaler\", \"custom_robust\"\n", + " )\n", + " epsilon = self.config.get(\"TimeSeriesModel\", {}).get(\"epsilon\", 1e-3)\n", + " if scaler_type == \"custom_robust\":\n", + " self.scaler = RobustScaler(epsilon=epsilon)\n", + " else:\n", + " raise ValueError(f\"Unsupported scaler type: {scaler_type}\")\n", + "\n", + " def set_dataset_context(\n", + " self,\n", + " prediction_length: Optional[int] = None,\n", + " freq: Optional[str] = None,\n", + " batch_size: Optional[int] = None,\n", + " max_context_length: Optional[int] = None,\n", + " ) -> None:\n", + " \"\"\"Update lightweight dataset-specific attributes without reloading the model.\"\"\"\n", + "\n", + " if prediction_length is not None:\n", + " self.ds_prediction_length = prediction_length\n", + " if freq is not None:\n", + " self.ds_freq = freq\n", + " if batch_size is not None:\n", + " self.batch_size = batch_size\n", + " if max_context_length is not None:\n", + " self.max_context_length = max_context_length\n", + "\n", + " @classmethod\n", + " def from_model(\n", + " cls,\n", + " model: TimeSeriesModel,\n", + " config: dict,\n", + " ds_prediction_length: int,\n", + " ds_freq: str,\n", + " batch_size: int = 32,\n", + " max_context_length: Optional[int] = None,\n", + " debug: bool = False,\n", + " ) -> \"TimeSeriesPredictor\":\n", + " return cls(\n", + " model=model,\n", + " config=config,\n", + " ds_prediction_length=ds_prediction_length,\n", + " ds_freq=ds_freq,\n", + " batch_size=batch_size,\n", + " max_context_length=max_context_length,\n", + " debug=debug,\n", + " )\n", + "\n", + " @classmethod\n", + " def from_paths(\n", + " cls,\n", + " model_path: str,\n", + " config_path: str,\n", + " ds_prediction_length: int,\n", + " ds_freq: str,\n", + " batch_size: int = 32,\n", + " max_context_length: Optional[int] = None,\n", + " debug: bool = False,\n", + " ) -> \"TimeSeriesPredictor\":\n", + " with open(config_path, \"r\") as f:\n", + " config = yaml.safe_load(f)\n", + " model = cls._load_model_from_path(config=config, model_path=model_path)\n", + " return cls(\n", + " model=model,\n", + " config=config,\n", + " ds_prediction_length=ds_prediction_length,\n", + " ds_freq=ds_freq,\n", + " batch_size=batch_size,\n", + " max_context_length=max_context_length,\n", + " debug=debug,\n", + " )\n", + "\n", + " @staticmethod\n", + " def _load_model_from_path(config: dict, model_path: str) -> TimeSeriesModel:\n", + 
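"        \"\"\"Instantiate `TimeSeriesModel` from `config[\"TimeSeriesModel\"]` and load weights from the checkpoint's `model_state_dict` entry.\"\"\"\n",
+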
" try:\n", + " model = TimeSeriesModel(**config[\"TimeSeriesModel\"]).to(device)\n", + " checkpoint = torch.load(model_path, map_location=device)\n", + " model.load_state_dict(checkpoint[\"model_state_dict\"])\n", + " model.eval()\n", + " logger.info(f\"Successfully loaded model from {model_path}\")\n", + " return model\n", + " except Exception as exc: # pragma: no cover - logging path\n", + " logger.error(f\"Failed to load model from {model_path}: {exc}\")\n", + " raise\n", + "\n", + " def predict(self, test_data_input) -> Iterator[QuantileForecast]:\n", + " \"\"\"Generate forecasts for the test data.\"\"\"\n", + "\n", + " if hasattr(test_data_input, \"__iter__\") and not isinstance(test_data_input, list):\n", + " test_data_input = list(test_data_input)\n", + " logger.debug(f\"Processing {len(test_data_input)} time series\")\n", + "\n", + " # Group series by their effective length (after optional truncation),\n", + " # then process each uniform-length group in sub-batches up to batch_size.\n", + " def _effective_length(entry) -> int:\n", + " target = entry[\"target\"]\n", + " if target.ndim == 1:\n", + " seq_len = len(target)\n", + " else:\n", + " # target shape is [num_channels, seq_len]\n", + " seq_len = target.shape[1]\n", + " if self.max_context_length is not None:\n", + " seq_len = min(seq_len, self.max_context_length)\n", + " return seq_len\n", + "\n", + " length_to_items: dict[int, List[tuple[int, object]]] = {}\n", + " for idx, entry in enumerate(test_data_input):\n", + " seq_len = _effective_length(entry)\n", + " length_to_items.setdefault(seq_len, []).append((idx, entry))\n", + "\n", + " total = len(test_data_input)\n", + " ordered_results: List[Optional[QuantileForecast]] = [None] * total\n", + "\n", + " for _, items in length_to_items.items():\n", + " for i in range(0, len(items), self.batch_size):\n", + " chunk = items[i : i + self.batch_size]\n", + " entries = [entry for (_orig_idx, entry) in chunk]\n", + " batch_forecasts = self._predict_batch(entries)\n", + " for forecast_idx, (orig_idx, _entry) in enumerate(chunk):\n", + " ordered_results[orig_idx] = batch_forecasts[forecast_idx]\n", + "\n", + " return ordered_results # type: ignore[return-value]\n", + "\n", + " def _predict_batch(self, test_data_batch: List) -> List[QuantileForecast]:\n", + " \"\"\"Generate predictions for a batch of time series.\"\"\"\n", + "\n", + " logger.debug(f\"Processing batch of size: {len(test_data_batch)}\")\n", + "\n", + " try:\n", + " batch_container = self._convert_to_batch_container(test_data_batch)\n", + "\n", + " if isinstance(device, torch.device):\n", + " device_type = device.type\n", + " else:\n", + " device_type = \"cuda\" if \"cuda\" in str(device).lower() else \"cpu\"\n", + " enable_autocast = device_type == \"cuda\"\n", + "\n", + " with torch.autocast(\n", + " device_type=device_type,\n", + " dtype=torch.bfloat16,\n", + " enabled=enable_autocast,\n", + " ):\n", + " with torch.no_grad():\n", + " model_output = self.model(batch_container, drop_enc_allow=False)\n", + "\n", + " forecasts = self._convert_to_forecasts(\n", + " model_output, test_data_batch, batch_container\n", + " )\n", + "\n", + " logger.debug(f\"Generated {len(forecasts)} forecasts\")\n", + " return forecasts\n", + " except Exception as exc: # pragma: no cover - logging path\n", + " logger.error(f\"Error in batch prediction: {exc}\")\n", + " raise\n", + "\n", + " def _convert_to_batch_container(\n", + " self, test_data_batch: List\n", + " ) -> BatchTimeSeriesContainer:\n", + " \"\"\"Convert gluonts test data to 
BatchTimeSeriesContainer.\"\"\"\n", + "\n", + " batch_size = len(test_data_batch)\n", + " history_values_list = []\n", + " start_dates = []\n", + " frequencies = []\n", + "\n", + " for entry in test_data_batch:\n", + " target = entry[\"target\"]\n", + "\n", + " if target.ndim == 1:\n", + " target = target.reshape(-1, 1)\n", + " else:\n", + " target = target.T\n", + "\n", + " if (\n", + " self.max_context_length is not None\n", + " and len(target) > self.max_context_length\n", + " ):\n", + " target = target[-self.max_context_length :]\n", + "\n", + " history_values_list.append(target)\n", + " start_dates.append(entry[\"start\"].to_timestamp().to_datetime64())\n", + " frequencies.append(parse_frequency(entry[\"freq\"]))\n", + "\n", + " history_values_np = np.stack(history_values_list, axis=0)\n", + " num_channels = history_values_np.shape[2]\n", + "\n", + " history_values = torch.tensor(\n", + " history_values_np, dtype=torch.float32, device=device\n", + " )\n", + "\n", + " future_values = torch.zeros(\n", + " (batch_size, self.ds_prediction_length, num_channels),\n", + " dtype=torch.float32,\n", + " device=device,\n", + " )\n", + "\n", + " return BatchTimeSeriesContainer(\n", + " history_values=history_values,\n", + " future_values=future_values,\n", + " start=start_dates,\n", + " frequency=frequencies,\n", + " )\n", + "\n", + " def _convert_to_forecasts(\n", + " self,\n", + " model_output: dict,\n", + " test_data_batch: List,\n", + " batch_container: BatchTimeSeriesContainer,\n", + " ) -> List[QuantileForecast]:\n", + " \"\"\"Convert model predictions to QuantileForecast objects.\"\"\"\n", + "\n", + " predictions = model_output[\"result\"]\n", + " scale_statistics = model_output[\"scale_statistics\"]\n", + "\n", + " if predictions.ndim == 4:\n", + " predictions_unscaled = self.scaler.inverse_scale(\n", + " predictions, scale_statistics\n", + " )\n", + " is_quantile = True\n", + " quantile_levels = self.model.quantiles\n", + " else:\n", + " predictions_unscaled = self.scaler.inverse_scale(\n", + " predictions, scale_statistics\n", + " )\n", + " is_quantile = False\n", + " quantile_levels = [0.5]\n", + "\n", + " forecasts: List[QuantileForecast] = []\n", + " for idx, entry in enumerate(test_data_batch):\n", + " history_length = int(batch_container.history_values.shape[1])\n", + " start_date = entry[\"start\"]\n", + " forecast_start = start_date + history_length\n", + "\n", + " if is_quantile:\n", + " pred_array = predictions_unscaled[idx].cpu().numpy()\n", + "\n", + " if pred_array.shape[1] == 1:\n", + " pred_array = pred_array.squeeze(1)\n", + " forecast_arrays = pred_array.T\n", + " else:\n", + " forecast_arrays = pred_array.transpose(2, 0, 1)\n", + "\n", + " forecast = QuantileForecast(\n", + " forecast_arrays=forecast_arrays,\n", + " forecast_keys=[str(q) for q in quantile_levels],\n", + " start_date=forecast_start,\n", + " )\n", + " else:\n", + " pred_array = predictions_unscaled[idx].cpu().numpy()\n", + "\n", + " if pred_array.shape[1] == 1:\n", + " pred_array = pred_array.squeeze(1)\n", + " forecast_arrays = pred_array.reshape(1, -1)\n", + " else:\n", + " forecast_arrays = pred_array.reshape(1, *pred_array.shape)\n", + "\n", + " forecast = QuantileForecast(\n", + " forecast_arrays=forecast_arrays,\n", + " forecast_keys=[\"0.5\"],\n", + " start_date=forecast_start,\n", + " )\n", + "\n", + " forecasts.append(forecast)\n", + "\n", + " return forecasts" + ] + }, + { + "cell_type": "markdown", + "id": "o3p4q5r6", + "metadata": {}, + "source": [ + "### 3.5. 
Result Handling \n", + "\n", + "These functions handle writing the per-dataset metrics to CSV files and aggregating all results into a single `all_results.csv` at the end." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "p4q5r6s7", + "metadata": {}, + "outputs": [], + "source": [ + "def _ensure_results_csv(csv_file_path: Path) -> None:\n", + " if not csv_file_path.exists():\n", + " csv_file_path.parent.mkdir(parents=True, exist_ok=True)\n", + " with open(csv_file_path, \"w\", newline=\"\") as csvfile:\n", + " writer = csv.writer(csvfile)\n", + " header = (\n", + " [\"dataset\", \"model\"]\n", + " + [f\"eval_metrics/{name}\" for name in STANDARD_METRIC_NAMES]\n", + " + [\"domain\", \"num_variates\"]\n", + " )\n", + " writer.writerow(header)\n", + "\n", + "\n", + "def write_results_to_disk(\n", + " items: List[EvaluationItem],\n", + " dataset_name: str,\n", + " output_dir: Path,\n", + " model_name: str,\n", + " create_plots: bool,\n", + ") -> None:\n", + " output_dir = output_dir / dataset_name\n", + " output_dir.mkdir(parents=True, exist_ok=True)\n", + " output_csv_path = output_dir / \"results.csv\"\n", + " _ensure_results_csv(output_csv_path)\n", + "\n", + " with open(output_csv_path, \"a\", newline=\"\") as csvfile:\n", + " writer = csv.writer(csvfile)\n", + " for item in items:\n", + " md: DatasetMetadata = item.dataset_metadata\n", + " metric_values: List[Optional[float]] = []\n", + " for metric_name in STANDARD_METRIC_NAMES:\n", + " value = item.metrics.get(metric_name, None)\n", + " if value is None:\n", + " metric_values.append(None)\n", + " else:\n", + " if (\n", + " hasattr(value, \"__len__\")\n", + " and not isinstance(value, (str, bytes))\n", + " and len(value) == 1\n", + " ):\n", + " value = value[0]\n", + " elif hasattr(value, \"item\"):\n", + " value = value.item()\n", + " metric_values.append(value)\n", + "\n", + " ds_key = md.key.lower()\n", + " props = DATASET_PROPERTIES.get(ds_key, {})\n", + " domain = props.get(\"domain\", \"unknown\")\n", + " num_variates = props.get(\n", + " \"num_variates\", 1 if md.to_univariate else md.target_dim\n", + " )\n", + "\n", + " row = [md.full_name, model_name] + metric_values + [domain, num_variates]\n", + " writer.writerow(row)\n", + "\n", + " if create_plots and item.figures and plt is not None:\n", + " plots_dir = output_dir / \"plots\" / md.key / md.term\n", + " plots_dir.mkdir(parents=True, exist_ok=True)\n", + " for fig, filename in item.figures:\n", + " filepath = plots_dir / filename\n", + " fig.savefig(filepath, dpi=300, bbox_inches=\"tight\")\n", + " plt.close(fig)\n", + "\n", + " logger.info(\n", + " \"Evaluation complete for dataset '%s'. 
Results saved to %s\",\n", + " dataset_name,\n", + " output_csv_path,\n", + " )\n", + " if create_plots:\n", + " logger.info(\"Plots saved under %s\", output_dir / \"plots\")\n", + "\n", + "\n", + "def get_all_datasets_full_name() -> List[str]:\n", + " \"\"\"Get all possible dataset full names for validation.\"\"\"\n", + "\n", + " terms = [\"short\", \"medium\", \"long\"]\n", + " datasets_full_names: List[str] = []\n", + "\n", + " for name in ALL_DATASETS:\n", + " for term in terms:\n", + " if term in [\"medium\", \"long\"] and name not in MED_LONG_DATASETS:\n", + " continue\n", + "\n", + " if \"/\" in name:\n", + " ds_key, ds_freq = name.split(\"/\")\n", + " ds_key = ds_key.lower()\n", + " ds_key = PRETTY_NAMES.get(ds_key, ds_key)\n", + " else:\n", + " ds_key = name.lower()\n", + " ds_key = PRETTY_NAMES.get(ds_key, ds_key)\n", + " ds_freq = DATASET_PROPERTIES.get(ds_key, {}).get(\"frequency\")\n", + "\n", + " datasets_full_names.append(\n", + " f\"{ds_key}/{ds_freq if ds_freq else 'unknown'}/{term}\"\n", + " )\n", + "\n", + " return datasets_full_names\n", + "\n", + "\n", + "def aggregate_results(result_root_dir: str | Path) -> pd.DataFrame | None:\n", + " \"\"\"Aggregate results from multiple CSV files into a single dataframe.\"\"\"\n", + "\n", + " result_root = Path(result_root_dir)\n", + "\n", + " logger.info(\"Aggregating results in: %s\", result_root)\n", + "\n", + " result_files = glob.glob(f\"{result_root}/**/results.csv\", recursive=True)\n", + "\n", + " if not result_files:\n", + " logger.error(\"No result files found!\")\n", + " return None\n", + "\n", + " dataframes: List[pd.DataFrame] = []\n", + " for file in result_files:\n", + " try:\n", + " df = pd.read_csv(file)\n", + " if len(df) > 0:\n", + " dataframes.append(df)\n", + " else:\n", + " logger.warning(\"Empty file: %s\", file)\n", + " except pd.errors.EmptyDataError:\n", + " logger.warning(\"Skipping empty file: %s\", file)\n", + " except Exception as exc:\n", + " logger.error(\"Error reading %s: %s\", file, exc)\n", + "\n", + " if not dataframes:\n", + " logger.warning(\"No valid CSV files found to combine\")\n", + " return None\n", + "\n", + " combined_df = pd.concat(dataframes, ignore_index=True).sort_values(\"dataset\")\n", + "\n", + " if len(combined_df) != len(set(combined_df.dataset)):\n", + " duplicate_datasets = combined_df.dataset[\n", + " combined_df.dataset.duplicated()\n", + " ].tolist()\n", + " logger.warning(\"Warning: Duplicate datasets found: %s\", duplicate_datasets)\n", + " combined_df = combined_df.drop_duplicates(subset=[\"dataset\"], keep=\"first\")\n", + " logger.info(\n", + " \"Removed duplicates, %s unique datasets remaining\", len(combined_df)\n", + " )\n", + "\n", + " logger.info(\"Combined results: %s datasets\", len(combined_df))\n", + "\n", + " all_datasets_full_name = get_all_datasets_full_name()\n", + " completed_experiments = combined_df.dataset.tolist()\n", + "\n", + " completed_experiments_clean = [\n", + " exp for exp in completed_experiments if exp in all_datasets_full_name\n", + " ]\n", + " missing_or_failed_experiments = [\n", + " exp for exp in all_datasets_full_name if exp not in completed_experiments_clean\n", + " ]\n", + "\n", + " logger.info(\"=== EXPERIMENT SUMMARY ===\")\n", + " logger.info(\"Total expected datasets: %s\", len(all_datasets_full_name))\n", + " logger.info(\"Completed experiments: %s\", len(completed_experiments_clean))\n", + " logger.info(\"Missing/failed experiments: %s\", len(missing_or_failed_experiments))\n", + "\n", + " output_file = result_root / 
\"all_results.csv\"\n", + " combined_df.to_csv(output_file, index=False)\n", + " logger.info(\"Combined results saved to: %s\", output_file)\n", + "\n", + " return combined_df" + ] + }, + { + "cell_type": "markdown", + "id": "q5r6s7t8", + "metadata": {}, + "source": [ + "### 3.6. Evaluation Harness (`evaluate.py`)\n", + "\n", + "This is the main evaluation logic that iterates over dataset terms, prepares the data, calls the predictor, and gathers metrics." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "r6s7t8u9", + "metadata": {}, + "outputs": [], + "source": [ + "def construct_evaluation_data(\n", + " dataset_name: str,\n", + " dataset_storage_path: str,\n", + " terms: List[str] = [\"short\", \"medium\", \"long\"],\n", + " max_windows: Optional[int] = None,\n", + ") -> List[Tuple[Dataset, DatasetMetadata]]:\n", + " \"\"\"Build datasets and rich metadata per term for a dataset name.\"\"\"\n", + " sub_datasets: List[Tuple[Dataset, DatasetMetadata]] = []\n", + "\n", + " if \"/\" in dataset_name:\n", + " ds_key, ds_freq = dataset_name.split(\"/\")\n", + " ds_key = ds_key.lower()\n", + " ds_key = PRETTY_NAMES.get(ds_key, ds_key)\n", + " else:\n", + " ds_key = dataset_name.lower()\n", + " ds_key = PRETTY_NAMES.get(ds_key, ds_key)\n", + " ds_freq = DATASET_PROPERTIES.get(ds_key, {}).get(\"frequency\")\n", + "\n", + " for term in terms:\n", + " # Skip medium/long terms for datasets that don't support them\n", + " if (\n", + " term == \"medium\" or term == \"long\"\n", + " ) and dataset_name not in MED_LONG_DATASETS:\n", + " continue\n", + "\n", + " # Probe once to determine dimensionality\n", + " probe_dataset = Dataset(\n", + " name=dataset_name,\n", + " term=term,\n", + " to_univariate=False,\n", + " storage_path=dataset_storage_path,\n", + " max_windows=max_windows,\n", + " )\n", + "\n", + " to_univariate = probe_dataset.target_dim > 1\n", + "\n", + " dataset = Dataset(\n", + " name=dataset_name,\n", + " term=term,\n", + " to_univariate=to_univariate,\n", + " storage_path=dataset_storage_path,\n", + " max_windows=max_windows,\n", + " )\n", + "\n", + " # Compute metadata\n", + " season_length = get_seasonality(dataset.freq)\n", + " actual_freq = ds_freq if ds_freq else dataset.freq\n", + " \n", + " metadata = DatasetMetadata(\n", + " full_name=f\"{ds_key}/{actual_freq}/{term}\",\n", + " key=ds_key,\n", + " freq=actual_freq,\n", + " term=term,\n", + " season_length=season_length,\n", + " target_dim=probe_dataset.target_dim,\n", + " to_univariate=to_univariate,\n", + " prediction_length=dataset.prediction_length,\n", + " windows=dataset.windows,\n", + " )\n", + "\n", + " sub_datasets.append((dataset, metadata))\n", + "\n", + " return sub_datasets\n", + "\n", + "\n", + "def evaluate_datasets(\n", + " predictor: TimeSeriesPredictor,\n", + " dataset: str,\n", + " dataset_storage_path: str,\n", + " terms: List[str] = [\"short\", \"medium\", \"long\"],\n", + " max_windows: Optional[int] = None,\n", + " batch_size: int = 48,\n", + " max_context_length: Optional[int] = 1024,\n", + " create_plots: bool = False,\n", + " max_plots_per_dataset: int = 10,\n", + ") -> List[EvaluationItem]:\n", + " \"\"\"Evaluate predictor on one dataset across the requested terms.\"\"\"\n", + " sub_datasets = construct_evaluation_data(\n", + " dataset_name=dataset,\n", + " dataset_storage_path=dataset_storage_path,\n", + " terms=terms,\n", + " max_windows=max_windows,\n", + " )\n", + "\n", + " results: List[EvaluationItem] = []\n", + " for i, (sub_dataset, metadata) in enumerate(sub_datasets):\n", + " 
logger.info(f\"Evaluating {i + 1}/{len(sub_datasets)}: {metadata.full_name}\")\n", + " logger.info(f\" Dataset size: {len(sub_dataset.test_data)}\")\n", + " logger.info(f\" Frequency: {sub_dataset.freq}\")\n", + " logger.info(f\" Term: {metadata.term}\")\n", + " logger.info(f\" Prediction length: {sub_dataset.prediction_length}\")\n", + " logger.info(f\" Target dimensions: {sub_dataset.target_dim}\")\n", + " logger.info(f\" Windows: {sub_dataset.windows}\")\n", + "\n", + " # Update context on the reusable predictor\n", + " predictor.set_dataset_context(\n", + " prediction_length=sub_dataset.prediction_length,\n", + " freq=sub_dataset.freq,\n", + " batch_size=batch_size,\n", + " max_context_length=max_context_length,\n", + " )\n", + "\n", + " res = evaluate_model(\n", + " model=predictor,\n", + " test_data=sub_dataset.test_data,\n", + " metrics=METRICS,\n", + " axis=None,\n", + " mask_invalid_label=True,\n", + " allow_nan_forecast=False,\n", + " seasonality=metadata.season_length,\n", + " )\n", + "\n", + " figs: List[Tuple[object, str]] = []\n", + " if create_plots:\n", + " # We are missing `src.plotting.gift_eval_utils.create_plots_for_dataset`\n", + " # As this was not provided, plotting will be skipped.\n", + " logger.warning(\"Plotting is enabled but `create_plots_for_dataset` is not defined. Skipping plot generation.\")\n", + " pass\n", + "\n", + " results.append(\n", + " EvaluationItem(dataset_metadata=metadata, metrics=res, figures=figs)\n", + " )\n", + "\n", + " return results" + ] + }, + { + "cell_type": "markdown", + "id": "s7t8u9v0", + "metadata": {}, + "source": [ + "## 4. Configuration\n", + "\n", + "Set the parameters for the evaluation run. Update `config_path` and `checkpoint_url` to point to your model's files." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "t8u9v0w1", + "metadata": {}, + "outputs": [], + "source": [ + "# --- Parameters ---\n", + "model_path = None # e.g., \"/path/to/checkpoint.pth\"; if None, try checkpoint_url\n", + "config_path = Path.cwd().parent.parent / \"configs/example.yaml\" \n", + "checkpoint_url = \"https://www.dropbox.com/scl/fi/mqsni5lehooyaw93y3uzq/checkpoint_38M.pth?rlkey=3uyehvmtted02xkha24zgpzb6&st=seevsbkn&dl=0\" \n", + "\n", + "# --- Datasets and evaluation controls ---\n", + "# Use a small subset for testing, e.g., [\"m4_weekly\"]\n", + "datasets_arg = [\"all\"] # list of dataset names or [\"all\"]. \n", + "terms = [\"short\", \"medium\", \"long\"]\n", + "dataset_storage_path = os.getenv(\"GIFT_EVAL_DATASET_STORAGE_PATH\")\n", + "max_windows = 20\n", + "batch_size = 64\n", + "max_context_length = 3072 \n", + "\n", + "# --- Output ---\n", + "after_each_dataset_flush = True # write CSV as each dataset completes\n", + "model_name = \"TempoPFN\"\n", + "download_dir = Path.cwd().parent / \"models\"\n", + "output_dir = Path.cwd().parent / \"gift_eval_results\" / model_name\n", + "\n", + "# --- Helper Functions ---\n", + "\n", + "def download_checkpoint_if_needed(url: str, target_dir: Path, target_filename: str = \"checkpoint.pth\") -> Path:\n", + " \"\"\"Downloads a file from a URL if it doesn't exist.\"\"\"\n", + " try:\n", + " import requests\n", + " except ImportError:\n", + " logger.error(\"requests package not found. 
Please install it: pip install requests\")\n", + " raise\n", + " \n", + " target_dir.mkdir(parents=True, exist_ok=True)\n", + " target_file_path = target_dir / target_filename\n", + " \n", + " if target_file_path.exists():\n", + " logger.info(f\"Checkpoint already exists: {target_file_path}\")\n", + " return target_file_path\n", + " \n", + " logger.info(f\"Downloading checkpoint from {url} to {target_file_path}...\")\n", + " \n", + " # Handle Dropbox links\n", + " if \"dropbox.com\" in url:\n", + " url = url.replace(\"dl=0\", \"dl=1\").replace(\"st=\", \"dl=1&st=\")\n", + " \n", + " try:\n", + " with requests.get(url, stream=True) as r:\n", + " r.raise_for_status()\n", + " with open(target_file_path, 'wb') as f:\n", + " for chunk in r.iter_content(chunk_size=8192):\n", + " f.write(chunk)\n", + " logger.info(\"Download complete.\")\n", + " return target_file_path\n", + " except Exception as e:\n", + " logger.error(f\"Failed to download checkpoint: {e}\")\n", + " if target_file_path.exists():\n", + " os.remove(target_file_path) # Clean up partial download\n", + " raise\n", + "\n", + "def _load_yaml(path: str) -> dict:\n", + " with open(path, \"r\") as f:\n", + " return yaml.safe_load(f)" + ] + }, + { + "cell_type": "markdown", + "id": "u9v0w1x2", + "metadata": {}, + "source": [ + "## 5. Main Evaluation Loop\n", + "\n", + "This cell sets up the predictor and runs the main evaluation loop over all specified datasets." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "v0w1x2y3", + "metadata": {}, + "outputs": [], + "source": [ + "logger.info(\"Starting evaluation for model: %s\", model_name)\n", + "\n", + "# 1. Build predictor from a checkpoint\n", + "resolved_model_path = None\n", + "if model_path:\n", + " resolved_model_path = model_path\n", + "elif checkpoint_url:\n", + " resolved_model_path = download_checkpoint_if_needed(\n", + " checkpoint_url, \n", + " target_dir=download_dir,\n", + " target_filename=f\"{model_name}_checkpoint.pth\"\n", + " )\n", + "\n", + "if not resolved_model_path or not Path(resolved_model_path).exists():\n", + " raise FileNotFoundError(\n", + " f\"No model checkpoint found. Set `model_path` or `checkpoint_url`. Tried: {resolved_model_path}\"\n", + " )\n", + "\n", + "assert Path(config_path).exists(), f\"Config not found: {config_path}\"\n", + "logger.info(\"Loading predictor from checkpoint: %s\", resolved_model_path)\n", + "\n", + "predictor = TimeSeriesPredictor.from_paths(\n", + " model_path=resolved_model_path,\n", + " config_path=config_path,\n", + " ds_prediction_length=1, # placeholder; set per dataset\n", + " ds_freq=\"D\", # placeholder; set per dataset\n", + " batch_size=batch_size,\n", + " max_context_length=max_context_length,\n", + ")\n", + "\n", + "# 2. 
Run evaluation loop\n", + "datasets_to_run = expand_datasets_arg(datasets_arg)\n", + "results_root = Path(output_dir)\n", + "\n", + "for ds_name in datasets_to_run:\n", + " try:\n", + " items = evaluate_datasets(\n", + " predictor=predictor,\n", + " dataset=ds_name,\n", + " dataset_storage_path=dataset_storage_path,\n", + " terms=terms,\n", + " max_windows=max_windows,\n", + " batch_size=batch_size,\n", + " max_context_length=max_context_length,\n", + " create_plots=False, # Set to True if you implement plotting\n", + " max_plots_per_dataset=0,\n", + " )\n", + " write_results_to_disk(\n", + " items=items,\n", + " dataset_name=ds_name,\n", + " output_dir=results_root,\n", + " model_name=model_name,\n", + " create_plots=False,\n", + " )\n", + " if after_each_dataset_flush:\n", + " logger.info(\"Flushed results for %s\", ds_name)\n", + " except Exception as e:\n", + " logger.error(f\"FAILED evaluation for dataset: {ds_name}. Error: {e} !!!\")\n", + " logger.exception(e)\n", + " continue # Continue to the next dataset\n", + "\n", + "print(f\"\\nEvaluation complete. See results under: {output_dir}\")" + ] + }, + { + "cell_type": "markdown", + "id": "w1x2y3z4", + "metadata": {}, + "source": [ + "## 6. Aggregate Results\n", + "\n", + "Finally, we'll aggregate the individual CSV files into a single `all_results.csv` file for easy analysis, following the `gift-eval` convention." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "x2y3z4a5", + "metadata": {}, + "outputs": [], + "source": [ + "logger.info(\"Aggregating results from all datasets...\")\n", + "combined_df = aggregate_results(result_root_dir=output_dir)\n", + "\n", + "if combined_df is not None:\n", + " agg_path = Path(output_dir) / \"all_results.csv\"\n", + " logger.info(\"Successfully created aggregated results file: %s\", agg_path)\n", + " print(f\"\\n✅ Aggregated results saved to: {agg_path}\")\n", + " print(combined_df.head())\n", + "else:\n", + " logger.warning(\"No results to aggregate. 
Check that evaluation completed successfully.\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/quick_start_tempo_pfn.ipynb b/examples/quick_start_tempo_pfn.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..559b3fad57a70b91c52ae49e75b8a1a0d922bf32 --- /dev/null +++ b/examples/quick_start_tempo_pfn.ipynb @@ -0,0 +1,280 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "231c6227", + "metadata": {}, + "source": [ + "# Quick Start: Univariate Quantile Forecasting (CUDA, bfloat16)\n", + "\n", + "This notebook demonstrates how to:\n", + "- Generate synthetic sine wave time series data\n", + "- Pack data into `BatchTimeSeriesContainer`\n", + "- Load a pretrained model (from Dropbox)\n", + "- Run inference with bfloat16 on CUDA\n", + "- Visualize predictions\n" + ] + }, + { + "cell_type": "markdown", + "id": "bb6c5424-1c63-4cb0-a818-45d4199914e5", + "metadata": {}, + "source": [ + "## 1) Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "612a78e8", + "metadata": {}, + "outputs": [], + "source": [ + "import urllib.request\n", + "import torch\n", + "import numpy as np\n", + "from pathlib import Path\n", + "\n", + "# Ensure CUDA is available\n", + "if not torch.cuda.is_available():\n", + " raise RuntimeError(\"CUDA is required to run this demo. No CUDA device detected.\")\n", + "\n", + "device = torch.device(\"cuda:0\")\n", + "\n", + "# Resolve repository root to be robust to running from subdirectories (e.g., examples/)\n", + "repo_root = Path.cwd()\n", + "if not (repo_root / \"configs\").exists():\n", + " repo_root = repo_root.parent\n", + "\n", + "# Inline plotting\n", + "%matplotlib inline\n" + ] + }, + { + "cell_type": "markdown", + "id": "3facf37d-0a77-4222-8464-6e42182547f8", + "metadata": {}, + "source": [ + "## 2) Define Checkpoint Path" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16dcb883", + "metadata": {}, + "outputs": [], + "source": [ + "CHECKPOINT_DIR = repo_root / \"models\"\n", + "CHECKPOINT_NAME = \"checkpoint_38M.pth\" \n", + "CHECKPOINT_PATH = CHECKPOINT_DIR / CHECKPOINT_NAME\n", + "\n", + "# Ensure the models directory exists\n", + "CHECKPOINT_DIR.mkdir(parents=True, exist_ok=True) \n", + "\n", + "if not CHECKPOINT_PATH.exists():\n", + " print(f\"--- WARNING: Checkpoint not found at: {CHECKPOINT_PATH} ---\")\n", + " print(\"Please ensure 'checkpoint_38M.pth' is in the 'models/' directory.\")\n", + " print(\"If you cloned from Hugging Face, you may need to run 'git lfs pull'.\")\n", + " raise FileNotFoundError(f\"Model checkpoint not found at {CHECKPOINT_PATH}\")\n", + "else:\n", + " print(f\"Using existing checkpoint at {CHECKPOINT_PATH}\")" + ] + }, + { + "cell_type": "markdown", + "id": "9be77e34-0c7a-4056-822f-ed2e3e090c40", + "metadata": {}, + "source": [ + "## 3) Generate synthetic sine wave data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1127526c", + "metadata": {}, + "outputs": [], + "source": [ + "from src.synthetic_generation.generator_params import SineWaveGeneratorParams\n", + "from 
src.synthetic_generation.sine_waves.sine_wave_generator_wrapper import (\n", + " SineWaveGeneratorWrapper,\n", + ")\n", + "\n", + "batch_size = 3\n", + "total_length = 1024\n", + "seed = 2025\n", + "\n", + "sine_params = SineWaveGeneratorParams(global_seed=seed, length=total_length)\n", + "wrapper = SineWaveGeneratorWrapper(sine_params)\n", + "\n", + "batch = wrapper.generate_batch(batch_size=batch_size, seed=seed)\n", + "values = torch.from_numpy(batch.values).to(torch.float32)\n", + "if values.ndim == 2:\n", + " values = values.unsqueeze(-1) # [B, S, 1]\n", + "\n", + "future_length = 256\n", + "history_values = values[:, :-future_length, :]\n", + "future_values = values[:, -future_length:, :]\n", + "\n", + "print(\"History:\", history_values.shape, \"Future:\", future_values.shape)" + ] + }, + { + "cell_type": "markdown", + "id": "a8844488-e51c-4805-baa9-491bfc67e8ca", + "metadata": {}, + "source": [ + "## 4) Build BatchTimeSeriesContainer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f3b4d361", + "metadata": {}, + "outputs": [], + "source": [ + "from src.data.containers import BatchTimeSeriesContainer\n", + "\n", + "container = BatchTimeSeriesContainer(\n", + " history_values=history_values.to(device),\n", + " future_values=future_values.to(device),\n", + " start=batch.start,\n", + " frequency=batch.frequency,\n", + ")\n", + "\n", + "container.batch_size, container.history_length, container.future_length" + ] + }, + { + "cell_type": "markdown", + "id": "b5e7e790-a9aa-49c2-9d45-2dc823036883", + "metadata": {}, + "source": [ + "## 5) Load model and run inference" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1dd4e0e4", + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "from src.models.model import TimeSeriesModel\n", + "\n", + "with open(repo_root / \"configs/example.yaml\", \"r\") as f:\n", + " config = yaml.safe_load(f)\n", + "\n", + "model = TimeSeriesModel(**config[\"TimeSeriesModel\"]).to(device)\n", + "ckpt = torch.load(CHECKPOINT_PATH, map_location=device)\n", + "model.load_state_dict(ckpt[\"model_state_dict\"])\n", + "model.eval()\n", + "\n", + "# bfloat16 autocast on CUDA\n", + "with (\n", + " torch.no_grad(),\n", + " torch.autocast(device_type=\"cuda\", dtype=torch.bfloat16, enabled=True),\n", + "):\n", + " output = model(container)\n", + "\n", + "preds = output[\"result\"].to(torch.float32)\n", + "if hasattr(model, \"scaler\") and \"scale_statistics\" in output:\n", + " preds = model.scaler.inverse_scale(preds, output[\"scale_statistics\"])\n", + "\n", + "preds.shape" + ] + }, + { + "cell_type": "markdown", + "id": "ba16120f-27c8-4462-91cb-c9b3e0630a9d", + "metadata": {}, + "source": [ + "## 6) Plot predictions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9bf02a0b", + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "\n", + "plt.set_loglevel(\"error\")\n", + "\n", + "# preds: [B, P, N, Q] for quantiles (univariate -> N=1)\n", + "preds_np = preds.cpu().numpy()\n", + "\n", + "batch_size = preds_np.shape[0]\n", + "prediction_length = preds_np.shape[1]\n", + "num_quantiles = preds_np.shape[-1]\n", + "\n", + "for i in range(batch_size):\n", + " fig, ax = plt.subplots(figsize=(12, 4))\n", + "\n", + " history = container.history_values[i, :, 0].detach().cpu().numpy()\n", + " future = container.future_values[i, :, 0].detach().cpu().numpy()\n", + "\n", + " # Time axes\n", + " hist_t = np.arange(len(history))\n", + " fut_t = np.arange(len(history), 
len(history) + len(future))\n",
+    "\n",
+    "    # Plot history and ground truth future\n",
+    "    ax.plot(hist_t, history, label=\"History\", color=\"black\")\n",
+    "    ax.plot(fut_t, future, label=\"Ground Truth\", color=\"blue\")\n",
+    "\n",
+    "    # Plot quantiles\n",
+    "    median_idx = num_quantiles // 2\n",
+    "    ax.plot(\n",
+    "        fut_t,\n",
+    "        preds_np[i, :, 0, median_idx],\n",
+    "        label=\"Prediction (Median)\",\n",
+    "        color=\"orange\",\n",
+    "        linestyle=\"--\",\n",
+    "    )\n",
+    "    if num_quantiles >= 3:\n",
+    "        ax.fill_between(\n",
+    "            fut_t,\n",
+    "            preds_np[i, :, 0, 0],\n",
+    "            preds_np[i, :, 0, -1],\n",
+    "            color=\"orange\",\n",
+    "            alpha=0.2,\n",
+    "            label=\"Prediction Interval\",\n",
+    "        )\n",
+    "\n",
+    "    ax.axvline(x=len(history), color=\"k\", linestyle=\":\", alpha=0.7)\n",
+    "    ax.set_xlabel(\"Time Steps\")\n",
+    "    ax.set_ylabel(\"Value\")\n",
+    "    ax.set_title(f\"Sample {i + 1}\")\n",
+    "    ax.legend()\n",
+    "    ax.grid(True, alpha=0.3)\n",
+    "    plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.12.9"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/quick_start_tempo_pfn.py b/examples/quick_start_tempo_pfn.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d601fccc43deef2e803ea41a9c21bbcd6884282
--- /dev/null
+++ b/examples/quick_start_tempo_pfn.py
@@ -0,0 +1,101 @@
+import argparse
+import logging
+import os
+
+import torch
+
+from examples.utils import (
+    load_model,
+    run_inference_and_plot,
+)
+from src.data.containers import BatchTimeSeriesContainer
+from src.synthetic_generation.generator_params import SineWaveGeneratorParams
+from src.synthetic_generation.sine_waves.sine_wave_generator_wrapper import (
+    SineWaveGeneratorWrapper,
+)
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
+)
+logger = logging.getLogger(__name__)
+
+
+def main():
+    """Main execution function."""
+    # CLI
+    parser = argparse.ArgumentParser(description="Quick start demo for TimeSeriesModel")
+    parser.add_argument(
+        "--config",
+        default="configs/example.yaml",
+        help="Path to model config YAML (default: configs/example.yaml)",
+    )
+    parser.add_argument(
+        "--checkpoint",
+        default="models/checkpoint_38M.pth",
+        help="Path to model checkpoint file (default: models/checkpoint_38M.pth)",
+    )
+    parser.add_argument("--batch_size", type=int, default=3)
+    parser.add_argument("--total_length", type=int, default=2048)
+    parser.add_argument("--seed", type=int, default=42)
+    parser.add_argument("--output_dir", default="outputs")
+    args = parser.parse_args()
+
+    # Configuration
+    batch_size = args.batch_size
+    total_length = args.total_length
+    output_dir = args.output_dir
+    seed = args.seed
+    config_path = args.config
+    model_path = args.checkpoint
+
+
+    # Check if the checkpoint file exists
+    if not os.path.exists(model_path):
+        logger.error(f"Checkpoint file not found at: {model_path}")
+        logger.error(
+            "Please ensure 'checkpoint_38M.pth' is in the 'models/' directory"
+            " (or that you've cloned the repo with Git LFS)."
+ ) + logger.error("You can also specify a different path using --checkpoint.") + return # Exit if no model + + logger.info("=== Time Series Model Demo (Univariate Quantile) ===") + + # 1) Generate synthetic sine wave data + sine_params = SineWaveGeneratorParams(global_seed=seed, length=total_length) + sine_generator = SineWaveGeneratorWrapper(sine_params) + batch = sine_generator.generate_batch(batch_size=batch_size, seed=seed) + values = torch.from_numpy(batch.values).to(torch.float32) + if values.ndim == 2: + values = values.unsqueeze(-1) # Ensure [B, S, 1] for univariate + future_length = 256 + history_values = values[:, :-future_length, :] + future_values = values[:, -future_length:, :] + + # 2) Load the pretrained model (CUDA-only). This demo requires a CUDA GPU. + if not torch.cuda.is_available(): + raise RuntimeError( + "CUDA is required to run this demo. No CUDA device detected." + ) + device = torch.device("cuda:0") + model = load_model(config_path=config_path, model_path=model_path, device=device) + + # 3) Pack tensors into the model's input container + container = BatchTimeSeriesContainer( + history_values=history_values.to(device), + future_values=future_values.to(device), + start=batch.start, + frequency=batch.frequency, + ) + + # 4) Run inference (bfloat16 on CUDA) and plot results + run_inference_and_plot( + model=model, container=container, output_dir=output_dir, use_bfloat16=True + ) + + logger.info("=== Demo completed successfully! ===") + + +if __name__ == "__main__": + main() diff --git a/examples/utils.py b/examples/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..898f000d9e7700b6aa3c81ec5bc0064456f7e24e --- /dev/null +++ b/examples/utils.py @@ -0,0 +1,115 @@ +import logging +import os +import urllib.request +from typing import List + +import numpy as np +import torch +import yaml + +from src.data.containers import BatchTimeSeriesContainer +from src.models.model import TimeSeriesModel +from src.plotting.plot_timeseries import plot_from_container + +logger = logging.getLogger(__name__) + + +def load_model( + config_path: str, model_path: str, device: torch.device +) -> TimeSeriesModel: + """Load the TimeSeriesModel from config and checkpoint.""" + with open(config_path, "r") as f: + config = yaml.safe_load(f) + + model = TimeSeriesModel(**config["TimeSeriesModel"]).to(device) + checkpoint = torch.load(model_path, map_location=device) + model.load_state_dict(checkpoint["model_state_dict"]) + model.eval() + logger.info(f"Successfully loaded TimeSeriesModel from {model_path} on {device}") + return model + + +def download_checkpoint_if_needed(url: str, target_dir: str = "models") -> str: + """Download checkpoint from URL into target_dir if not present and return its path. + + Ensures direct download for Dropbox links by forcing dl=1. 
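+
+    Example (illustrative; the Dropbox URL below is a placeholder, not a real
+    checkpoint link)::
+
+        path = download_checkpoint_if_needed(
+            "https://www.dropbox.com/s/<file-id>/checkpoint.pth?dl=0",
+            target_dir="models",
+        )
+        # A second call finds models/checkpoint.pth and skips the download.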
+ """ + os.makedirs(target_dir, exist_ok=True) + target_path = os.path.join(target_dir, "checkpoint.pth") + + # Normalize Dropbox URL to force direct download + if "dropbox.com" in url and "dl=0" in url: + url = url.replace("dl=0", "dl=1") + + if not os.path.exists(target_path): + logger.info(f"Downloading checkpoint from {url} to {target_path}...") + urllib.request.urlretrieve(url, target_path) + logger.info("Checkpoint downloaded successfully.") + else: + logger.info(f"Using existing checkpoint at {target_path}") + + return target_path + + +def plot_with_library( + container: BatchTimeSeriesContainer, + predictions_np: np.ndarray, # [B, P, N, Q] + model_quantiles: List[float] | None, + output_dir: str = "outputs", + show_plots: bool = True, + save_plots: bool = True, +): + os.makedirs(output_dir, exist_ok=True) + batch_size = container.batch_size + for i in range(batch_size): + output_file = ( + os.path.join(output_dir, f"sine_wave_prediction_sample_{i + 1}.png") + if save_plots + else None + ) + plot_from_container( + batch=container, + sample_idx=i, + predicted_values=predictions_np, + model_quantiles=model_quantiles, + title=f"Sine Wave Time Series Prediction - Sample {i + 1}", + output_file=output_file, + show=show_plots, + ) + + +def run_inference_and_plot( + model: TimeSeriesModel, + container: BatchTimeSeriesContainer, + output_dir: str = "outputs", + use_bfloat16: bool = True, +) -> None: + """Run model inference with optional bfloat16 and plot using shared utilities.""" + device_type = "cuda" if (container.history_values.device.type == "cuda") else "cpu" + autocast_enabled = use_bfloat16 and device_type == "cuda" + with ( + torch.no_grad(), + torch.autocast( + device_type=device_type, dtype=torch.bfloat16, enabled=autocast_enabled + ), + ): + model_output = model(container) + + preds_full = model_output["result"].to(torch.float32) + if hasattr(model, "scaler") and "scale_statistics" in model_output: + preds_full = model.scaler.inverse_scale( + preds_full, model_output["scale_statistics"] + ) + + preds_np = preds_full.detach().cpu().numpy() + model_quantiles = ( + model.quantiles if getattr(model, "loss_type", None) == "quantile" else None + ) + plot_with_library( + container=container, + predictions_np=preds_np, + model_quantiles=model_quantiles, + output_dir=output_dir, + show_plots=True, + save_plots=True, + ) diff --git a/gitignore b/gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6b43665e9f6d46ce18ddc6b19df508c90da057b2 --- /dev/null +++ b/gitignore @@ -0,0 +1,167 @@ +logs/ +*.png +*.pth +# *.sh +*.slurm +*.pkl + +wandb/ +AutogluonModels/ +.vscode/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +.idea/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Datasets, logs, plots, etc. 
+outputs/ + +*.arrow +*.csv +*.png +*.pdf +*.gif +.DS_Store \ No newline at end of file diff --git a/models/checkpoint_38M.pth b/models/checkpoint_38M.pth new file mode 100644 index 0000000000000000000000000000000000000000..163bfd388d711a1ffb9bcb0a4099c39e255c8e71 --- /dev/null +++ b/models/checkpoint_38M.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a520c07e6f4dc6583b25a7129251c81eef15f168003766adf6ae4983db7b575b +size 498752361 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..5dd7182f382342ac71673b0877f254b38962628f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,62 @@ +[project] +name = "TempoPFN" +version = "0.1.0" +description = "Univariate Time Series Forecasting Using Linear RNNs" +authors = [ + { name = "Vladyslav Moroshan" }, + { name = "Julien Siems" }, +] +readme = "README.md" +license = { file = "LICENSE" } +requires-python = ">=3.10,<3.13" + +dependencies = [ + "torch>=2.5.0", + "torchmetrics", + "triton==3.2.0", + "numpy", + "pandas", + "matplotlib", + "gpytorch", + "flash-linear-attention @ git+https://github.com/fla-org/flash-linear-attention@main", + "scikit-learn", + "gluonts", + "notebook", + "datasets", + "ujson", +] + +classifiers = [ + "Intended Audience :: Science/Research", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Topic :: Software Development", + "Topic :: Scientific/Engineering", + "Operating System :: POSIX", + "Operating System :: Unix", + "Operating System :: MacOS", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] + +[project.optional-dependencies] +dev = [ + "wandb", + "build", + "pre-commit", + "ruff", + "mypy", + "commitizen", + "black", + "cupy-cuda12x", + "statsmodels", + "pyo", # Requires portaudio +] + +[build-system] +requires = ["setuptools>=68.2.2", "wheel>=0.41.2"] +build-backend = "setuptools.build_meta" + +package-dir = {"" = "src"} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a4c41a12b9f81a37c4c28627019b228a68ee3ae --- /dev/null +++ b/requirements.txt @@ -0,0 +1,25 @@ +# 'torch' must be installed separately first, using the command +# from the README.md to match your specific CUDA version. 
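+# For example (illustrative only; follow the README for the exact command):
+#   pip install torch --index-url https://download.pytorch.org/whl/cu121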
+ +torchmetrics +triton==3.2.0 +numpy +pandas +matplotlib +flash-linear-attention @ git+https://github.com/fla-org/flash-linear-attention@main +scikit-learn +gluonts +notebook +datasets +ujson +pyyaml +wandb +build +pre-commit +ruff +mypy +commitizen +black +cupy-cuda12x +statsmodels +pyo # Requires portaudio \ No newline at end of file diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/data/__init__.py b/src/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/data/augmentations.py b/src/data/augmentations.py new file mode 100644 index 0000000000000000000000000000000000000000..11250519e5174e544cab6f731f36894c198fb33a --- /dev/null +++ b/src/data/augmentations.py @@ -0,0 +1,1318 @@ +import logging +import math +from collections import Counter +from pathlib import Path +from typing import Dict, List, Optional, Tuple + +import numpy as np +import torch +import torch.nn as nn +from joblib import Parallel, delayed +from torch.quasirandom import SobolEngine +import torch.nn.functional as F + + +from src.gift_eval.data import Dataset + +logger = logging.getLogger(__name__) + + +def find_consecutive_nan_lengths(series: np.ndarray) -> list[int]: + """Finds the lengths of all consecutive NaN blocks in a 1D array.""" + if series.ndim > 1: + # For multivariate series, flatten to treat it as one long sequence + series = series.flatten() + + is_nan = np.isnan(series) + padded_is_nan = np.concatenate(([False], is_nan, [False])) + diffs = np.diff(padded_is_nan.astype(int)) + + start_indices = np.where(diffs == 1)[0] + end_indices = np.where(diffs == -1)[0] + + return (end_indices - start_indices).tolist() + + +def analyze_datasets_for_augmentation(gift_eval_path_str: str) -> dict: + """ + Analyzes all datasets to derive statistics needed for NaN augmentation. + This version collects the full distribution of NaN ratios. + """ + logger.info( + "--- Starting Dataset Analysis for Augmentation (Full Distribution) ---" + ) + path = Path(gift_eval_path_str) + if not path.exists(): + raise FileNotFoundError( + f"Provided raw data path for augmentation analysis does not exist: {gift_eval_path_str}" + ) + + dataset_names = [] + for dataset_dir in path.iterdir(): + if dataset_dir.name.startswith(".") or not dataset_dir.is_dir(): + continue + freq_dirs = [d for d in dataset_dir.iterdir() if d.is_dir()] + if freq_dirs: + for freq_dir in freq_dirs: + dataset_names.append(f"{dataset_dir.name}/{freq_dir.name}") + else: + dataset_names.append(dataset_dir.name) + + total_series_count = 0 + series_with_nans_count = 0 + nan_ratio_distribution = [] + all_consecutive_nan_lengths = Counter() + + for ds_name in sorted(dataset_names): + try: + ds = Dataset(name=ds_name, term="short", to_univariate=False) + for series_data in ds.training_dataset: + total_series_count += 1 + target = np.atleast_1d(series_data["target"]) + num_nans = np.isnan(target).sum() + + if num_nans > 0: + series_with_nans_count += 1 + nan_ratio = num_nans / target.size + nan_ratio_distribution.append(float(nan_ratio)) + + nan_lengths = find_consecutive_nan_lengths(target) + all_consecutive_nan_lengths.update(nan_lengths) + except Exception as e: + logger.warning( + f"Could not process {ds_name} for augmentation analysis: {e}" + ) + + if total_series_count == 0: + raise ValueError( + "No series were found during augmentation analysis. 
Check dataset path." + ) + + p_series_has_nan = ( + series_with_nans_count / total_series_count if total_series_count > 0 else 0 + ) + + logger.info("--- Augmentation Analysis Complete ---") + # Print summary statistics + logger.info(f"Total series analyzed: {total_series_count}") + logger.info(f"Series with NaNs: {series_with_nans_count} ({p_series_has_nan:.4f})") + logger.info(f"NaN ratio distribution: {Counter(nan_ratio_distribution)}") + logger.info(f"Consecutive NaN lengths distribution: {all_consecutive_nan_lengths}") + logger.info("--- End of Dataset Analysis for Augmentation ---") + return { + "p_series_has_nan": p_series_has_nan, + "nan_ratio_distribution": nan_ratio_distribution, + "nan_length_distribution": all_consecutive_nan_lengths, + } + + +class NanAugmenter: + """ + Applies realistic NaN augmentation by generating and caching NaN patterns on-demand + during the first transform call for a given data shape. + """ + + def __init__( + self, + p_series_has_nan: float, + nan_ratio_distribution: List[float], + nan_length_distribution: Counter, + num_patterns: int = 100000, + n_jobs: int = -1, + nan_patterns_path: Optional[str] = None, + ): + """ + Initializes the augmenter. NaN patterns are not generated at this stage. + + Args: + p_series_has_nan (float): Probability that a series in a batch will be augmented. + nan_ratio_distribution (List[float]): A list of NaN ratios observed in the dataset. + nan_length_distribution (Counter): A Counter of consecutive NaN block lengths. + num_patterns (int): The number of unique NaN patterns to generate per data shape. + n_jobs (int): The number of CPU cores to use for parallel pattern generation (-1 for all cores). + """ + self.p_series_has_nan = p_series_has_nan + self.nan_ratio_distribution = nan_ratio_distribution + self.num_patterns = num_patterns + self.n_jobs = n_jobs + self.max_length = 2048 + self.nan_patterns_path = nan_patterns_path + # Cache to store patterns: Dict[shape_tuple -> pattern_tensor] + self.pattern_cache: Dict[Tuple[int, ...], torch.BoolTensor] = {} + + if not nan_length_distribution or sum(nan_length_distribution.values()) == 0: + self._has_block_distribution = False + logger.warning("NaN length distribution is empty. Augmentation disabled.") + else: + self._has_block_distribution = True + total_blocks = sum(nan_length_distribution.values()) + self.dist_lengths = list(int(i) for i in nan_length_distribution.keys()) + self.dist_probs = [ + count / total_blocks for count in nan_length_distribution.values() + ] + + if not self.nan_ratio_distribution: + logger.warning("NaN ratio distribution is empty. 
Augmentation disabled.") + + # Try to load existing patterns from disk + self._load_existing_patterns() + + def _load_existing_patterns(self): + """Load existing NaN patterns from disk if they exist.""" + # Determine where to look for patterns + explicit_path: Optional[Path] = ( + Path(self.nan_patterns_path).resolve() + if self.nan_patterns_path is not None + else None + ) + + candidate_files: List[Path] = [] + if explicit_path is not None: + # If the explicit path exists, use it directly + if explicit_path.is_file(): + candidate_files.append(explicit_path) + # Also search the directory of the explicit path for matching files + explicit_dir = explicit_path.parent + explicit_dir.mkdir(exist_ok=True, parents=True) + candidate_files.extend( + list(explicit_dir.glob(f"nan_patterns_{self.max_length}_*.pt")) + ) + else: + # Default to the ./data directory + data_dir = Path("data") + data_dir.mkdir(exist_ok=True) + candidate_files.extend( + list(data_dir.glob(f"nan_patterns_{self.max_length}_*.pt")) + ) + + # De-duplicate candidate files while preserving order + seen: set[str] = set() + unique_candidates: List[Path] = [] + for f in candidate_files: + key = str(f.resolve()) + if key not in seen: + seen.add(key) + unique_candidates.append(f) + + for pattern_file in unique_candidates: + try: + # Extract num_channels from filename + filename = pattern_file.stem + parts = filename.split("_") + if len(parts) >= 4: + num_channels = int(parts[-1]) + + # Load patterns + patterns = torch.load(pattern_file, map_location="cpu") + cache_key = (self.max_length, num_channels) + self.pattern_cache[cache_key] = patterns + + logger.info( + f"Loaded {patterns.shape[0]} patterns for shape {cache_key} from {pattern_file}" + ) + except (ValueError, RuntimeError, FileNotFoundError) as e: + logger.warning(f"Failed to load patterns from {pattern_file}: {e}") + + def _get_pattern_file_path(self, num_channels: int) -> Path: + """Resolve the target file path for storing/loading patterns for a given channel count.""" + # If user provided a file path, use its directory as the base directory + if self.nan_patterns_path is not None: + base_dir = Path(self.nan_patterns_path).resolve().parent + base_dir.mkdir(exist_ok=True, parents=True) + else: + base_dir = Path("data").resolve() + base_dir.mkdir(exist_ok=True, parents=True) + + return base_dir / f"nan_patterns_{self.max_length}_{num_channels}.pt" + + def _generate_nan_mask(self, series_shape: Tuple[int, ...]) -> np.ndarray: + """Generates a single boolean NaN mask for a given series shape.""" + series_size = int(np.prod(series_shape)) + sampled_ratio = np.random.choice(self.nan_ratio_distribution) + n_nans_to_add = int(round(series_size * sampled_ratio)) + + if n_nans_to_add == 0: + return np.zeros(series_shape, dtype=bool) + + mask_flat = np.zeros(series_size, dtype=bool) + nans_added = 0 + max_attempts = n_nans_to_add * 2 + attempts = 0 + while nans_added < n_nans_to_add and attempts < max_attempts: + attempts += 1 + block_length = np.random.choice(self.dist_lengths, p=self.dist_probs) + + if nans_added + block_length > n_nans_to_add: + block_length = n_nans_to_add - nans_added + if block_length <= 0: + break + + nan_counts_in_window = np.convolve( + mask_flat, np.ones(block_length), mode="valid" + ) + valid_starts = np.where(nan_counts_in_window == 0)[0] + + if valid_starts.size == 0: + continue + + start_pos = np.random.choice(valid_starts) + mask_flat[start_pos : start_pos + block_length] = True + nans_added += block_length + + return mask_flat.reshape(series_shape) + + 
def _pregenerate_patterns(self, series_shape: Tuple[int, ...]) -> torch.BoolTensor: + """Uses joblib to parallelize the generation of NaN masks for a given shape.""" + if not self._has_block_distribution or not self.nan_ratio_distribution: + return torch.empty(0, *series_shape, dtype=torch.bool) + + logger.info( + f"Generating {self.num_patterns} NaN patterns for shape {series_shape}..." + ) + + with Parallel(n_jobs=self.n_jobs, backend="loky") as parallel: + masks_list = parallel( + delayed(self._generate_nan_mask)(series_shape) + for _ in range(self.num_patterns) + ) + + logger.info(f"Pattern generation complete for shape {series_shape}.") + return torch.from_numpy(np.stack(masks_list)).bool() + + def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor: + """ + Applies NaN patterns to a batch, generating them on-demand if the shape is new. + """ + if self.p_series_has_nan == 0: + return time_series_batch + + history_length, num_channels = time_series_batch.shape[1:] + assert history_length <= self.max_length, ( + f"History length {history_length} exceeds maximum allowed {self.max_length}." + ) + + # 1. Check cache and generate patterns if the shape is new + if ( + self.max_length, + num_channels, + ) not in self.pattern_cache: + # Try loading from a resolved file path if available + target_file = self._get_pattern_file_path(num_channels) + if target_file.exists(): + try: + patterns = torch.load(target_file, map_location="cpu") + self.pattern_cache[(self.max_length, num_channels)] = patterns + logger.info( + f"Loaded NaN patterns from {target_file} for shape {(self.max_length, num_channels)}" + ) + except (RuntimeError, FileNotFoundError): + # Fall back to generating if loading fails + patterns = self._pregenerate_patterns( + (self.max_length, num_channels) + ) + torch.save(patterns, target_file) + self.pattern_cache[(self.max_length, num_channels)] = patterns + logger.info( + f"Generated and saved {patterns.shape[0]} NaN patterns to {target_file}" + ) + else: + patterns = self._pregenerate_patterns((self.max_length, num_channels)) + torch.save(patterns, target_file) + self.pattern_cache[(self.max_length, num_channels)] = patterns + logger.info( + f"Generated and saved {patterns.shape[0]} NaN patterns to {target_file}" + ) + patterns = self.pattern_cache[(self.max_length, num_channels)][ + :, :history_length, : + ] + + # Early exit if patterns are empty (e.g., generation failed or was disabled) + if patterns.numel() == 0: + return time_series_batch + + batch_size = time_series_batch.shape[0] + device = time_series_batch.device + + # 2. Vectorized decision on which series to augment + augment_mask = torch.rand(batch_size, device=device) < self.p_series_has_nan + indices_to_augment = torch.where(augment_mask)[0] + num_to_augment = indices_to_augment.numel() + + if num_to_augment == 0: + return time_series_batch + + # 3. Randomly sample patterns for each series being augmented + pattern_indices = torch.randint( + 0, patterns.shape[0], (num_to_augment,), device=device + ) + # 4. Select patterns and apply them in a single vectorized operation + selected_patterns = patterns[pattern_indices].to(device) + + time_series_batch[indices_to_augment] = time_series_batch[ + indices_to_augment + ].masked_fill(selected_patterns, float("nan")) + + return time_series_batch + + +class CensorAugmenter: + """ + Applies censor augmentation by clipping values from above, below, or both. 
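+
+    The clipping thresholds are drawn from each series' own empirical quantiles,
+    and the operation applied to a given series is chosen at random.
+
+    Example (illustrative sketch; shapes are arbitrary)::
+
+        aug = CensorAugmenter()
+        x = torch.randn(8, 128, 1)   # univariate input of shape [B, S, 1]
+        y = aug.transform(x)         # same shape, with some series censored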
+ """ + + def __init__(self): + """Initializes the CensorAugmenter.""" + pass + + def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor: + """ + Applies a vectorized censor augmentation to a batch of time series. + """ + batch_size, seq_len, num_channels = time_series_batch.shape + assert num_channels == 1 + time_series_batch = time_series_batch.squeeze(-1) + with torch.no_grad(): + batch_size, seq_len = time_series_batch.shape + device = time_series_batch.device + + # Step 1: Choose an op mode for each series + op_mode = torch.randint(0, 3, (batch_size, 1), device=device) + + # Step 2: Calculate potential thresholds for all series + q1 = torch.rand(batch_size, device=device) + q2 = torch.rand(batch_size, device=device) + q_low = torch.minimum(q1, q2) + q_high = torch.maximum(q1, q2) + + sorted_series = torch.sort(time_series_batch, dim=1).values + indices_low = (q_low * (seq_len - 1)).long() + indices_high = (q_high * (seq_len - 1)).long() + + c_low = torch.gather(sorted_series, 1, indices_low.unsqueeze(1)) + c_high = torch.gather(sorted_series, 1, indices_high.unsqueeze(1)) + + # Step 3: Compute results for all possible clipping operations + clip_above = torch.minimum(time_series_batch, c_high) + clip_below = torch.maximum(time_series_batch, c_low) + + # Step 4: Select the final result based on the op_mode + result = torch.where( + op_mode == 1, + clip_above, + torch.where(op_mode == 2, clip_below, time_series_batch), + ) + augmented_batch = torch.where( + op_mode == 0, + time_series_batch, + result, + ) + + return augmented_batch.unsqueeze(-1) + + +class QuantizationAugmenter: + """ + Applies non-equidistant quantization using a Sobol sequence to generate + uniformly distributed levels. This implementation is fully vectorized. + """ + + def __init__( + self, + p_quantize: float, + level_range: Tuple[int, int], + seed: Optional[int] = None, + ): + """ + Initializes the augmenter. + + Args: + p_quantize (float): Probability of applying quantization to a series. + level_range (Tuple[int, int]): Inclusive range [min, max] to sample the + number of quantization levels from. + seed (Optional[int]): Seed for the Sobol sequence generator for reproducibility. + """ + assert 0.0 <= p_quantize <= 1.0, "Probability must be between 0 and 1." + assert level_range[0] >= 2, "Minimum number of levels must be at least 2." + assert level_range[0] <= level_range[1], ( + "Min levels cannot be greater than max." + ) + + self.p_quantize = p_quantize + self.level_range = level_range + + # Initialize a SobolEngine. The dimension is the max number of random + # levels we might need to generate for a single series. + max_intermediate_levels = self.level_range[1] - 2 + if max_intermediate_levels > 0: + # SobolEngine must be created on CPU + self.sobol_engine = SobolEngine( + dimension=max_intermediate_levels, scramble=True, seed=seed + ) + else: + self.sobol_engine = None + + def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor: + """ + Applies augmentation in a fully vectorized way on the batch's device. + Handles input shape (batch, length, 1). + """ + # Handle input shape (batch, length, 1) + if time_series_batch.dim() == 3 and time_series_batch.shape[2] == 1: + is_3d = True + time_series_squeezed = time_series_batch.squeeze(-1) + else: + is_3d = False + time_series_squeezed = time_series_batch + + if self.p_quantize == 0 or self.sobol_engine is None: + return time_series_batch + + n_series, _ = time_series_squeezed.shape + device = time_series_squeezed.device + + # 1. 
Decide which series to augment + augment_mask = torch.rand(n_series, device=device) < self.p_quantize + n_augment = torch.sum(augment_mask) + if n_augment == 0: + return time_series_batch + + series_to_augment = time_series_squeezed[augment_mask] + + # 2. Determine a variable n_levels for EACH series + min_l, max_l = self.level_range + n_levels_per_series = torch.randint( + min_l, max_l + 1, size=(n_augment,), device=device + ) + max_levels_in_batch = n_levels_per_series.max().item() + + # 3. Find min/max for each series + min_vals = torch.amin(series_to_augment, dim=1, keepdim=True) + max_vals = torch.amax(series_to_augment, dim=1, keepdim=True) + value_range = max_vals - min_vals + is_flat = value_range == 0 + + # 4. Generate quasi-random levels using the Sobol sequence + num_intermediate_levels = max_levels_in_batch - 2 + if num_intermediate_levels > 0: + # Draw points from the Sobol engine (on CPU) and move to target device + sobol_points = self.sobol_engine.draw(n_augment).to(device) + # We only need the first `num_intermediate_levels` dimensions + quasi_rand_points = sobol_points[:, :num_intermediate_levels] + else: + # Handle case where max_levels_in_batch is 2 (no intermediate points needed) + quasi_rand_points = torch.empty(n_augment, 0, device=device) + + scaled_quasi_rand_levels = min_vals + value_range * quasi_rand_points + level_values = torch.cat([min_vals, max_vals, scaled_quasi_rand_levels], dim=1) + level_values, _ = torch.sort(level_values, dim=1) + + # 5. Find the closest level using a mask to ignore padded values + series_expanded = series_to_augment.unsqueeze(2) + levels_expanded = level_values.unsqueeze(1) + diff = torch.abs(series_expanded - levels_expanded) + + arange_mask = torch.arange(max_levels_in_batch, device=device).unsqueeze(0) + valid_levels_mask = arange_mask < n_levels_per_series.unsqueeze(1) + masked_diff = torch.where(valid_levels_mask.unsqueeze(1), diff, float("inf")) + closest_level_indices = torch.argmin(masked_diff, dim=2) + + # 6. Gather the results from the original level values + quantized_subset = torch.gather(level_values, 1, closest_level_indices) + + # 7. For flat series, revert to their original values + final_subset = torch.where(is_flat, series_to_augment, quantized_subset) + + # 8. Place augmented data back into a copy of the original batch + augmented_batch_squeezed = time_series_squeezed.clone() + augmented_batch_squeezed[augment_mask] = final_subset + + # Restore original shape before returning + if is_3d: + return augmented_batch_squeezed.unsqueeze(-1) + else: + return augmented_batch_squeezed + + +class MixUpAugmenter: + """ + Applies mixup augmentation by creating a weighted average of multiple time series. + + This version includes an option for time-dependent mixup using Simplex Path + Interpolation, creating a smooth transition between different mixing weights. + """ + + def __init__( + self, + max_n_series_to_combine: int = 10, + p_combine: float = 0.4, + p_time_dependent: float = 0.5, + randomize_k_per_series: bool = True, + dirichlet_alpha_range: Tuple[float, float] = (0.1, 5.0), + ): + """ + Initializes the augmenter. + + Args: + max_n_series_to_combine (int): The maximum number of series to combine. + The actual number k will be sampled from [2, max]. + p_combine (float): The probability of replacing a series with a combination. + p_time_dependent (float): The probability of using the time-dependent + simplex path method for a given mixup operation. Defaults to 0.5. 
+ randomize_k_per_series (bool): If True, each augmented series will be a + combination of a different number of series (k). + If False, one k is chosen for the whole batch. + dirichlet_alpha_range (Tuple[float, float]): The [min, max] range to sample the + Dirichlet 'alpha' from. A smaller alpha (e.g., 0.2) creates mixes + dominated by one series. A larger alpha (e.g., 5.0) creates + more uniform weights. + """ + assert max_n_series_to_combine >= 2, "Must combine at least 2 series." + assert 0.0 <= p_combine <= 1.0, "p_combine must be between 0 and 1." + assert 0.0 <= p_time_dependent <= 1.0, ( + "p_time_dependent must be between 0 and 1." + ) + assert ( + dirichlet_alpha_range[0] > 0 + and dirichlet_alpha_range[0] <= dirichlet_alpha_range[1] + ) + self.max_k = max_n_series_to_combine + self.p_combine = p_combine + self.p_time_dependent = p_time_dependent + self.randomize_k = randomize_k_per_series + self.alpha_range = dirichlet_alpha_range + + def _sample_alpha(self) -> float: + log_alpha_min = math.log10(self.alpha_range[0]) + log_alpha_max = math.log10(self.alpha_range[1]) + log_alpha = log_alpha_min + np.random.rand() * (log_alpha_max - log_alpha_min) + return float(10**log_alpha) + + def _sample_k(self) -> int: + return int(torch.randint(2, self.max_k + 1, (1,)).item()) + + def _static_mix( + self, + source_series: torch.Tensor, + alpha: float, + return_weights: bool = False, + ): + """Mixes k source series using a single, static set of Dirichlet weights.""" + k = int(source_series.shape[0]) + device = source_series.device + concentration = torch.full((k,), float(alpha), device=device) + weights = torch.distributions.Dirichlet(concentration).sample() + weights_view = weights.view(k, 1, 1) + mixed_series = (source_series * weights_view).sum(dim=0, keepdim=True) + if return_weights: + return mixed_series, weights + return mixed_series + + def _simplex_path_mix( + self, + source_series: torch.Tensor, + alpha: float, + return_weights: bool = False, + ): + """Mixes k series using time-varying weights interpolated along a simplex path.""" + k, length, _ = source_series.shape + device = source_series.device + + # 1. Sample two endpoint weight vectors from the Dirichlet distribution + concentration = torch.full((k,), float(alpha), device=device) + dirichlet_dist = torch.distributions.Dirichlet(concentration) + w_start = dirichlet_dist.sample() + w_end = dirichlet_dist.sample() + + # 2. Create a linear ramp from 0 to 1 + alpha_ramp = torch.linspace(0, 1, length, device=device) + + # 3. Interpolate between the endpoint weights over time + # Reshape for broadcasting: w vectors become [k, 1], ramp becomes [1, length] + time_varying_weights = w_start.unsqueeze(1) * ( + 1 - alpha_ramp.unsqueeze(0) + ) + w_end.unsqueeze(1) * alpha_ramp.unsqueeze(0) + # The result `time_varying_weights` has shape [k, length] + + # 4. Apply the time-varying weights + weights_view = time_varying_weights.unsqueeze(-1) # Shape: [k, length, 1] + mixed_series = (source_series * weights_view).sum(dim=0, keepdim=True) + + if return_weights: + return mixed_series, time_varying_weights + return mixed_series + + def transform( + self, time_series_batch: torch.Tensor, return_debug_info: bool = False + ): + """ + Applies the mixup augmentation, randomly choosing between static and + time-dependent mixing methods. 
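+
+        Example (illustrative sketch; shapes and parameter values are arbitrary)::
+
+            aug = MixUpAugmenter(max_n_series_to_combine=4, p_combine=0.5)
+            x = torch.randn(32, 256, 1)   # batch size must exceed max_n_series_to_combine
+            y = aug.transform(x)          # same shape; some series replaced by mixes
+            y, info = aug.transform(x, return_debug_info=True)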
+ """ + with torch.no_grad(): + if self.p_combine == 0: + return ( + (time_series_batch, {}) if return_debug_info else time_series_batch + ) + + batch_size, _, _ = time_series_batch.shape + device = time_series_batch.device + + if batch_size <= self.max_k: + return ( + (time_series_batch, {}) if return_debug_info else time_series_batch + ) + + # 1. Decide which series to replace + augment_mask = torch.rand(batch_size, device=device) < self.p_combine + indices_to_replace = torch.where(augment_mask)[0] + n_augment = indices_to_replace.numel() + + if n_augment == 0: + return ( + (time_series_batch, {}) if return_debug_info else time_series_batch + ) + + # 2. Determine k for each series to augment + if self.randomize_k: + k_values = torch.randint(2, self.max_k + 1, (n_augment,), device=device) + else: + k = self._sample_k() + k_values = torch.full((n_augment,), k, device=device) + + # 3. Augment series one by one + new_series_list = [] + all_batch_indices = torch.arange(batch_size, device=device) + debug_info = {} + + for i, target_idx in enumerate(indices_to_replace): + current_k = k_values[i].item() + + # Sample source indices + candidate_mask = all_batch_indices != target_idx + candidates = all_batch_indices[candidate_mask] + perm = torch.randperm(candidates.shape[0], device=device) + source_indices = candidates[perm[:current_k]] + source_series = time_series_batch[source_indices] + + alpha = self._sample_alpha() + mix_type = "static" + + # Randomly choose between static and time-dependent mixup + if torch.rand(1).item() < self.p_time_dependent: + mixed_series, weights = self._simplex_path_mix( + source_series, alpha=alpha, return_weights=True + ) + mix_type = "simplex" + else: + mixed_series, weights = self._static_mix( + source_series, alpha=alpha, return_weights=True + ) + + new_series_list.append(mixed_series) + + if return_debug_info: + debug_info[target_idx.item()] = { + "source_indices": source_indices.cpu().numpy(), + "weights": weights.cpu().numpy(), + "alpha": alpha, + "k": current_k, + "mix_type": mix_type, + } + + # 4. Place augmented series back into a clone of the original batch + augmented_batch = time_series_batch.clone() + if new_series_list: + new_series_tensor = torch.cat(new_series_list, dim=0) + augmented_batch[indices_to_replace] = new_series_tensor + + if return_debug_info: + return augmented_batch.detach(), debug_info + return augmented_batch.detach() + + +class TimeFlipAugmenter: + """ + Applies time-reversal augmentation to a random subset of time series in a batch. + """ + + def __init__(self, p_flip: float = 0.5): + """ + Initializes the TimeFlipAugmenter. + + Args: + p_flip (float): The probability of flipping a single time series in the batch. + Defaults to 0.5. + """ + assert 0.0 <= p_flip <= 1.0, "Probability must be between 0 and 1." + self.p_flip = p_flip + + def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor: + """ + Applies time-reversal augmentation to a batch of time series. + + Args: + time_series_batch (torch.Tensor): The input batch of time series with + shape (batch_size, seq_len, num_channels). + + Returns: + torch.Tensor: The batch with some series potentially flipped. + """ + with torch.no_grad(): + if self.p_flip == 0: + return time_series_batch + + batch_size = time_series_batch.shape[0] + device = time_series_batch.device + + # 1. 
Decide which series in the batch to flip
+            flip_mask = torch.rand(batch_size, device=device) < self.p_flip
+            indices_to_flip = torch.where(flip_mask)[0]
+
+            if indices_to_flip.numel() == 0:
+                return time_series_batch
+
+            # 2. Select the series to be flipped
+            series_to_flip = time_series_batch[indices_to_flip]
+
+            # 3. Flip them along the time dimension (dim=1)
+            flipped_series = torch.flip(series_to_flip, dims=[1])
+
+            # 4. Create a copy of the batch and place the flipped series into it
+            augmented_batch = time_series_batch.clone()
+            augmented_batch[indices_to_flip] = flipped_series
+
+            return augmented_batch
+
+
+class YFlipAugmenter:
+    """
+    Applies y-reversal augmentation to a random subset of time series in a batch.
+    """
+
+    def __init__(self, p_flip: float = 0.5):
+        """
+        Initializes the YFlipAugmenter.
+
+        Args:
+            p_flip (float): The probability of flipping a single time series in the batch.
+                            Defaults to 0.5.
+        """
+        assert 0.0 <= p_flip <= 1.0, "Probability must be between 0 and 1."
+        self.p_flip = p_flip
+
+    def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor:
+        """
+        Applies y-reversal (sign-flip) augmentation to a batch of time series.
+
+        Args:
+            time_series_batch (torch.Tensor): The input batch of time series with
+                                              shape (batch_size, seq_len, num_channels).
+
+        Returns:
+            torch.Tensor: The batch with some series potentially flipped.
+        """
+        with torch.no_grad():
+            if self.p_flip == 0:
+                return time_series_batch
+
+            batch_size = time_series_batch.shape[0]
+            device = time_series_batch.device
+
+            # 1. Decide which series in the batch to flip
+            flip_mask = torch.rand(batch_size, device=device) < self.p_flip
+            indices_to_flip = torch.where(flip_mask)[0]
+
+            if indices_to_flip.numel() == 0:
+                return time_series_batch
+
+            # 2. Select the series to be flipped
+            series_to_flip = time_series_batch[indices_to_flip]
+
+            # 3. Negate the values to flip them along the y-axis
+            flipped_series = -series_to_flip
+
+            # 4. Create a copy of the batch and place the flipped series into it
+            augmented_batch = time_series_batch.clone()
+            augmented_batch[indices_to_flip] = flipped_series
+
+            return augmented_batch
+
+
+class DifferentialAugmenter:
+    """
+    Applies calculus-inspired augmentations. This version includes up to the
+    fourth derivative and uses nn.Conv1d with built-in 'reflect' padding for
+    cleaner and more efficient convolutions.
+
+    The Gaussian kernel size and sigma for the initial smoothing are randomly
+    sampled at every transform() call from user-defined ranges.
+    """
+
+    def __init__(
+        self,
+        p_transform: float,
+        gaussian_kernel_size_range: Tuple[int, int] = (5, 51),
+        gaussian_sigma_range: Tuple[float, float] = (2.0, 20.0),
+    ):
+        """
+        Initializes the augmenter.
+
+        Args:
+            p_transform (float): The probability of applying an augmentation to any given
+                                 time series in a batch.
+            gaussian_kernel_size_range (Tuple[int, int]): The [min, max] inclusive range
+                                                          for the Gaussian kernel size.
+                                                          Sizes will be forced to be odd.
+            gaussian_sigma_range (Tuple[float, float]): The [min, max] inclusive range
+                                                        for the Gaussian sigma.
+        """
+        self.p_transform = p_transform
+        self.kernel_size_range = gaussian_kernel_size_range
+        self.sigma_range = gaussian_sigma_range
+
+        # Validate ranges
+        if not (
+            self.kernel_size_range[0] <= self.kernel_size_range[1]
+            and self.kernel_size_range[0] >= 3
+        ):
+            raise ValueError(
+                "Invalid kernel size range. Ensure min <= max and min >= 3."
+ ) + if not (self.sigma_range[0] <= self.sigma_range[1] and self.sigma_range[0] > 0): + raise ValueError("Invalid sigma range. Ensure min <= max and min > 0.") + + # Cache for fixed-kernel convolution layers (Sobel, Laplace, etc.) + self.conv_cache: Dict[Tuple[int, torch.device], Dict[str, nn.Module]] = {} + + def _create_fixed_kernel_layers( + self, num_channels: int, device: torch.device + ) -> dict: + """ + Creates and configures nn.Conv1d layers for fixed-kernel derivative operations. + These layers are cached to improve performance. + """ + sobel_conv = nn.Conv1d( + in_channels=num_channels, + out_channels=num_channels, + kernel_size=3, + padding="same", + padding_mode="reflect", + groups=num_channels, + bias=False, + device=device, + ) + laplace_conv = nn.Conv1d( + in_channels=num_channels, + out_channels=num_channels, + kernel_size=3, + padding="same", + padding_mode="reflect", + groups=num_channels, + bias=False, + device=device, + ) + d3_conv = nn.Conv1d( + in_channels=num_channels, + out_channels=num_channels, + kernel_size=5, + padding="same", + padding_mode="reflect", + groups=num_channels, + bias=False, + device=device, + ) + d4_conv = nn.Conv1d( + in_channels=num_channels, + out_channels=num_channels, + kernel_size=5, + padding="same", + padding_mode="reflect", + groups=num_channels, + bias=False, + device=device, + ) + + sobel_kernel = ( + torch.tensor([-1, 0, 1], device=device, dtype=torch.float32) + .view(1, 1, -1) + .repeat(num_channels, 1, 1) + ) + laplace_kernel = ( + torch.tensor([1, -2, 1], device=device, dtype=torch.float32) + .view(1, 1, -1) + .repeat(num_channels, 1, 1) + ) + d3_kernel = ( + torch.tensor([-1, 2, 0, -2, 1], device=device, dtype=torch.float32) + .view(1, 1, -1) + .repeat(num_channels, 1, 1) + ) + d4_kernel = ( + torch.tensor([1, -4, 6, -4, 1], device=device, dtype=torch.float32) + .view(1, 1, -1) + .repeat(num_channels, 1, 1) + ) + + sobel_conv.weight.data = sobel_kernel + laplace_conv.weight.data = laplace_kernel + d3_conv.weight.data = d3_kernel + d4_conv.weight.data = d4_kernel + + for layer in [sobel_conv, laplace_conv, d3_conv, d4_conv]: + layer.weight.requires_grad = False + + return { + "sobel": sobel_conv, + "laplace": laplace_conv, + "d3": d3_conv, + "d4": d4_conv, + } + + def _create_gaussian_layer( + self, kernel_size: int, sigma: float, num_channels: int, device: torch.device + ) -> nn.Module: + """Creates a single Gaussian convolution layer with the given dynamic parameters.""" + gauss_conv = nn.Conv1d( + in_channels=num_channels, + out_channels=num_channels, + kernel_size=kernel_size, + padding="same", + padding_mode="reflect", + groups=num_channels, + bias=False, + device=device, + ) + ax = torch.arange( + -(kernel_size // 2), + kernel_size // 2 + 1, + device=device, + dtype=torch.float32, + ) + gauss_kernel = torch.exp(-0.5 * (ax / sigma) ** 2) + gauss_kernel /= gauss_kernel.sum() + gauss_kernel = gauss_kernel.view(1, 1, -1).repeat(num_channels, 1, 1) + gauss_conv.weight.data = gauss_kernel + gauss_conv.weight.requires_grad = False + return gauss_conv + + def _rescale_signal( + self, processed_signal: torch.Tensor, original_signal: torch.Tensor + ) -> torch.Tensor: + """Rescales the processed signal to match the min/max range of the original.""" + original_min = torch.amin(original_signal, dim=2, keepdim=True) + original_max = torch.amax(original_signal, dim=2, keepdim=True) + processed_min = torch.amin(processed_signal, dim=2, keepdim=True) + processed_max = torch.amax(processed_signal, dim=2, keepdim=True) + + original_range = 
original_max - original_min + processed_range = processed_max - processed_min + epsilon = 1e-8 + rescaled_signal = ( + (processed_signal - processed_min) / (processed_range + epsilon) + ) * original_range + original_min + return torch.where(original_range < epsilon, original_signal, rescaled_signal) + + def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor: + """Applies a random augmentation to a subset of the batch.""" + with torch.no_grad(): + if self.p_transform == 0: + return time_series_batch + + batch_size, seq_len, num_channels = time_series_batch.shape + device = time_series_batch.device + + augment_mask = torch.rand(batch_size, device=device) < self.p_transform + indices_to_augment = torch.where(augment_mask)[0] + num_to_augment = indices_to_augment.numel() + + if num_to_augment == 0: + return time_series_batch + + # --- 🎲 Randomly sample Gaussian parameters for this call --- + min_k, max_k = self.kernel_size_range + kernel_size = torch.randint(min_k, max_k + 1, (1,)).item() + kernel_size = kernel_size // 2 * 2 + 1 # Ensure kernel size is odd + + min_s, max_s = self.sigma_range + sigma = (min_s + (max_s - min_s) * torch.rand(1)).item() + + # --- Get/Create Convolution Layers --- + gauss_conv = self._create_gaussian_layer( + kernel_size, sigma, num_channels, device + ) + + cache_key = (num_channels, device) + if cache_key not in self.conv_cache: + self.conv_cache[cache_key] = self._create_fixed_kernel_layers( + num_channels, device + ) + fixed_layers = self.conv_cache[cache_key] + + # --- Apply Augmentations --- + subset_to_augment = time_series_batch[indices_to_augment] + subset_permuted = subset_to_augment.permute(0, 2, 1) + + op_choices = torch.randint(0, 6, (num_to_augment,), device=device) + + smoothed_subset = gauss_conv(subset_permuted) + sobel_on_smoothed = fixed_layers["sobel"](smoothed_subset) + laplace_on_smoothed = fixed_layers["laplace"](smoothed_subset) + d3_on_smoothed = fixed_layers["d3"](smoothed_subset) + d4_on_smoothed = fixed_layers["d4"](smoothed_subset) + + gauss_result = self._rescale_signal(smoothed_subset, subset_permuted) + sobel_result = self._rescale_signal(sobel_on_smoothed, subset_permuted) + laplace_result = self._rescale_signal(laplace_on_smoothed, subset_permuted) + d3_result = self._rescale_signal(d3_on_smoothed, subset_permuted) + d4_result = self._rescale_signal(d4_on_smoothed, subset_permuted) + + use_right_integral = torch.rand(num_to_augment, 1, 1, device=device) > 0.5 + flipped_subset = torch.flip(subset_permuted, dims=[2]) + right_integral = torch.flip(torch.cumsum(flipped_subset, dim=2), dims=[2]) + left_integral = torch.cumsum(subset_permuted, dim=2) + integral_result = torch.where( + use_right_integral, right_integral, left_integral + ) + integral_result_normalized = self._rescale_signal( + integral_result, subset_permuted + ) + + # --- Assemble the results based on op_choices --- + op_choices_view = op_choices.view(-1, 1, 1) + augmented_subset = torch.where( + op_choices_view == 0, gauss_result, subset_permuted + ) + augmented_subset = torch.where( + op_choices_view == 1, sobel_result, augmented_subset + ) + augmented_subset = torch.where( + op_choices_view == 2, laplace_result, augmented_subset + ) + augmented_subset = torch.where( + op_choices_view == 3, integral_result_normalized, augmented_subset + ) + augmented_subset = torch.where( + op_choices_view == 4, d3_result, augmented_subset + ) + augmented_subset = torch.where( + op_choices_view == 5, d4_result, augmented_subset + ) + + augmented_subset_final = 
augmented_subset.permute(0, 2, 1) + augmented_batch = time_series_batch.clone() + augmented_batch[indices_to_augment] = augmented_subset_final + + return augmented_batch + + +class RandomConvAugmenter: + """ + Applies a stack of 1-to-N random 1D convolutions to a time series batch. + + This augmenter is inspired by the principles of ROCKET and RandConv, + randomizing nearly every aspect of the convolution process to create a + highly diverse set of transformations. This version includes multiple + kernel generation strategies, random padding modes, and optional non-linearities. + """ + + def __init__( + self, + p_transform: float = 0.5, + kernel_size_range: Tuple[int, int] = (3, 31), + dilation_range: Tuple[int, int] = (1, 8), + layer_range: Tuple[int, int] = (1, 3), + sigma_range: Tuple[float, float] = (0.5, 5.0), + bias_range: Tuple[float, float] = (-0.5, 0.5), + ): + """ + Initializes the augmenter. + + Args: + p_transform (float): Probability of applying the augmentation to a series. + kernel_size_range (Tuple[int, int]): [min, max] range for kernel sizes. + Must be odd numbers. + dilation_range (Tuple[int, int]): [min, max] range for dilation factors. + layer_range (Tuple[int, int]): [min, max] range for the number of + stacked convolution layers. + sigma_range (Tuple[float, float]): [min, max] range for the sigma of + Gaussian kernels. + bias_range (Tuple[float, float]): [min, max] range for the bias term. + """ + assert kernel_size_range[0] % 2 == 1 and kernel_size_range[1] % 2 == 1, ( + "Kernel sizes must be odd." + ) + + self.p_transform = p_transform + self.kernel_size_range = kernel_size_range + self.dilation_range = dilation_range + self.layer_range = layer_range + self.sigma_range = sigma_range + self.bias_range = bias_range + self.padding_modes = ["reflect", "replicate", "circular"] + + def _rescale_signal( + self, processed_signal: torch.Tensor, original_signal: torch.Tensor + ) -> torch.Tensor: + """Rescales the processed signal to match the min/max range of the original.""" + original_min = torch.amin(original_signal, dim=-1, keepdim=True) + original_max = torch.amax(original_signal, dim=-1, keepdim=True) + processed_min = torch.amin(processed_signal, dim=-1, keepdim=True) + processed_max = torch.amax(processed_signal, dim=-1, keepdim=True) + + original_range = original_max - original_min + processed_range = processed_max - processed_min + epsilon = 1e-8 + + is_flat = processed_range < epsilon + + rescaled_signal = ( + (processed_signal - processed_min) / (processed_range + epsilon) + ) * original_range + original_min + + original_mean = torch.mean(original_signal, dim=-1, keepdim=True) + flat_rescaled = original_mean.expand_as(original_signal) + + return torch.where(is_flat, flat_rescaled, rescaled_signal) + + def _apply_random_conv_stack(self, series: torch.Tensor) -> torch.Tensor: + """ + Applies a randomly configured stack of convolutions to a single time series. + + Args: + series (torch.Tensor): A single time series of shape (1, num_channels, seq_len). + + Returns: + torch.Tensor: The augmented time series. + """ + num_channels = series.shape[1] + device = series.device + + num_layers = torch.randint( + self.layer_range[0], self.layer_range[1] + 1, (1,) + ).item() + + processed_series = series + for i in range(num_layers): + # 1. Sample kernel size + k_min, k_max = self.kernel_size_range + kernel_size = torch.randint(k_min // 2, k_max // 2 + 1, (1,)).item() * 2 + 1 + + # 2. 
Sample dilation + d_min, d_max = self.dilation_range + dilation = torch.randint(d_min, d_max + 1, (1,)).item() + + # 3. Sample bias + b_min, b_max = self.bias_range + bias_val = (b_min + (b_max - b_min) * torch.rand(1)).item() + + # 4. Sample padding mode + padding_mode = np.random.choice(self.padding_modes) + + conv_layer = nn.Conv1d( + in_channels=num_channels, + out_channels=num_channels, + kernel_size=kernel_size, + dilation=dilation, + padding="same", # Let PyTorch handle padding calculation + padding_mode=padding_mode, + groups=num_channels, + bias=True, + device=device, + ) + + # 5. Sample kernel weights from a wider variety of types + weight_type = torch.randint(0, 4, (1,)).item() + if weight_type == 0: # Gaussian kernel + s_min, s_max = self.sigma_range + sigma = (s_min + (s_max - s_min) * torch.rand(1)).item() + ax = torch.arange( + -(kernel_size // 2), + kernel_size // 2 + 1, + device=device, + dtype=torch.float32, + ) + kernel = torch.exp(-0.5 * (ax / sigma) ** 2) + elif weight_type == 1: # Standard normal kernel + kernel = torch.randn(kernel_size, device=device) + elif weight_type == 2: # Polynomial kernel + coeffs = torch.randn(3, device=device) # a, b, c for ax^2+bx+c + x_vals = torch.linspace(-1, 1, kernel_size, device=device) + kernel = coeffs[0] * x_vals**2 + coeffs[1] * x_vals + coeffs[2] + else: # Noisy Sobel kernel + # Ensure kernel is large enough for a Sobel filter + actual_kernel_size = 3 if kernel_size < 3 else kernel_size + sobel_base = torch.tensor( + [-1, 0, 1], dtype=torch.float32, device=device + ) + noise = torch.randn(3, device=device) * 0.1 + noisy_sobel = sobel_base + noise + # Pad if the random kernel size is larger than 3 + pad_total = actual_kernel_size - 3 + pad_left = pad_total // 2 + pad_right = pad_total - pad_left + kernel = F.pad(noisy_sobel, (pad_left, pad_right), "constant", 0) + + # 6. Probabilistic normalization + if torch.rand(1).item() < 0.8: # 80% chance to normalize + kernel /= torch.sum(torch.abs(kernel)) + 1e-8 + + kernel = kernel.view(1, 1, -1).repeat(num_channels, 1, 1) + + conv_layer.weight.data = kernel + conv_layer.bias.data.fill_(bias_val) + conv_layer.weight.requires_grad = False + conv_layer.bias.requires_grad = False + + # Apply convolution + processed_series = conv_layer(processed_series) + + # 7. 
Optional non-linearity (not on the last layer) + if i < num_layers - 1: + activation_type = torch.randint(0, 3, (1,)).item() + if activation_type == 1: + processed_series = F.relu(processed_series) + elif activation_type == 2: + processed_series = torch.tanh(processed_series) + # if 0, do nothing (linear) + + return processed_series + + def transform(self, time_series_batch: torch.Tensor) -> torch.Tensor: + """Applies a random augmentation to a subset of the batch.""" + with torch.no_grad(): + if self.p_transform == 0: + return time_series_batch + + batch_size, seq_len, num_channels = time_series_batch.shape + device = time_series_batch.device + + augment_mask = torch.rand(batch_size, device=device) < self.p_transform + indices_to_augment = torch.where(augment_mask)[0] + num_to_augment = indices_to_augment.numel() + + if num_to_augment == 0: + return time_series_batch + + subset_to_augment = time_series_batch[indices_to_augment] + + subset_permuted = subset_to_augment.permute(0, 2, 1) + + augmented_subset_list = [] + for i in range(num_to_augment): + original_series = subset_permuted[i : i + 1] + augmented_series = self._apply_random_conv_stack(original_series) + + rescaled_series = self._rescale_signal( + augmented_series.squeeze(0), original_series.squeeze(0) + ) + augmented_subset_list.append(rescaled_series.unsqueeze(0)) + + if augmented_subset_list: + augmented_subset = torch.cat(augmented_subset_list, dim=0) + augmented_subset_final = augmented_subset.permute(0, 2, 1) + + augmented_batch = time_series_batch.clone() + augmented_batch[indices_to_augment] = augmented_subset_final + return augmented_batch + else: + return time_series_batch diff --git a/src/data/batch_composer.py b/src/data/batch_composer.py new file mode 100644 index 0000000000000000000000000000000000000000..5b6e28d0a9e07ba9c2a571fbfdac5f89da321e69 --- /dev/null +++ b/src/data/batch_composer.py @@ -0,0 +1,705 @@ +import json +import logging +import random +from typing import Dict, Optional, Tuple + +import numpy as np +import pandas as pd +import torch + +from src.data.augmentations import ( + NanAugmenter, +) +from src.data.constants import DEFAULT_NAN_STATS_PATH, LENGTH_CHOICES, LENGTH_WEIGHTS +from src.data.containers import BatchTimeSeriesContainer +from src.data.datasets import CyclicalBatchDataset +from src.data.frequency import Frequency +from src.data.scalers import MeanScaler, MedianScaler, MinMaxScaler, RobustScaler +from src.data.utils import sample_future_length + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class BatchComposer: + """ + Composes batches from saved generator data according to specified proportions. + Manages multiple CyclicalBatchDataset instances and creates uniform or mixed batches. + """ + + def __init__( + self, + base_data_dir: str, + generator_proportions: Optional[Dict[str, float]] = None, + mixed_batches: bool = True, + device: Optional[torch.device] = None, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + nan_stats_path: Optional[str] = None, + nan_patterns_path: Optional[str] = None, + global_seed: int = 42, + chosen_scaler_name: Optional[str] = None, + rank: int = 0, + world_size: int = 1, + ): + """ + Initialize the BatchComposer. 
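+
+        Generator proportions are normalized to sum to 1, and generators with a
+        non-positive proportion are skipped when their datasets are initialized.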
+ + Args: + base_data_dir: Base directory containing generator subdirectories + generator_proportions: Dict mapping generator names to proportions + mixed_batches: If True, create mixed batches; if False, uniform batches + device: Device to load tensors to + augmentations: Dict mapping augmentation names to booleans + augmentation_probabilities: Dict mapping augmentation names to probabilities + global_seed: Global random seed + chosen_scaler_name: Name of the scaler that used in training + rank: Rank of current process for distributed data loading + world_size: Total number of processes for distributed data loading + """ + self.base_data_dir = base_data_dir + self.mixed_batches = mixed_batches + self.device = device + self.global_seed = global_seed + self.nan_stats_path = nan_stats_path + self.nan_patterns_path = nan_patterns_path + self.rank = rank + self.world_size = world_size + self.augmentation_probabilities = augmentation_probabilities or { + "noise_augmentation": 0.3, + "scaler_augmentation": 0.5, + } + # Optional preferred scaler name provided by training config + self.chosen_scaler_name = ( + chosen_scaler_name.lower() if chosen_scaler_name is not None else None + ) + + # Setup random state + self.rng = np.random.default_rng(global_seed) + random.seed(global_seed) + torch.manual_seed(global_seed) + + # Setup augmentations + self._setup_augmentations(augmentations) + + # Setup generator proportions + self._setup_proportions(generator_proportions) + + # Initialize datasets + self.datasets = self._initialize_datasets() + + logger.info( + f"Initialized BatchComposer with {len(self.datasets)} generators, " + f"mixed_batches={mixed_batches}, proportions={self.generator_proportions}, " + f"augmentations={self.augmentations}, " + f"augmentation_probabilities={self.augmentation_probabilities}" + ) + + def _setup_augmentations(self, augmentations: Optional[Dict[str, bool]]): + """Setup only the augmentations that should remain online (NaN).""" + default_augmentations = { + "nan_augmentation": False, + "scaler_augmentation": False, + "length_shortening": False, + } + + self.augmentations = augmentations or default_augmentations + + # Initialize NaN augmenter if needed + self.nan_augmenter = None + if self.augmentations.get("nan_augmentation", False): + stats_path_to_use = self.nan_stats_path or DEFAULT_NAN_STATS_PATH + stats = json.load(open(stats_path_to_use, "r")) + self.nan_augmenter = NanAugmenter( + p_series_has_nan=stats["p_series_has_nan"], + nan_ratio_distribution=stats["nan_ratio_distribution"], + nan_length_distribution=stats["nan_length_distribution"], + nan_patterns_path=self.nan_patterns_path, + ) + + def _should_apply_scaler_augmentation(self) -> bool: + """ + Decide whether to apply scaler augmentation for a single series based on + the boolean toggle and probability from the configuration. + """ + if not self.augmentations.get("scaler_augmentation", False): + return False + probability = float( + self.augmentation_probabilities.get("scaler_augmentation", 0.0) + ) + probability = max(0.0, min(1.0, probability)) + return bool(self.rng.random() < probability) + + def _choose_random_scaler(self) -> Optional[object]: + """ + Choose a random scaler for augmentation, explicitly avoiding the one that + is already selected in the training configuration (if any). + + Returns an instance of the selected scaler or None when no valid option exists. 
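+
+        Illustrative example (hypothetical configuration): if chosen_scaler_name is
+        "minmax", the remaining candidates are ["custom_robust", "median", "mean"]
+        and one of them is drawn uniformly via self.rng.choice.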
+ """ + chosen: Optional[str] = None + if self.chosen_scaler_name is not None: + chosen = self.chosen_scaler_name.strip().lower() + + candidates = ["custom_robust", "minmax", "median", "mean"] + + # Remove the chosen scaler from the candidates + if chosen in candidates: + candidates = [c for c in candidates if c != chosen] + if not candidates: + return None + + pick = str(self.rng.choice(candidates)) + if pick == "custom_robust": + return RobustScaler() + if pick == "minmax": + return MinMaxScaler() + if pick == "median": + return MedianScaler() + if pick == "mean": + return MeanScaler() + return None + + def _setup_proportions(self, generator_proportions): + """Setup default or custom generator proportions.""" + default_proportions = { + "forecast_pfn": 1.0, + "gp": 1.0, + "kernel": 1.0, + "sinewave": 1.0, + "sawtooth": 1.0, + "step": 0.1, + "anomaly": 1.0, + "spike": 2.0, + "cauker_univariate": 2.0, + "cauker_multivariate": 0.00, + "lmc": 0.00, # multivariate + "ou_process": 1.0, + "audio_financial_volatility": 0.1, + "audio_multi_scale_fractal": 0.1, + "audio_network_topology": 0.5, + "audio_stochastic_rhythm": 1.0, + "augmented_per_sample_2048": 3.0, + "augmented_temp_batch_2048": 3.0, + } + self.generator_proportions = generator_proportions or default_proportions + + # Normalize proportions + total = sum(self.generator_proportions.values()) + if total <= 0: + raise ValueError("Total generator proportions must be positive") + self.generator_proportions = { + k: v / total for k, v in self.generator_proportions.items() + } + + def _initialize_datasets(self) -> Dict[str, CyclicalBatchDataset]: + """Initialize CyclicalBatchDataset for each generator with proportion > 0.""" + datasets = {} + + for generator_name, proportion in self.generator_proportions.items(): + # Only initialize datasets for generators with positive proportion + if proportion <= 0: + logger.info(f"Skipping {generator_name} (proportion = {proportion})") + continue + + batches_dir = f"{self.base_data_dir}/{generator_name}" + + try: + dataset = CyclicalBatchDataset( + batches_dir=batches_dir, + generator_type=generator_name, + device=None, + prefetch_next=True, + prefetch_threshold=32, + rank=self.rank, + world_size=self.world_size, + ) + datasets[generator_name] = dataset + logger.info( + f"Loaded dataset for {generator_name} (proportion = {proportion})" + ) + + except Exception as e: + logger.warning(f"Failed to load dataset for {generator_name}: {e}") + continue + + if not datasets: + raise ValueError( + f"No valid datasets found in {self.base_data_dir} or all generators have proportion <= 0" + ) + + return datasets + + def _convert_sample_to_tensors( + self, sample: dict, future_length: Optional[int] = None + ) -> Tuple[torch.Tensor, np.datetime64, Frequency]: + """ + Convert a sample dict to tensors and metadata. 
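+        Univariate samples are expanded to shape [1, seq_len, 1]; multivariate
+        samples are stacked channel-wise into shape [1, seq_len, num_channels].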
+ + Args: + sample: Sample dict from CyclicalBatchDataset + future_length: Desired future length (if None, use default split) + + Returns: + Tuple of (history_values, future_values, start, frequency) + """ + # Handle both old and new data formats + num_channels = sample.get("num_channels", 1) + values_data = sample["values"] + generator_type = sample.get("generator_type", "unknown") + + if num_channels == 1: + # Univariate data + if isinstance(values_data[0], list): + # New format: [[channel_values]] + values = torch.tensor(values_data[0], dtype=torch.float32) + logger.debug( + f"{generator_type}: Using new univariate format, shape: {values.shape}" + ) + else: + # Old format: [values] + values = torch.tensor(values_data, dtype=torch.float32) + values = values.unsqueeze(0).unsqueeze(-1) # Shape: [1, seq_len, 1] + else: + # Multivariate data (LMC) - new format: [[ch1_values], [ch2_values], ...] + channel_tensors = [] + for channel_values in values_data: + channel_tensor = torch.tensor(channel_values, dtype=torch.float32) + channel_tensors.append(channel_tensor) + + # Stack channels: [1, seq_len, num_channels] + values = torch.stack(channel_tensors, dim=-1).unsqueeze(0) + logger.debug( + f"{generator_type}: Using multivariate format, {num_channels} channels, shape: {values.shape}" + ) + + # Handle frequency conversion + freq_str = sample["frequency"] + try: + frequency = Frequency(freq_str) + except ValueError: + # Map common frequency strings to Frequency enum + freq_mapping = { + "h": Frequency.H, + "D": Frequency.D, + "W": Frequency.W, + "M": Frequency.M, + "Q": Frequency.Q, + "A": Frequency.A, + "Y": Frequency.A, # Annual + "1min": Frequency.T1, + "5min": Frequency.T5, + "10min": Frequency.T10, + "15min": Frequency.T15, + "30min": Frequency.T30, + "s": Frequency.S, + } + frequency = freq_mapping.get(freq_str, Frequency.H) # Default to hourly + + # Handle start timestamp + if isinstance(sample["start"], pd.Timestamp): + start = sample["start"].to_numpy() + else: + start = np.datetime64(sample["start"]) + + return values, start, frequency + + def _effective_proportions_for_length( + self, total_length_for_batch: int + ) -> Dict[str, float]: + """ + Build a simple, length-aware proportion map for the current batch. + + Rules: + - For generators named 'augmented{L}', keep only the one matching the + chosen length L; zero out others. + - Keep non-augmented generators as-is. + - Drop generators that are unavailable (not loaded) or zero-weight. + - If nothing remains, fall back to 'augmented{L}' if available, else any dataset. + - Normalize the final map to sum to 1. 
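+
+        Illustrative example with hypothetical generator names: given normalized
+        proportions {"gp": 1/7, "augmented2048": 3/7, "augmented1024": 3/7} and
+        total_length_for_batch=2048, "augmented1024" is zeroed out and the rest
+        renormalize to {"gp": 0.25, "augmented2048": 0.75}, assuming both remaining
+        datasets are loaded.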
+ """ + + def augmented_length_from_name(name: str) -> Optional[int]: + if not name.startswith("augmented"): + return None + suffix = name[len("augmented") :] + if not suffix: + return None + try: + return int(suffix) + except ValueError: + return None + + # 1) Adjust proportions with the length-aware rule + adjusted: Dict[str, float] = {} + for name, proportion in self.generator_proportions.items(): + aug_len = augmented_length_from_name(name) + if aug_len is None: + adjusted[name] = proportion + else: + adjusted[name] = ( + proportion if aug_len == total_length_for_batch else 0.0 + ) + + # 2) Keep only available, positive-weight datasets + adjusted = { + name: p for name, p in adjusted.items() if name in self.datasets and p > 0.0 + } + + # 3) Fallback if empty + if not adjusted: + preferred = f"augmented{total_length_for_batch}" + if preferred in self.datasets: + adjusted = {preferred: 1.0} + elif self.datasets: + # Choose any available dataset deterministically (first key) + first_key = next(iter(self.datasets.keys())) + adjusted = {first_key: 1.0} + else: + raise ValueError("No datasets available to create batch") + + # 4) Normalize + total = sum(adjusted.values()) + return {name: p / total for name, p in adjusted.items()} + + def _compute_sample_counts_for_batch( + self, proportions: Dict[str, float], batch_size: int + ) -> Dict[str, int]: + """ + Convert a proportion map into integer sample counts that sum to batch_size. + + Strategy: allocate floor(batch_size * p) to each generator in order, and let the + last generator absorb any remainder to ensure the total matches exactly. + """ + counts: Dict[str, int] = {} + remaining = batch_size + names = list(proportions.keys()) + values = list(proportions.values()) + for index, (name, p) in enumerate(zip(names, values)): + if index == len(names) - 1: + counts[name] = remaining + else: + n = int(batch_size * p) + counts[name] = n + remaining -= n + return counts + + def _calculate_generator_samples(self, batch_size: int) -> Dict[str, int]: + """ + Calculate the number of samples each generator should contribute. + + Args: + batch_size: Total batch size + + Returns: + Dict mapping generator names to sample counts + """ + generator_samples = {} + remaining_samples = batch_size + + generators = list(self.generator_proportions.keys()) + proportions = list(self.generator_proportions.values()) + + # Calculate base samples for each generator + for i, (generator, proportion) in enumerate(zip(generators, proportions)): + if generator not in self.datasets: + continue + + if i == len(generators) - 1: # Last generator gets remaining samples + samples = remaining_samples + else: + samples = int(batch_size * proportion) + remaining_samples -= samples + generator_samples[generator] = samples + + return generator_samples + + def create_batch( + self, + batch_size: int = 128, + seed: Optional[int] = None, + future_length: Optional[int] = None, + ) -> Tuple[BatchTimeSeriesContainer, str]: + """ + Create a batch of the specified size. + + Args: + batch_size: Size of the batch to create + seed: Random seed for this batch + future_length: Fixed future length to use. 
If None, samples from gift_eval range + + Returns: + Tuple of (batch_container, generator_info) + """ + if seed is not None: + batch_rng = np.random.default_rng(seed) + random.seed(seed) + else: + batch_rng = self.rng + + if self.mixed_batches: + return self._create_mixed_batch(batch_size, future_length) + else: + return self._create_uniform_batch(batch_size, batch_rng, future_length) + + def _create_mixed_batch( + self, batch_size: int, future_length: Optional[int] = None + ) -> Tuple[BatchTimeSeriesContainer, str]: + """Create a mixed batch with samples from multiple generators, rejecting NaNs.""" + + # Choose total length for this batch; respect length_shortening flag. + # When disabled, always use the maximum to avoid shortening. + if self.augmentations.get("length_shortening", False): + lengths = list(LENGTH_WEIGHTS.keys()) + probs = list(LENGTH_WEIGHTS.values()) + total_length_for_batch = int(self.rng.choice(lengths, p=probs)) + else: + total_length_for_batch = int(max(LENGTH_CHOICES)) + + if future_length is None: + prediction_length = int( + sample_future_length( + range="gift_eval", total_length=total_length_for_batch + ) + ) + else: + prediction_length = future_length + + history_length = total_length_for_batch - prediction_length + + # Calculate samples per generator using simple, per-batch length-aware proportions + effective_props = self._effective_proportions_for_length(total_length_for_batch) + generator_samples = self._compute_sample_counts_for_batch( + effective_props, batch_size + ) + + all_values = [] + all_starts = [] + all_frequencies = [] + actual_proportions = {} + + # Collect valid samples from each generator using batched fetches to reduce I/O overhead + for generator_name, num_samples in generator_samples.items(): + if num_samples == 0 or generator_name not in self.datasets: + continue + + dataset = self.datasets[generator_name] + + # Lists to hold valid samples for the current generator + generator_values = [] + generator_starts = [] + generator_frequencies = [] + + # Loop until we have collected the required number of VALID samples + max_attempts = 50 + attempts = 0 + while len(generator_values) < num_samples and attempts < max_attempts: + attempts += 1 + # Fetch a batch larger than needed to reduce round-trips + need = num_samples - len(generator_values) + fetch_n = max(need * 2, 8) + samples = dataset.get_samples(fetch_n) + + for sample in samples: + if len(generator_values) >= num_samples: + break + + values, sample_start, sample_freq = self._convert_sample_to_tensors( + sample, future_length + ) + + # Skip if NaNs exist (we inject NaNs later in history only) + if torch.isnan(values).any(): + continue + + # Resize to target batch length when longer + if total_length_for_batch < values.shape[1]: + strategy = self.rng.choice(["cut", "subsample"]) # 50/50 + if strategy == "cut": + max_start_idx = values.shape[1] - total_length_for_batch + start_idx = int(self.rng.integers(0, max_start_idx + 1)) + values = values[ + :, start_idx : start_idx + total_length_for_batch, : + ] + else: + indices = np.linspace( + 0, + values.shape[1] - 1, + total_length_for_batch, + dtype=int, + ) + values = values[:, indices, :] + + # Optionally apply scaler augmentation according to configuration + if self._should_apply_scaler_augmentation(): + scaler = self._choose_random_scaler() + if scaler is not None: + values = scaler.scale( + values, scaler.compute_statistics(values) + ) + + generator_values.append(values) + generator_starts.append(sample_start) + 
generator_frequencies.append(sample_freq) + + if len(generator_values) < num_samples: + logger.warning( + f"Generator {generator_name}: collected {len(generator_values)}/{num_samples} after {attempts} attempts" + ) + + # Add the collected valid samples to the main batch lists + if generator_values: + all_values.extend(generator_values) + all_starts.extend(generator_starts) + all_frequencies.extend(generator_frequencies) + actual_proportions[generator_name] = len(generator_values) + + if not all_values: + raise RuntimeError( + "No valid samples could be collected from any generator." + ) + + combined_values = torch.cat(all_values, dim=0) + # Split into history and future + combined_history = combined_values[:, :history_length, :] + combined_future = combined_values[ + :, history_length : history_length + prediction_length, : + ] + + if self.nan_augmenter is not None: + combined_history = self.nan_augmenter.transform(combined_history) + + # Create container + container = BatchTimeSeriesContainer( + history_values=combined_history, + future_values=combined_future, + start=all_starts, + frequency=all_frequencies, + ) + + return container, "MixedBatch" + + def _create_uniform_batch( + self, + batch_size: int, + batch_rng: np.random.Generator, + future_length: Optional[int] = None, + ) -> Tuple[BatchTimeSeriesContainer, str]: + """Create a uniform batch with samples from a single generator.""" + + # Select generator based on proportions + generators = list(self.datasets.keys()) + proportions = [self.generator_proportions[gen] for gen in generators] + selected_generator = batch_rng.choice(generators, p=proportions) + + # Sample future length + if future_length is None: + future_length = sample_future_length(range="gift_eval") + + # Get samples from selected generator + dataset = self.datasets[selected_generator] + samples = dataset.get_samples(batch_size) + + all_history_values = [] + all_future_values = [] + all_starts = [] + all_frequencies = [] + + for sample in samples: + values, sample_start, sample_freq = self._convert_sample_to_tensors( + sample, future_length + ) + + total_length = values.shape[1] + history_length = max(1, total_length - future_length) + + # Optionally apply scaler augmentation according to configuration + if self._should_apply_scaler_augmentation(): + scaler = self._choose_random_scaler() + if scaler is not None: + values = scaler.scale(values, scaler.compute_statistics(values)) + + # Reshape to [1, seq_len, 1] for single sample + hist_vals = values[:, :history_length, :] + fut_vals = values[:, history_length : history_length + future_length, :] + + all_history_values.append(hist_vals) + all_future_values.append(fut_vals) + all_starts.append(sample_start) + all_frequencies.append(sample_freq) + + # Combine samples + combined_history = torch.cat(all_history_values, dim=0) + combined_future = torch.cat(all_future_values, dim=0) + + # Create container + container = BatchTimeSeriesContainer( + history_values=combined_history, + future_values=combined_future, + start=all_starts, + frequency=all_frequencies, + ) + + return container, selected_generator + + def get_dataset_info(self) -> Dict[str, dict]: + """Get information about all datasets.""" + info = {} + for name, dataset in self.datasets.items(): + info[name] = dataset.get_info() + return info + + def get_generator_info(self) -> Dict[str, any]: + """Get information about the composer configuration.""" + return { + "mixed_batches": self.mixed_batches, + "generator_proportions": self.generator_proportions, + 
"active_generators": list(self.datasets.keys()), + "total_generators": len(self.datasets), + "augmentations": self.augmentations, + "augmentation_probabilities": self.augmentation_probabilities, + "nan_augmenter_enabled": self.nan_augmenter is not None, + } + + +class ComposedDataset(torch.utils.data.Dataset): + """ + PyTorch Dataset wrapper around BatchComposer for training pipeline integration. + """ + + def __init__( + self, + batch_composer: BatchComposer, + num_batches_per_epoch: int = 100, + batch_size: int = 128, + ): + """ + Initialize the dataset. + + Args: + batch_composer: The BatchComposer instance + num_batches_per_epoch: Number of batches to generate per epoch + batch_size: Size of each batch + """ + self.batch_composer = batch_composer + self.num_batches_per_epoch = num_batches_per_epoch + self.batch_size = batch_size + + def __len__(self) -> int: + return self.num_batches_per_epoch + + def __getitem__(self, idx: int) -> BatchTimeSeriesContainer: + """ + Get a batch by index. + + Args: + idx: Batch index (used as seed for reproducibility) + + Returns: + BatchTimeSeriesContainer + """ + # Use index as seed for reproducible batches + batch, _ = self.batch_composer.create_batch( + batch_size=self.batch_size, seed=self.batch_composer.global_seed + idx + ) + return batch \ No newline at end of file diff --git a/src/data/constants.py b/src/data/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..a27d4f2fa756c29d6923dce2448536cfda9ad3bf --- /dev/null +++ b/src/data/constants.py @@ -0,0 +1,25 @@ +from datetime import date +from typing import Dict + +import numpy as np + +DEFAULT_START_DATE = date(1700, 1, 1) +DEFAULT_END_DATE = date(2200, 1, 1) +BASE_START_DATE = np.datetime64(DEFAULT_START_DATE) +BASE_END_DATE = np.datetime64(DEFAULT_END_DATE) + +# Maximum years to prevent timestamp overflow +MAX_YEARS = 500 + +LENGTH_CHOICES = [128, 256, 512, 1024, 1536, 2048] + +DEFAULT_NAN_STATS_PATH: str = "./data/nan_stats.json" + +LENGTH_WEIGHTS: Dict[int, float] = { + 128: 0.05, + 256: 0.10, + 512: 0.10, + 1024: 0.10, + 1536: 0.15, + 2048: 0.50, +} diff --git a/src/data/containers.py b/src/data/containers.py new file mode 100644 index 0000000000000000000000000000000000000000..fc3567348e3ee47336548f5fa1e8fd0e31449295 --- /dev/null +++ b/src/data/containers.py @@ -0,0 +1,204 @@ +from dataclasses import dataclass +from typing import List, Optional + +import numpy as np +import torch + +from src.data.frequency import Frequency + + +@dataclass +class BatchTimeSeriesContainer: + """ + Container for a batch of multivariate time series data and their associated features. + + Attributes: + history_values: Tensor of historical observations. + Shape: [batch_size, seq_len, num_channels] + future_values: Tensor of future observations to predict. + Shape: [batch_size, pred_len, num_channels] + start: Timestamp of the first history value. + Type: List[np.datetime64] + frequency: Frequency of the time series. + Type: List[Frequency] + history_mask: Optional boolean/float tensor indicating missing entries in history_values across channels. + Shape: [batch_size, seq_len] + future_mask: Optional boolean/float tensor indicating missing entries in future_values across channels. 
+ Shape: [batch_size, pred_len] + """ + + history_values: torch.Tensor + future_values: torch.Tensor + start: List[np.datetime64] + frequency: List[Frequency] + + history_mask: Optional[torch.Tensor] = None + future_mask: Optional[torch.Tensor] = None + + def __post_init__(self): + """Validate all tensor shapes and consistency.""" + # --- Tensor Type Checks --- + if not isinstance(self.history_values, torch.Tensor): + raise TypeError("history_values must be a torch.Tensor") + if not isinstance(self.future_values, torch.Tensor): + raise TypeError("future_values must be a torch.Tensor") + if not isinstance(self.start, list) or not all( + isinstance(x, np.datetime64) for x in self.start + ): + raise TypeError("start must be a List[np.datetime64]") + if not isinstance(self.frequency, list) or not all( + isinstance(x, Frequency) for x in self.frequency + ): + raise TypeError("frequency must be a List[Frequency]") + + batch_size, seq_len, num_channels = self.history_values.shape + pred_len = self.future_values.shape[1] + + # --- Core Shape Checks --- + if self.future_values.shape[0] != batch_size: + raise ValueError("Batch size mismatch between history and future_values") + if self.future_values.shape[2] != num_channels: + raise ValueError("Channel size mismatch between history and future_values") + + # --- Optional Mask Checks --- + if self.history_mask is not None: + if not isinstance(self.history_mask, torch.Tensor): + raise TypeError("history_mask must be a Tensor or None") + if self.history_mask.shape[:2] != (batch_size, seq_len): + raise ValueError( + f"Shape mismatch in history_mask: {self.history_mask.shape[:2]} vs {(batch_size, seq_len)}" + ) + + if self.future_mask is not None: + if not isinstance(self.future_mask, torch.Tensor): + raise TypeError("future_mask must be a Tensor or None") + if not ( + self.future_mask.shape == (batch_size, pred_len) + or self.future_mask.shape == self.future_values.shape + ): + raise ValueError( + f"Shape mismatch in future_mask: expected {(batch_size, pred_len)} or {self.future_values.shape}, got {self.future_mask.shape}" + ) + + def to_device( + self, device: torch.device, attributes: Optional[List[str]] = None + ) -> None: + """ + Move specified tensors to the target device in place. + + Args: + device: Target device (e.g., 'cpu', 'cuda'). + attributes: Optional list of attribute names to move. If None, move all tensors. + + Raises: + ValueError: If an invalid attribute is specified or device transfer fails. + """ + all_tensors = { + "history_values": self.history_values, + "future_values": self.future_values, + "history_mask": self.history_mask, + "future_mask": self.future_mask, + } + + if attributes is None: + attributes = [k for k, v in all_tensors.items() if v is not None] + + for attr in attributes: + if attr not in all_tensors: + raise ValueError(f"Invalid attribute: {attr}") + if all_tensors[attr] is not None: + setattr(self, attr, all_tensors[attr].to(device)) + + def to(self, device: torch.device, attributes: Optional[List[str]] = None): + """ + Alias for to_device method for consistency with PyTorch conventions. + + Args: + device: Target device (e.g., 'cpu', 'cuda'). + attributes: Optional list of attribute names to move. If None, move all tensors. 
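+
+        Example:
+            container = container.to(torch.device("cpu"))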
+ """ + self.to_device(device, attributes) + return self + + @property + def batch_size(self) -> int: + return self.history_values.shape[0] + + @property + def history_length(self) -> int: + return self.history_values.shape[1] + + @property + def future_length(self) -> int: + return self.future_values.shape[1] + + @property + def num_channels(self) -> int: + return self.history_values.shape[2] + + +@dataclass +class TimeSeriesContainer: + """ + Container for batch of time series data without explicit history/future split. + + This container is used for storing generated synthetic time series data where + the entire series is treated as a single entity, typically for further processing + or splitting into history/future components later. + + Attributes: + values: np.ndarray of time series values. + Shape: [batch_size, seq_len, num_channels] for multivariate series + [batch_size, seq_len] for univariate series + start: List of start timestamps for each series in the batch. + Type: List[np.datetime64], length should match batch_size + frequency: List of frequency for each series in the batch. + Type: List[Frequency], length should match batch_size + """ + + values: np.ndarray + start: List[np.datetime64] + frequency: List[Frequency] + + def __post_init__(self): + """Validate all shapes and consistency.""" + # --- Numpy Type Checks --- + if not isinstance(self.values, np.ndarray): + raise TypeError("values must be a np.ndarray") + if not isinstance(self.start, list) or not all( + isinstance(x, np.datetime64) for x in self.start + ): + raise TypeError("start must be a List[np.datetime64]") + if not isinstance(self.frequency, list) or not all( + isinstance(x, Frequency) for x in self.frequency + ): + raise TypeError("frequency must be a List[Frequency]") + + # --- Shape and Length Consistency Checks --- + if len(self.values.shape) < 2 or len(self.values.shape) > 3: + raise ValueError( + f"values must have 2 or 3 dimensions [batch_size, seq_len] or [batch_size, seq_len, num_channels], got shape {self.values.shape}" + ) + + batch_size = self.values.shape[0] + + if len(self.start) != batch_size: + raise ValueError( + f"Length of start ({len(self.start)}) must match batch_size ({batch_size})" + ) + if len(self.frequency) != batch_size: + raise ValueError( + f"Length of frequency ({len(self.frequency)}) must match batch_size ({batch_size})" + ) + + @property + def batch_size(self) -> int: + return self.values.shape[0] + + @property + def seq_length(self) -> int: + return self.values.shape[1] + + @property + def num_channels(self) -> int: + return self.values.shape[2] if len(self.values.shape) == 3 else 1 diff --git a/src/data/datasets.py b/src/data/datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..d28fd84f6cf9f913904b53d6e709b88b7f6610b3 --- /dev/null +++ b/src/data/datasets.py @@ -0,0 +1,267 @@ +import logging +import os +import random +from typing import List, Optional + +import pyarrow.feather as feather +import torch + +logger = logging.getLogger(__name__) + + +class CyclicalBatchDataset: + """ + Dataset class that loads saved batches from continuous generation script. + Maintains a pointer and provides cyclical access to individual samples. + Includes enhanced logging to track data shard cycling during training. + Supports per-rank file sharding for large-scale distributed training. 
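+
+    Example (sketch; the directory below is hypothetical and must contain
+    batch_*.arrow files):
+        dataset = CyclicalBatchDataset(
+            batches_dir="./data/gp", generator_type="gp", rank=0, world_size=1
+        )
+        samples = dataset.get_samples(32)  # list of per-series sample dicts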
+ """ + + def __init__( + self, + batches_dir: str, + generator_type: str, + device: Optional[torch.device] = None, + prefetch_next: bool = True, + prefetch_threshold: int = 32, + rank: int = 0, + world_size: int = 1, + ): + """ + Initialize the cyclical batch dataset. + + Args: + batches_dir: Directory containing the batch arrow files + generator_type: Type of generator (for logging) + device: Device to load tensors to + prefetch_next: Whether to prefetch the next batch + prefetch_threshold: Number of remaining samples to trigger prefetching + rank: Rank of the current process (for file sharding) + world_size: Total number of processes (for file sharding) + """ + self.batches_dir = batches_dir + self.generator_type = generator_type + self.device = device + self.prefetch_next = prefetch_next + self.prefetch_threshold = prefetch_threshold + self.rank = rank + self.world_size = world_size + + self.batch_files = self._find_batch_files() + if not self.batch_files: + raise ValueError(f"No batch files found in {batches_dir}") + + # --- State tracking --- + self.current_batch_idx = 0 + self.current_sample_idx = 0 + self.current_batch_data = None + self.next_batch_data = None + self.prefetching_in_progress = False + + # --- NEW: Logging and cycle tracking --- + self.visited_batch_indices = set() + self.full_cycles_completed = 0 + + # Load first batch and update tracking + self._load_current_batch() + self.visited_batch_indices.add(self.current_batch_idx) + + logger.info( + f"Initialized '{self.generator_type}' dataset with {len(self.batch_files)} batches. " + f"Current batch file: '{os.path.basename(self.batch_files[self.current_batch_idx])}' " + f"has {len(self.current_batch_data)} samples." + ) + + def _find_batch_files(self) -> List[str]: + """ + Find and sort batch files with per-rank sharding for distributed training. + + Each rank gets a disjoint subset of files to minimize I/O contention + when scaling to hundreds of GPUs. + """ + import glob + + pattern = os.path.join(self.batches_dir, "batch_*.arrow") + all_files = sorted(glob.glob(pattern)) # Sort for deterministic sharding + + if not all_files: + return [] + + # Shard files across ranks: each rank gets every world_size-th file + # Example with 4 ranks: rank0=[0,4,8,...], rank1=[1,5,9,...], etc. 
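+        # With world_size=1 every file index satisfies i % 1 == 0, so a single
+        # (non-distributed) process keeps the full file list.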
+ rank_files = [ + f for i, f in enumerate(all_files) if i % self.world_size == self.rank + ] + + # Shuffle only within this rank's shard for variety + random.shuffle(rank_files) + + logger.info( + f"[Rank {self.rank}] '{self.generator_type}': Sharded {len(all_files)} files → " + f"{len(rank_files)} files for this rank ({len(rank_files) / len(all_files) * 100:.1f}%)" + ) + + return rank_files + + def _load_batch_from_file(self, batch_file: str) -> List[dict]: + """Load a batch from arrow file.""" + try: + table = feather.read_table(batch_file) + has_num_channels = "num_channels" in table.column_names + batch_data = [] + for i in range(len(table)): + row = { + "series_id": table["series_id"][i].as_py(), + "values": table["values"][i].as_py(), + "length": table["length"][i].as_py(), + "generator_type": table["generator_type"][i].as_py(), + "start": table["start"][i].as_py(), + "frequency": table["frequency"][i].as_py(), + "generation_timestamp": table["generation_timestamp"][i].as_py(), + } + if has_num_channels: + row["num_channels"] = table["num_channels"][i].as_py() + else: + row["num_channels"] = 1 + batch_data.append(row) + return batch_data + except Exception as e: + logger.error(f"Error loading batch from {batch_file}: {e}") + raise + + def _load_current_batch(self): + """Load the current batch into memory.""" + if hasattr(self, "current_batch_data") and self.current_batch_data is not None: + del self.current_batch_data + batch_file = self.batch_files[self.current_batch_idx] + self.current_batch_data = self._load_batch_from_file(batch_file) + self.current_sample_idx = 0 + logger.debug( + f"Loaded batch {self.current_batch_idx} for {self.generator_type} " + f"with {len(self.current_batch_data)} samples" + ) + + def _trigger_smart_prefetch(self): + """Trigger prefetching when batch is almost exhausted.""" + if not self.prefetch_next or len(self.batch_files) <= 1: + return + remaining_samples = self.get_remaining_samples_in_current_batch() + should_prefetch = ( + remaining_samples <= self.prefetch_threshold + and self.next_batch_data is None + and not self.prefetching_in_progress + ) + if should_prefetch: + self._prefetch_next_batch() + + def _prefetch_next_batch(self): + """Prefetch the next batch.""" + if self.prefetching_in_progress: + return + self.prefetching_in_progress = True + next_batch_idx = (self.current_batch_idx + 1) % len(self.batch_files) + next_batch_file = self.batch_files[next_batch_idx] + try: + self.next_batch_data = self._load_batch_from_file(next_batch_file) + logger.debug( + f"Prefetched next batch {next_batch_idx} for {self.generator_type}" + ) + except Exception as e: + logger.warning(f"Failed to prefetch batch {next_batch_idx}: {e}") + self.next_batch_data = None + finally: + self.prefetching_in_progress = False + + def _advance_to_next_batch(self): + """Advance to the next batch and log the transition.""" + if hasattr(self, "current_batch_data") and self.current_batch_data is not None: + del self.current_batch_data + + previous_batch_idx = self.current_batch_idx + self.current_batch_idx = (self.current_batch_idx + 1) % len(self.batch_files) + + if hasattr(self, "next_batch_data") and self.next_batch_data is not None: + self.current_batch_data = self.next_batch_data + self.next_batch_data = None + else: + self._load_current_batch() + + self.current_sample_idx = 0 + self.prefetching_in_progress = False + + # --- NEW: Enhanced Logging Logic --- + self.visited_batch_indices.add(self.current_batch_idx) + + # Calculate progress + total_files = len(self.batch_files) + 
visited_count = len(self.visited_batch_indices) + progress_percent = (visited_count / total_files) * 100 + + # Log the shard cycle event + logger.info( + f"\nDATA SHARD CYCLED for '{self.generator_type}': " + f"Moved from file index {previous_batch_idx} to {self.current_batch_idx}. " + f"Unique files visited: {visited_count}/{total_files} ({progress_percent:.1f}%)." + ) + + # Check if a full cycle has been completed + if visited_count == total_files: + self.full_cycles_completed += 1 + logger.info( + f"🎉 FULL CYCLE #{self.full_cycles_completed} COMPLETED for '{self.generator_type}'! " + f"All {total_files} data files have been visited at least once. " + "Resetting visited set to track the next cycle." + ) + # Reset for the next cycle count + self.visited_batch_indices.clear() + self.visited_batch_indices.add(self.current_batch_idx) + + def get_sample(self) -> dict: + """Get the current sample and advance pointer.""" + if not hasattr(self, "current_batch_data") or self.current_batch_data is None: + self._load_current_batch() + if self.current_batch_data is None: + raise RuntimeError("No batch data loaded") + if self.current_sample_idx >= len(self.current_batch_data): + self._advance_to_next_batch() + self._trigger_smart_prefetch() + sample = self.current_batch_data[self.current_sample_idx] + self.current_sample_idx += 1 + return sample + + def get_samples(self, num_samples: int) -> List[dict]: + """Get multiple samples.""" + samples = [] + for _ in range(num_samples): + samples.append(self.get_sample()) + return samples + + def get_total_samples_in_current_batch(self) -> int: + """Get total samples in current batch.""" + if not hasattr(self, "current_batch_data") or self.current_batch_data is None: + return 0 + return len(self.current_batch_data) + + def get_remaining_samples_in_current_batch(self) -> int: + """Get remaining samples in current batch.""" + if not hasattr(self, "current_batch_data") or self.current_batch_data is None: + return 0 + return max(0, len(self.current_batch_data) - self.current_sample_idx) + + def get_info(self) -> dict: + """Get extended dataset info, including cycle progress.""" + total_files = len(self.batch_files) + visited_count = len(self.visited_batch_indices) + return { + "generator_type": self.generator_type, + "total_batch_files": total_files, + "current_batch_idx": self.current_batch_idx, + "current_sample_idx": self.current_sample_idx, + "current_batch_size": self.get_total_samples_in_current_batch(), + "remaining_in_batch": self.get_remaining_samples_in_current_batch(), + "unique_files_visited": visited_count, + "cycle_progress_percent": (visited_count / total_files) * 100 + if total_files > 0 + else 0, + "full_cycles_completed": self.full_cycles_completed, + } \ No newline at end of file diff --git a/src/data/filter.py b/src/data/filter.py new file mode 100644 index 0000000000000000000000000000000000000000..e051eea66eccfca7eb2453ceef990cdf2f6ba5dd --- /dev/null +++ b/src/data/filter.py @@ -0,0 +1,73 @@ +import numpy as np +import torch +from scipy import signal +from statsmodels.tsa.stattools import acf + + +def lempel_ziv_complexity(binary_sequence: np.ndarray) -> int: + """Computes the Lempel-Ziv complexity of a binary sequence.""" + sub_strings = set() + n = len(binary_sequence) + i = 0 + count = 0 + while i < n: + sub_str = "" + for j in range(i, n): + sub_str += str(binary_sequence[j]) + if sub_str not in sub_strings: + sub_strings.add(sub_str) + count += 1 + i = j + 1 + break + else: + i += 1 + return count + + +def is_low_quality( + series: 
torch.Tensor, + autocorr_threshold: float = 0.2, + snr_threshold: float = 0.5, + complexity_threshold: float = 0.4, +) -> bool: + """ + Returns True if the series appears non-forecastable (noise-like): + - weak autocorrelation + - low SNR proxy + - high normalized Lempel-Ziv complexity + """ + x = series.squeeze().detach().cpu().numpy() + if x.size < 20: + return True + if np.var(x) < 1e-10: + return True + + x_detrended = signal.detrend(x) + + try: + max_lags = min(len(x_detrended) // 4, 40) + if max_lags < 1: + autocorr_strength = 0.0 + else: + acf_vals = acf(x_detrended, nlags=max_lags, fft=True)[1:] + autocorr_strength = float(np.max(np.abs(acf_vals))) + except Exception: + autocorr_strength = 0.0 + + win_size = max(3, min(len(x) // 10, 15)) + signal_est = np.convolve(x, np.ones(win_size) / win_size, mode="valid") + noise_est = x[win_size - 1 :] - signal_est + var_signal = float(np.var(signal_est)) + var_noise = float(np.var(noise_est)) + snr_proxy = var_signal / var_noise if var_noise > 1e-8 else 1.0 + + median_val = float(np.median(x_detrended)) + binary_seq = (x_detrended > median_val).astype(np.uint8) + complexity_score = lempel_ziv_complexity(binary_seq) + normalized_complexity = complexity_score / max(1, len(binary_seq)) + + is_random_like = (snr_proxy < snr_threshold) and ( + normalized_complexity > complexity_threshold + ) + is_uncorrelated = autocorr_strength < autocorr_threshold + return bool(is_uncorrelated and is_random_like) diff --git a/src/data/frequency.py b/src/data/frequency.py new file mode 100644 index 0000000000000000000000000000000000000000..ea4d5097ad51a291e9cabdcd3fc122c39e1304ee --- /dev/null +++ b/src/data/frequency.py @@ -0,0 +1,538 @@ +""" +Comprehensive frequency management module for time series forecasting. + +This module centralizes all frequency-related functionality including: +- Frequency enum with helper methods +- Frequency parsing and validation +- Pandas frequency string conversion +- Safety checks for date ranges +- Frequency selection utilities +- All frequency constants and mappings +""" + +import logging +import re +from enum import Enum +from typing import Dict, Tuple + +import numpy as np +import pandas as pd +from numpy.random import Generator + +from src.data.constants import BASE_END_DATE, BASE_START_DATE, MAX_YEARS + +logger = logging.getLogger(__name__) + + +class Frequency(Enum): + """ + Enhanced Frequency enum with comprehensive helper methods. + + Each frequency includes methods for pandas conversion, safety checks, + and other frequency-specific operations. + """ + + A = "A" # Annual + Q = "Q" # Quarterly + M = "M" # Monthly + W = "W" # Weekly + D = "D" # Daily + H = "h" # Hourly + S = "s" # Seconds + T1 = "1min" # 1 minute + T5 = "5min" # 5 minutes + T10 = "10min" # 10 minutes + T15 = "15min" # 15 minutes + T30 = "30min" # 30 minutes + + def to_pandas_freq(self, for_date_range: bool = True) -> str: + """ + Convert to pandas frequency string. + + Args: + for_date_range: If True, use strings suitable for pd.date_range(). + If False, use strings suitable for pd.PeriodIndex(). 
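+                For example, Frequency.M maps to "ME" when for_date_range is True
+                and to "M" when it is False.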
+ + Returns: + Pandas frequency string + """ + base, prefix, _ = FREQUENCY_MAPPING[self] + + # Special handling for date_range vs period compatibility + if for_date_range: + # For date_range, use modern pandas frequency strings + if self == Frequency.M: + return "ME" # Month End + elif self == Frequency.A: + return "YE" # Year End + elif self == Frequency.Q: + return "QE" # Quarter End + else: + # For periods, use legacy frequency strings + if self == Frequency.M: + return "M" # Month for periods + elif self == Frequency.A: + return "Y" # Year for periods (not YE) + elif self == Frequency.Q: + return "Q" # Quarter for periods (not QE) + + # Construct frequency string for other frequencies + if prefix: + return f"{prefix}{base}" + else: + return base + + def to_pandas_offset(self) -> str: + """Get pandas offset string for time delta calculations.""" + return FREQUENCY_TO_OFFSET[self] + + def get_days_per_period(self) -> float: + """Get approximate days per period for this frequency.""" + _, _, days = FREQUENCY_MAPPING[self] + return days + + def get_max_safe_length(self) -> int: + """Get maximum safe sequence length to prevent timestamp overflow.""" + return ALL_FREQUENCY_MAX_LENGTHS.get(self, float("inf")) + + def is_high_frequency(self) -> bool: + """Check if this is a high frequency (minute/second level).""" + return self in [ + Frequency.S, + Frequency.T1, + Frequency.T5, + Frequency.T10, + Frequency.T15, + Frequency.T30, + ] + + def is_low_frequency(self) -> bool: + """Check if this is a low frequency (annual/quarterly/monthly).""" + return self in [Frequency.A, Frequency.Q, Frequency.M] + + def get_seasonality(self) -> int: + """Get typical seasonality for this frequency.""" + seasonality_map = { + Frequency.S: 3600, # 1 hour of seconds + Frequency.T1: 60, # 1 hour of minutes + Frequency.T5: 12, # 1 hour of 5-minute intervals + Frequency.T10: 6, # 1 hour of 10-minute intervals + Frequency.T15: 4, # 1 hour of 15-minute intervals + Frequency.T30: 2, # 1 hour of 30-minute intervals + Frequency.H: 24, # 1 day of hours + Frequency.D: 7, # 1 week of days + Frequency.W: 52, # 1 year of weeks + Frequency.M: 12, # 1 year of months + Frequency.Q: 4, # 1 year of quarters + Frequency.A: 1, # No clear seasonality for annual + } + return seasonality_map.get(self, 1) + + def get_gift_eval_weight(self) -> float: + """Get GIFT eval dataset frequency weight.""" + return GIFT_EVAL_FREQUENCY_WEIGHTS.get(self, 0.1) + + def get_length_range(self) -> Tuple[int, int, int, int]: + """Get (min_length, max_length, optimal_start, optimal_end) for this frequency.""" + return GIFT_EVAL_LENGTH_RANGES.get(self, (50, 1000, 100, 500)) + + +# ============================================================================ +# Frequency Mappings and Constants +# ============================================================================ + +# Core frequency mapping: (pandas_base, prefix, days_per_period) +FREQUENCY_MAPPING: Dict[Frequency, Tuple[str, str, float]] = { + Frequency.A: ( + "YE", + "", + 365.25, + ), # Average days per year (accounting for leap years) + Frequency.Q: ("Q", "", 91.3125), # 365.25/4 - average days per quarter + Frequency.M: ("M", "", 30.4375), # 365.25/12 - average days per month + Frequency.W: ("W", "", 7), + Frequency.D: ("D", "", 1), + Frequency.H: ("h", "", 1 / 24), + Frequency.S: ("s", "", 1 / 86400), # 24*60*60 + Frequency.T1: ("min", "1", 1 / 1440), # 24*60 + Frequency.T5: ("min", "5", 1 / 288), # 24*60/5 + Frequency.T10: ("min", "10", 1 / 144), # 24*60/10 + Frequency.T15: ("min", "15", 1 / 
96), # 24*60/15 + Frequency.T30: ("min", "30", 1 / 48), # 24*60/30 +} + +# Frequency to pandas offset mapping for calculating time deltas +FREQUENCY_TO_OFFSET: Dict[Frequency, str] = { + Frequency.A: "AS", # Annual start + Frequency.Q: "QS", # Quarter start + Frequency.M: "MS", # Month start + Frequency.W: "W", # Weekly + Frequency.D: "D", # Daily + Frequency.H: "H", # Hourly + Frequency.T1: "1T", # 1 minute + Frequency.T5: "5T", # 5 minutes + Frequency.T10: "10T", # 10 minutes + Frequency.T15: "15T", # 15 minutes + Frequency.T30: "30T", # 30 minutes + Frequency.S: "S", # Seconds +} + +# Maximum sequence lengths to avoid pandas OutOfBoundsDatetime errors +SHORT_FREQUENCY_MAX_LENGTHS = { + Frequency.A: MAX_YEARS, + Frequency.Q: MAX_YEARS * 4, + Frequency.M: MAX_YEARS * 12, + Frequency.W: int(MAX_YEARS * 52.1775), + Frequency.D: int(MAX_YEARS * 365.2425), +} + +HIGH_FREQUENCY_MAX_LENGTHS = { + Frequency.H: int(MAX_YEARS * 365.2425 * 24), + Frequency.S: int(MAX_YEARS * 365.2425 * 24 * 60 * 60), + Frequency.T1: int(MAX_YEARS * 365.2425 * 24 * 60), + Frequency.T5: int(MAX_YEARS * 365.2425 * 24 * 12), + Frequency.T10: int(MAX_YEARS * 365.2425 * 24 * 6), + Frequency.T15: int(MAX_YEARS * 365.2425 * 24 * 4), + Frequency.T30: int(MAX_YEARS * 365.2425 * 24 * 2), +} + +# Combined max lengths for all frequencies +ALL_FREQUENCY_MAX_LENGTHS = { + **SHORT_FREQUENCY_MAX_LENGTHS, + **HIGH_FREQUENCY_MAX_LENGTHS, +} + +# GIFT eval-based frequency weights from actual dataset analysis +GIFT_EVAL_FREQUENCY_WEIGHTS: Dict[Frequency, float] = { + Frequency.H: 25.0, # Hourly - most common + Frequency.D: 23.4, # Daily - second most common + Frequency.W: 12.9, # Weekly - third most common + Frequency.T15: 9.7, # 15-minute + Frequency.T5: 9.7, # 5-minute + Frequency.M: 7.3, # Monthly + Frequency.T10: 4.8, # 10-minute + Frequency.S: 4.8, # 10-second + Frequency.T1: 1.6, # 1-minute + Frequency.Q: 0.8, # Quarterly + Frequency.A: 0.8, # Annual +} + +# GIFT eval-based length ranges derived from actual dataset analysis +# Format: (min_length, max_length, optimal_start, optimal_end) +GIFT_EVAL_LENGTH_RANGES: Dict[Frequency, Tuple[int, int, int, int]] = { + # Low frequency ranges (based on actual GIFT eval data + logical extensions) + Frequency.A: (25, 100, 30, 70), + Frequency.Q: (25, 150, 50, 120), + Frequency.M: (40, 1000, 100, 600), + Frequency.W: (50, 3500, 100, 1500), + # Medium frequency ranges + Frequency.D: (150, 25000, 300, 7000), # Daily: covers 1-year+ scenarios + Frequency.H: (600, 35000, 700, 17000), + # High frequency ranges (extended for shorter realistic scenarios) + Frequency.T1: (200, 2500, 1200, 1800), # 1-minute: day to few days + Frequency.S: (7500, 9500, 7900, 9000), + Frequency.T15: (1000, 140000, 50000, 130000), + Frequency.T5: (200, 105000, 20000, 95000), + Frequency.T10: (40000, 55000, 47000, 52000), + Frequency.T30: (100, 50000, 10000, 40000), +} + + +# ============================================================================ +# Frequency Parsing and Validation +# ============================================================================ + + +def parse_frequency(freq_str: str) -> Frequency: + """ + Parse frequency string to Frequency enum, robust to variations. 
+ + Handles various frequency string formats: + - Standard: "A", "Q", "M", "W", "D", "H", "S" + - Pandas-style: "A-DEC", "W-SUN", "QE-MAR" + - Minutes: "5T", "10min", "1T" + - Case variations: "a", "h", "D" + + Args: + freq_str: The frequency string to parse (e.g., "5T", "W-SUN", "M") + + Returns: + Corresponding Frequency enum member + + Raises: + ValueError: If the frequency string is not supported + """ + # Handle minute-based frequencies BEFORE pandas standardization + # because pandas converts "5T" to just "min", losing the multiplier + minute_match = re.match(r"^(\d*)T$", freq_str, re.IGNORECASE) or re.match( + r"^(\d*)min$", freq_str, re.IGNORECASE + ) + if minute_match: + multiplier = int(minute_match.group(1)) if minute_match.group(1) else 1 + enum_key = f"T{multiplier}" + try: + return Frequency[enum_key] + except KeyError: + logger.warning( + f"Unsupported minute frequency '{freq_str}' (multiplier: {multiplier}). " + f"Falling back to '1min' ({Frequency.T1.value})." + ) + return Frequency.T1 + + # Now standardize frequency string for other cases + try: + offset = pd.tseries.frequencies.to_offset(freq_str) + standardized_freq = offset.name + except Exception: + standardized_freq = freq_str + + # Handle other frequencies by their base (e.g., 'W-SUN' -> 'W', 'A-DEC' -> 'A') + base_freq = standardized_freq.split("-")[0].upper() + + freq_map = { + "A": Frequency.A, + "Y": Frequency.A, # Alias for Annual + "YE": Frequency.A, # Alias for Annual + "Q": Frequency.Q, + "QE": Frequency.Q, # Alias for Quarterly + "M": Frequency.M, + "ME": Frequency.M, # Alias for Monthly + "W": Frequency.W, + "D": Frequency.D, + "H": Frequency.H, + "S": Frequency.S, + } + + if base_freq in freq_map: + return freq_map[base_freq] + + raise NotImplementedError(f"Frequency '{standardized_freq}' is not supported.") + + +def validate_frequency_safety( + start_date: np.datetime64, total_length: int, frequency: Frequency +) -> bool: + """ + Check if start date and frequency combination is safe for pandas datetime operations. + + This function verifies that pd.date_range(start=start_date, periods=total_length, freq=freq_str) + will not raise an OutOfBoundsDatetime error, accounting for pandas' datetime bounds + (1677-09-21 to 2262-04-11) and realistic frequency limitations. 
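A few usage sketches for `parse_frequency`, following the formats its docstring lists (illustrative only; the module path is the one imported elsewhere in this diff):

```python
from src.data.frequency import Frequency, parse_frequency

assert parse_frequency("5T") is Frequency.T5      # minute multiplier preserved
assert parse_frequency("10min") is Frequency.T10  # "min" spelling accepted
assert parse_frequency("W-SUN") is Frequency.W    # anchored weekly collapses to W
assert parse_frequency("ME") is Frequency.M       # modern monthly alias
parse_frequency("7min")                           # unsupported multiplier: warns, falls back to T1
```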
+ + Args: + start_date: The proposed start date for the time series + total_length: Total length of the time series + frequency: The frequency of the time series + + Returns: + True if the combination is safe, False otherwise + """ + try: + # Get the pandas frequency string + freq_str = frequency.to_pandas_freq(for_date_range=True) + + # Convert numpy datetime64 to pandas Timestamp for date_range + start_pd = pd.Timestamp(start_date) + + # Check if start date is within pandas' valid datetime range + if start_pd < pd.Timestamp.min or start_pd > pd.Timestamp.max: + return False + + # Check maximum length constraints + max_length = frequency.get_max_safe_length() + if total_length > max_length: + return False + + # For low frequencies, be extra conservative + if frequency.is_low_frequency(): + if frequency == Frequency.A and total_length > 500: # Max ~500 years + return False + elif frequency == Frequency.Q and total_length > 2000: # Max ~500 years + return False + elif frequency == Frequency.M and total_length > 6000: # Max ~500 years + return False + + # Calculate approximate end date + days_per_period = frequency.get_days_per_period() + approx_days = total_length * days_per_period + + # For annual/quarterly frequencies, add extra safety margin + if frequency in [Frequency.A, Frequency.Q]: + approx_days *= 1.1 # 10% safety margin + + end_date = start_pd + pd.Timedelta(days=approx_days) + + # Check if end date is within pandas' valid datetime range + if end_date < pd.Timestamp.min or end_date > pd.Timestamp.max: + return False + + # Try to create the date range as final validation + pd.date_range(start=start_pd, periods=total_length, freq=freq_str) + return True + + except (pd.errors.OutOfBoundsDatetime, OverflowError, ValueError): + return False + + +# ============================================================================ +# Frequency Selection Utilities +# ============================================================================ + + +def select_safe_random_frequency(total_length: int, rng: Generator) -> Frequency: + """ + Select a random frequency suitable for a given total length of a time series, + based on actual GIFT eval dataset patterns and distributions. + + The selection logic: + 1. Filters frequencies that can handle the given total_length + 2. Applies base weights derived from actual GIFT eval frequency distribution + 3. Strongly boosts frequencies that are in their optimal length ranges + 4. 
Handles edge cases gracefully with fallbacks + + Args: + total_length: The total length of the time series (history + future) + rng: A numpy random number generator instance + + Returns: + A randomly selected frequency that matches GIFT eval patterns + """ + # Find valid frequencies and calculate weighted scores + valid_frequencies = [] + frequency_scores = [] + + for freq in Frequency: + # Check basic timestamp overflow limits + max_allowed = freq.get_max_safe_length() + if total_length > max_allowed: + continue + + # Check if frequency has defined ranges + min_len, max_len, optimal_start, optimal_end = freq.get_length_range() + + # Must be within the frequency's realistic range + if total_length < min_len or total_length > max_len: + continue + + valid_frequencies.append(freq) + + # Calculate fitness score based on GIFT eval patterns + base_weight = freq.get_gift_eval_weight() + + # Enhanced length-based fitness scoring + if optimal_start <= total_length <= optimal_end: + # In optimal range - very strong preference + length_multiplier = 5.0 + else: + # Outside optimal but within valid range - calculate penalty + if total_length < optimal_start: + # Below optimal range + distance_ratio = (optimal_start - total_length) / ( + optimal_start - min_len + ) + else: + # Above optimal range + distance_ratio = (total_length - optimal_end) / (max_len - optimal_end) + + # Apply graduated penalty: closer to optimal = higher score + length_multiplier = 0.3 + 1.2 * (1.0 - distance_ratio) # Range: 0.3-1.5 + + final_score = base_weight * length_multiplier + frequency_scores.append(final_score) + + # Handle edge cases with smart fallbacks + if not valid_frequencies: + # Fallback strategy based on typical length patterns + if total_length <= 100: + # Very short series - prefer low frequencies + fallback_order = [ + Frequency.A, + Frequency.Q, + Frequency.M, + Frequency.W, + Frequency.D, + ] + elif total_length <= 1000: + # Medium short series - prefer daily/weekly + fallback_order = [Frequency.D, Frequency.W, Frequency.H, Frequency.M] + else: + # Longer series - prefer higher frequencies + fallback_order = [Frequency.H, Frequency.D, Frequency.T15, Frequency.T5] + + for fallback_freq in fallback_order: + max_allowed = fallback_freq.get_max_safe_length() + if total_length <= max_allowed: + return fallback_freq + # Last resort + return Frequency.D + + if len(valid_frequencies) == 1: + return valid_frequencies[0] + + # Select based on weighted probabilities + scores = np.array(frequency_scores) + probabilities = scores / scores.sum() + + return rng.choice(valid_frequencies, p=probabilities) + + +def select_safe_start_date( + total_length: int, + frequency: Frequency, + rng: Generator = np.random.default_rng(), + max_retries: int = 10, +) -> np.datetime64: + """ + Select a safe start date that ensures the entire time series (history + future) + will not exceed pandas' datetime bounds. 
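A minimal usage sketch of the weighted draw above. The concrete result depends on the GIFT-eval weights and length ranges, so only the return type is checked here; names follow the definitions in this module.

```python
import numpy as np
from src.data.frequency import Frequency, select_safe_random_frequency

rng = np.random.default_rng(42)
freq = select_safe_random_frequency(total_length=1_000, rng=rng)
# A length of 1000 lies inside the valid range of several frequencies
# (e.g. H, D, W), each scored as base_weight * length_multiplier.
assert isinstance(freq, Frequency)
```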
+ + Args: + total_length: Total length of the time series (history + future) + frequency: Time series frequency + rng: Random number generator instance + max_retries: Maximum number of retry attempts + + Returns: + A safe start date that prevents timestamp overflow + + Raises: + ValueError: If no safe start date is found after max_retries or if the required + time span exceeds the available date window + """ + days_per_period = frequency.get_days_per_period() + + # Calculate approximate duration in days + total_days = total_length * days_per_period + + # Define safe bounds: ensure end date doesn't exceed BASE_END_DATE + latest_safe_start = BASE_END_DATE - np.timedelta64(int(total_days), "D") + earliest_safe_start = BASE_START_DATE + + # Check if the required time span exceeds the available window + if latest_safe_start < earliest_safe_start: + available_days = ( + (BASE_END_DATE - BASE_START_DATE).astype("timedelta64[D]").astype(int) + ) + available_years = available_days / 365.25 + required_years = total_days / 365.25 + raise ValueError( + f"Required time span ({required_years:.1f} years, {total_days:.0f} days) " + f"exceeds available date window ({available_years:.1f} years, {available_days} days). " + f"Reduce total_length ({total_length}) or extend the date window." + ) + + # Convert to nanoseconds for random sampling + earliest_ns = earliest_safe_start.astype("datetime64[ns]").astype(np.int64) + latest_ns = latest_safe_start.astype("datetime64[ns]").astype(np.int64) + + for _ in range(max_retries): + # Uniformly sample a start date within bounds + random_ns = rng.integers(earliest_ns, latest_ns + 1) + start_date = np.datetime64(int(random_ns), "ns") + + # Verify safety + if validate_frequency_safety(start_date, total_length, frequency): + return start_date + + # Default to base start date if no safe start date is found + return BASE_START_DATE diff --git a/src/data/loaders.py b/src/data/loaders.py new file mode 100644 index 0000000000000000000000000000000000000000..c5e886d9a740e17d41072c3ebfc3e25b11c79c25 --- /dev/null +++ b/src/data/loaders.py @@ -0,0 +1,661 @@ +import logging +import random +from typing import Dict, Iterator, List, Optional + +import numpy as np +import pandas as pd +import torch + +from src.data.batch_composer import BatchComposer, ComposedDataset +from src.data.containers import BatchTimeSeriesContainer +from src.data.frequency import parse_frequency +from src.gift_eval.constants import ALL_DATASETS +from src.gift_eval.data import Dataset as GiftEvalDataset + +logger = logging.getLogger(__name__) + + +class GiftEvalDataLoader: + """ + Data loader for GIFT-eval datasets, converting them to BatchTimeSeriesContainer format. + Supports both training and validation modes. + """ + + TERMS = ["short", "medium", "long"] + + def __init__( + self, + mode: str = "train", + batch_size: int = 32, + device: Optional[torch.device] = None, + shuffle: bool = True, + to_univariate: bool = False, + max_context_length: Optional[int] = None, + max_windows: int = 20, + skip_datasets_with_nans: bool = False, + datasets_to_use: Optional[List[str]] = None, + dataset_storage_path: Optional[str] = None, + ): + """ + Initialize GIFT-eval data loader. 
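Before moving on to the loaders, here is a small sketch of how the two safety helpers above are meant to be combined. It is illustrative only; `BASE_START_DATE`/`BASE_END_DATE` and `MAX_YEARS` come from the constants referenced earlier in this diff.

```python
import numpy as np
from src.data.frequency import Frequency, select_safe_start_date, validate_frequency_safety

rng = np.random.default_rng(0)
start = select_safe_start_date(total_length=2_000, frequency=Frequency.H, rng=rng)
# The returned np.datetime64 keeps the full 2000-step hourly range inside the
# configured window, so building the index afterwards should be safe:
assert validate_frequency_safety(start, 2_000, Frequency.H)
```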
+ + Args: + mode: Either "train" or "validation" + batch_size: Number of samples per batch + device: Device to load data to + shuffle: Whether to shuffle data + to_univariate: Whether to convert multivariate data to multiple univariate series + max_context_length: Optional maximum total window length (context + forecast) to prevent memory issues + max_windows: Number of windows to use for training/validation + skip_datasets_with_nans: Whether to skip datasets/series that contain NaN values + datasets_to_use: Optional list of dataset names to use. If None, uses all available datasets + dataset_storage_path: Path on disk where GIFT-eval HuggingFace datasets are stored + """ + # Use specified datasets or all available datasets if none specified + if datasets_to_use is not None and len(datasets_to_use) > 0: + # Validate that requested datasets are available + invalid_datasets = [ds for ds in datasets_to_use if ds not in ALL_DATASETS] + if invalid_datasets: + logger.warning(f"Invalid datasets requested: {invalid_datasets}") + logger.warning(f"Available datasets: {ALL_DATASETS}") + # Use only valid datasets + self.dataset_names = [ + ds for ds in datasets_to_use if ds in ALL_DATASETS + ] + else: + self.dataset_names = datasets_to_use + else: + self.dataset_names = ALL_DATASETS + + # Log dataset selection + if datasets_to_use is not None and len(datasets_to_use) > 0: + logger.info( + f"Using subset of datasets: {len(self.dataset_names)}/{len(ALL_DATASETS)} datasets" + ) + logger.info(f"Selected datasets: {self.dataset_names}") + else: + logger.info( + f"Using all available datasets: {len(self.dataset_names)} datasets" + ) + + self.terms = self.TERMS + self.mode = mode + self.batch_size = batch_size + self.device = device + self.shuffle = shuffle + self.to_univariate = to_univariate + self.max_context_length = max_context_length + self.skip_datasets_with_nans = skip_datasets_with_nans + + # Window configuration based on mode + self.max_windows = max_windows + self.dataset_storage_path = dataset_storage_path + + # Load all datasets and prepare data + self._load_datasets() + + # Create iterator state + self._current_idx = 0 + self._epoch_data = [] + self._prepare_epoch_data() + + def _load_datasets(self) -> None: + """Load all specified GIFT-eval datasets.""" + self.datasets = {} + self.dataset_prediction_lengths = {} + + for dataset_name in self.dataset_names: + if dataset_name.startswith("m4_"): + max_windows = 1 + else: + max_windows = self.max_windows + try: + # Determine if we need univariate conversion + # First check with multivariate to see target dimension + temp_dataset = GiftEvalDataset( + name=dataset_name, + term=self.terms[0], # Use first term to check dimensionality + to_univariate=False, + max_windows=max_windows, + storage_path=self.dataset_storage_path, + ) + + # Convert to univariate if needed + to_univariate = self.to_univariate and temp_dataset.target_dim > 1 + + # Load datasets for all terms + for term in self.terms: + dataset_key = f"{dataset_name}_{term}" + dataset = GiftEvalDataset( + name=dataset_name, + term=term, + to_univariate=to_univariate, + max_windows=max_windows, + storage_path=self.dataset_storage_path, + ) + + self.datasets[dataset_key] = dataset + self.dataset_prediction_lengths[dataset_key] = ( + dataset.prediction_length + ) + + logger.info( + f"Loaded {dataset_key} - prediction_length: {dataset.prediction_length}, " + f"frequency: {dataset.freq}, target_dim: {dataset.target_dim}, " + f"min_length: {dataset._min_series_length}, windows: {dataset.windows}" + ) 
+ + except Exception as e: + logger.warning(f"Failed to load dataset {dataset_name}: {str(e)}") + continue + + def _contains_nan(self, data_entry: dict) -> bool: + """Check if a data entry contains NaN values.""" + target = data_entry.get("target") + if target is None: + return False + + # Convert to numeric numpy array for robust NaN checking + try: + target_np = np.asarray(target, dtype=np.float32) + return np.isnan(target_np).any() + except Exception: + logger.warning( + "NaN check: failed to coerce target to float32; skipping entry" + ) + return True + + def _convert_to_container( + self, data_entries: List[dict], prediction_length: int, dataset_freq: str + ) -> BatchTimeSeriesContainer: + """Convert a batch of data entries to BatchTimeSeriesContainer format with fixed future length.""" + batch_size = len(data_entries) + max_history_len = 0 + + # First pass: determine max history length after truncation + for entry in data_entries: + target = np.asarray(entry["target"], dtype=np.float32) + if target.ndim == 1: + target = target.reshape(1, -1) + + _, seq_len = target.shape + + # Only consider up to the last (max_context_length) values + effective_max_context = ( + self.max_context_length + if self.max_context_length is not None + else seq_len + ) + if seq_len > effective_max_context: + seq_len = effective_max_context + + # History is up to (max_context_length - prediction_length) + history_len = max( + 0, min(seq_len, effective_max_context) - prediction_length + ) + max_history_len = max(max_history_len, history_len) + + # Get number of channels from first entry + first_target = np.asarray(data_entries[0]["target"], dtype=np.float32) + if first_target.ndim == 1: + # Shape to [channels, time] + first_target = first_target.reshape(1, -1) + num_channels = first_target.shape[0] + + # Allocate arrays + history_values = np.full( + (batch_size, max_history_len, num_channels), np.nan, dtype=np.float32 + ) + future_values = np.full( + (batch_size, prediction_length, num_channels), np.nan, dtype=np.float32 + ) + history_mask = np.zeros((batch_size, max_history_len), dtype=bool) + + # Second pass: fill arrays + for i, entry in enumerate(data_entries): + target = np.asarray(entry["target"], dtype=np.float32) + if target.ndim == 1: + target = target.reshape(1, -1) + + # Truncate to last effective_max_context points if needed + full_seq_len = target.shape[1] + total_len_allowed = ( + self.max_context_length + if self.max_context_length is not None + else full_seq_len + ) + total_len_for_entry = min(full_seq_len, total_len_allowed) + + if total_len_for_entry < prediction_length + 1: + # Not enough length to build (history + future). Signal to caller. 
+ raise ValueError( + "Entry too short after max_context_length truncation to form history+future window" + ) + + truncated = target[:, -total_len_for_entry:] + cur_history_len = total_len_for_entry - prediction_length + + hist = truncated[:, :cur_history_len] # [C, H] + fut = truncated[ + :, cur_history_len : cur_history_len + prediction_length + ] # [C, P] + + # Write into batch arrays with time last -> transpose to [H, C] / [P, C] + history_values[i, :cur_history_len, :] = hist.T + future_values[i, :, :] = fut.T + history_mask[i, :cur_history_len] = True + + # Get start timestamp and frequency (replicate across batch) + start_timestamp = data_entries[0]["start"] + if hasattr(start_timestamp, "to_timestamp"): + start_numpy = start_timestamp.to_timestamp().to_numpy() + else: + start_numpy = pd.Timestamp(start_timestamp).to_numpy() + start_list = [start_numpy for _ in range(batch_size)] + + # Get frequency enum and replicate across batch + frequency_enum = parse_frequency(dataset_freq) + frequency_list = [frequency_enum for _ in range(batch_size)] + + # Create the container + return BatchTimeSeriesContainer( + history_values=torch.tensor(history_values, dtype=torch.float32), + future_values=torch.tensor(future_values, dtype=torch.float32), + start=start_list, + frequency=frequency_list, + history_mask=torch.tensor(history_mask, dtype=torch.bool) + if self.mode == "train" + else None, + ) + + def _prepare_epoch_data(self) -> None: + """Prepare all batches for one epoch.""" + self._epoch_data = [] + + for dataset_key, dataset in self.datasets.items(): + try: + # Get appropriate dataset based on mode + if self.mode == "train": + data = dataset.training_dataset + else: + data = dataset.validation_dataset + + # Collect all valid data entries + valid_entries = [] + dataset_freq = dataset.freq + prediction_length = self.dataset_prediction_lengths[dataset_key] + + for entry in data: + # Skip if contains NaN and configured to do so + if self.skip_datasets_with_nans and self._contains_nan(entry): + continue + + # Check if we have enough data + target = np.asarray(entry["target"]) + if target.ndim == 1: + seq_len = len(target) + else: + seq_len = target.shape[1] + + # Need at least prediction_length + 1 for training + if self.mode == "train" and seq_len < prediction_length + 1: + continue + + valid_entries.append(entry) + + if not valid_entries: + logger.warning(f"No valid entries found for {dataset_key}") + continue + + # Create batches + for i in range(0, len(valid_entries), self.batch_size): + batch_entries = valid_entries[i : i + self.batch_size] + try: + batch_container = self._convert_to_container( + batch_entries, prediction_length, dataset_freq + ) + self._epoch_data.append((dataset_key, batch_container)) + except Exception as e: + logger.warning( + f"Failed to create batch for {dataset_key}: {str(e)}" + ) + continue + + except Exception as e: + logger.warning( + f"Failed to process dataset {dataset_key}: {str(e)}. " + f"Dataset may be too short for the required offset." 
+ ) + continue + + # Shuffle if in training mode + if self.mode == "train" and self.shuffle: + random.shuffle(self._epoch_data) + + logger.info(f"Prepared {len(self._epoch_data)} batches for {self.mode} mode") + + def __iter__(self) -> Iterator[BatchTimeSeriesContainer]: + """Iterate through batches for one epoch.""" + # Reset index at the start of each epoch + self._current_idx = 0 + + # Reshuffle data for each new epoch if in training mode + if self.mode == "train" and self.shuffle: + random.shuffle(self._epoch_data) + + return self + + def __next__(self) -> BatchTimeSeriesContainer: + """Get next batch.""" + if not self._epoch_data: + raise StopIteration("No valid data available") + + # Check if we've exhausted the epoch + if self._current_idx >= len(self._epoch_data): + raise StopIteration + + # Get current batch + dataset_key, batch = self._epoch_data[self._current_idx] + self._current_idx += 1 + + # Move to device if specified + if self.device is not None: + batch.to_device(self.device) + + return batch + + def __len__(self) -> int: + """Return number of batches per epoch.""" + return len(self._epoch_data) + + +class CyclicGiftEvalDataLoader: + """ + Wrapper for GiftEvalDataLoader that provides cycling behavior for training. + This allows training for a fixed number of iterations per epoch, cycling through + the available data as needed. + """ + + def __init__(self, base_loader: GiftEvalDataLoader, num_iterations_per_epoch: int): + """ + Initialize the cyclic data loader. + + Args: + base_loader: The underlying GiftEvalDataLoader + num_iterations_per_epoch: Number of iterations to run per epoch + """ + self.base_loader = base_loader + self.num_iterations_per_epoch = num_iterations_per_epoch + self.dataset_names = base_loader.dataset_names + self.device = base_loader.device + + def __iter__(self) -> Iterator[BatchTimeSeriesContainer]: + """Iterate for exactly num_iterations_per_epoch iterations.""" + self._current_iteration = 0 + self._base_iter = iter(self.base_loader) + return self + + def __next__(self) -> BatchTimeSeriesContainer: + """Get next batch, cycling through base loader as needed.""" + if self._current_iteration >= self.num_iterations_per_epoch: + raise StopIteration + + try: + batch = next(self._base_iter) + except StopIteration: + # Restart the base iterator when exhausted + self._base_iter = iter(self.base_loader) + batch = next(self._base_iter) + + self._current_iteration += 1 + return batch + + def __len__(self) -> int: + """Return the configured number of iterations per epoch.""" + return self.num_iterations_per_epoch + + +def create_synthetic_dataloader( + base_data_dir: str, + batch_size: int = 128, + num_batches_per_epoch: int = 1000, + generator_proportions: Optional[Dict[str, float]] = None, + mixed_batches: bool = True, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + device: Optional[torch.device] = None, + num_workers: int = 0, + pin_memory: bool = True, + global_seed: int = 42, + nan_stats_path: Optional[str] = None, + nan_patterns_path: Optional[str] = None, + chosen_scaler_name: Optional[str] = None, +) -> torch.utils.data.DataLoader: + """ + Create a PyTorch DataLoader for training with saved generator batches. 
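A usage sketch for the two loader classes above. The configuration values are hypothetical and the datasets must be present on disk; the dataset names are taken from `ALL_DATASETS` in `src/gift_eval/constants.py`.

```python
import torch
from src.data.loaders import CyclicGiftEvalDataLoader, GiftEvalDataLoader

base_loader = GiftEvalDataLoader(
    mode="train",
    batch_size=32,
    device=torch.device("cpu"),
    datasets_to_use=["m4_hourly", "electricity/H"],  # small subset for illustration
    max_context_length=2048,
)
# Fix the number of optimizer steps per epoch regardless of how much data is available.
train_loader = CyclicGiftEvalDataLoader(base_loader, num_iterations_per_epoch=100)
for batch in train_loader:  # each item is a BatchTimeSeriesContainer
    ...
```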
+ + Args: + base_data_dir: Base directory containing generator subdirectories + batch_size: Size of each training batch + num_batches_per_epoch: Number of batches per epoch + generator_proportions: Dict mapping generator names to proportions + mixed_batches: Whether to create mixed or uniform batches + augmentations: Dict mapping augmentation names to booleans + augmentation_probabilities: Dict mapping augmentation names to probabilities + device: Target device + num_workers: Number of DataLoader workers + pin_memory: Whether to pin memory + global_seed: Global random seed + nan_stats_path: Path to nan stats file + chosen_scaler_name: Name of the scaler that used in training + + Returns: + PyTorch DataLoader + """ + + # Create batch composer + composer = BatchComposer( + base_data_dir=base_data_dir, + generator_proportions=generator_proportions, + mixed_batches=mixed_batches, + device=device, + augmentations=augmentations, + augmentation_probabilities=augmentation_probabilities, + global_seed=global_seed, + nan_stats_path=nan_stats_path, + nan_patterns_path=nan_patterns_path, + chosen_scaler_name=chosen_scaler_name, + ) + + # Create dataset + dataset = ComposedDataset( + batch_composer=composer, + num_batches_per_epoch=num_batches_per_epoch, + batch_size=batch_size, + ) + + # Custom collate function for BatchTimeSeriesContainer + def collate_fn(batch): + """Custom collate function that returns a single BatchTimeSeriesContainer.""" + # Since each item is already a BatchTimeSeriesContainer with batch_size samples, + # and DataLoader batch_size=1, we just return the first (and only) item + return batch[0] + + # Create DataLoader + dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=1, # Each dataset item is already a complete batch + shuffle=False, + num_workers=num_workers, + pin_memory=pin_memory, + collate_fn=collate_fn, + drop_last=False, + ) + + logger.info( + f"Created DataLoader with {len(dataset)} batches per epoch, " + f"batch_size={batch_size}, mixed_batches={mixed_batches}" + ) + + return dataloader + + +class SyntheticValidationDataset(torch.utils.data.Dataset): + """ + Fixed synthetic validation dataset that generates a small number of batches + using the same composition approach as training data. + """ + + def __init__( + self, + base_data_dir: str, + batch_size: int = 128, + num_batches: int = 2, + future_length: int = 512, + generator_proportions: Optional[Dict[str, float]] = None, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + device: Optional[torch.device] = None, + global_seed: int = 42, + chosen_scaler_name: Optional[str] = None, + nan_stats_path: Optional[str] = None, + nan_patterns_path: Optional[str] = None, + rank: int = 0, + world_size: int = 1, + ): + """ + Initialize the validation dataset. 
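A usage sketch for the factory above. The directory layout and generator names are placeholders, not taken from this diff.

```python
from src.data.loaders import create_synthetic_dataloader

loader = create_synthetic_dataloader(
    base_data_dir="data/generators",                        # hypothetical path
    batch_size=128,
    num_batches_per_epoch=1_000,
    generator_proportions={"kernel_synth": 0.5, "gp": 0.5},  # hypothetical generator names
    mixed_batches=True,
)
for batch in loader:  # the collate_fn yields one BatchTimeSeriesContainer per step
    ...
```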
+ + Args: + base_data_dir: Base directory containing generator subdirectories + batch_size: Size of each validation batch + num_batches: Number of validation batches to generate (1 or 2) + generator_proportions: Dict mapping generator names to proportions + device: Device to load tensors to + global_seed: Global random seed + chosen_scaler_name: Name of the scaler that used in training + """ + self.batch_size = batch_size + self.num_batches = num_batches + self.device = device + + # Create batch composer; force validation to use max-length windows (no length shortening) + val_augmentations = dict(augmentations or {}) + val_augmentations["length_shortening"] = False + + self.batch_composer = BatchComposer( + base_data_dir=base_data_dir, + generator_proportions=generator_proportions, + mixed_batches=True, # Use mixed batches for validation + device=device, + global_seed=global_seed + 999999, + augmentations=val_augmentations, + augmentation_probabilities=augmentation_probabilities, + nan_stats_path=nan_stats_path, + nan_patterns_path=nan_patterns_path, + chosen_scaler_name=chosen_scaler_name, + rank=rank, + world_size=world_size, + ) + + # Pre-generate fixed validation batches + self.validation_batches = [] + for i in range(num_batches): + batch, _ = self.batch_composer.create_batch( + batch_size=batch_size, + future_length=future_length, + seed=global_seed + + 999999 + + i, # Fixed seeds for reproducible validation + ) + self.validation_batches.append(batch) + + logger.info( + f"Created {num_batches} fixed validation batches with batch_size={batch_size}" + ) + + def __len__(self) -> int: + return self.num_batches + + def __getitem__(self, idx: int) -> BatchTimeSeriesContainer: + """ + Get a pre-generated validation batch by index. + + Args: + idx: Batch index + + Returns: + BatchTimeSeriesContainer + """ + if idx >= len(self.validation_batches): + raise IndexError(f"Batch index {idx} out of range") + + batch = self.validation_batches[idx] + + # Move to device if needed + if self.device is not None: + batch.to_device(self.device) + + return batch + + +def create_synthetic_dataset( + base_data_dir: str, + batch_size: int = 128, + num_batches_per_epoch: int = 1000, + generator_proportions: Optional[Dict[str, float]] = None, + mixed_batches: bool = True, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + global_seed: int = 42, + nan_stats_path: Optional[str] = None, + nan_patterns_path: Optional[str] = None, + chosen_scaler_name: Optional[str] = None, + rank: int = 0, + world_size: int = 1, +) -> ComposedDataset: + """ + Creates the ComposedDataset for training with saved generator batches. + + Args: + base_data_dir: Base directory containing generator subdirectories. + batch_size: Size of each training batch. + num_batches_per_epoch: Number of batches per epoch. + generator_proportions: Dict mapping generator names to proportions. + mixed_batches: Whether to create mixed or uniform batches. + augmentations: Dict mapping augmentation names to booleans. + global_seed: Global random seed. + nan_stats_path: Path to nan stats file. + chosen_scaler_name: Name of the scaler to use. + Returns: + A ComposedDataset instance. 
+ """ + # Create batch composer + composer = BatchComposer( + base_data_dir=base_data_dir, + generator_proportions=generator_proportions, + mixed_batches=mixed_batches, + device=None, # Device is handled in the training loop + augmentations=augmentations, + augmentation_probabilities=augmentation_probabilities, + global_seed=global_seed, + nan_stats_path=nan_stats_path, + nan_patterns_path=nan_patterns_path, + chosen_scaler_name=chosen_scaler_name, + rank=rank, + world_size=world_size, + ) + + # Create and return the dataset + dataset = ComposedDataset( + batch_composer=composer, + num_batches_per_epoch=num_batches_per_epoch, + batch_size=batch_size, + ) + + logger.info( + f"Created ComposedDataset with {len(dataset)} batches per epoch, " + f"batch_size={batch_size}, mixed_batches={mixed_batches}" + ) + + return dataset \ No newline at end of file diff --git a/src/data/scalers.py b/src/data/scalers.py new file mode 100644 index 0000000000000000000000000000000000000000..07cc5c55e04f4f69bc07e74e1aaeedd9059fd0ba --- /dev/null +++ b/src/data/scalers.py @@ -0,0 +1,360 @@ +from abc import ABC, abstractmethod +from typing import Dict, Optional + +import torch + + +class BaseScaler(ABC): + """ + Abstract base class for time series scalers. + + Defines the interface for scaling multivariate time series data with support + for masked values and channel-wise scaling. + """ + + @abstractmethod + def compute_statistics( + self, history_values: torch.Tensor, history_mask: Optional[torch.Tensor] = None + ) -> Dict[str, torch.Tensor]: + """ + Compute scaling statistics from historical data. + """ + pass + + @abstractmethod + def scale( + self, data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply scaling transformation to data. + """ + pass + + @abstractmethod + def inverse_scale( + self, scaled_data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply inverse scaling transformation to recover original scale. + """ + pass + + +class RobustScaler(BaseScaler): + """ + Robust scaler using median and IQR for normalization. + """ + + def __init__(self, epsilon: float = 1e-6, min_scale: float = 1e-3): + if epsilon <= 0: + raise ValueError("epsilon must be positive") + if min_scale <= 0: + raise ValueError("min_scale must be positive") + self.epsilon = epsilon + self.min_scale = min_scale + + def compute_statistics( + self, history_values: torch.Tensor, history_mask: Optional[torch.Tensor] = None + ) -> Dict[str, torch.Tensor]: + """ + Compute median and IQR statistics from historical data with improved numerical stability. 
+ """ + batch_size, seq_len, num_channels = history_values.shape + device = history_values.device + + medians = torch.zeros(batch_size, 1, num_channels, device=device) + iqrs = torch.ones(batch_size, 1, num_channels, device=device) + + for b in range(batch_size): + for c in range(num_channels): + channel_data = history_values[b, :, c] + + if history_mask is not None: + mask = history_mask[b, :].bool() + valid_data = channel_data[mask] + else: + valid_data = channel_data + + if len(valid_data) == 0: + continue + + valid_data = valid_data[torch.isfinite(valid_data)] + + if len(valid_data) == 0: + continue + + median_val = torch.median(valid_data) + medians[b, 0, c] = median_val + + if len(valid_data) > 1: + try: + q75 = torch.quantile(valid_data, 0.75) + q25 = torch.quantile(valid_data, 0.25) + iqr_val = q75 - q25 + iqr_val = torch.max( + iqr_val, torch.tensor(self.min_scale, device=device) + ) + iqrs[b, 0, c] = iqr_val + except Exception: + std_val = torch.std(valid_data) + iqrs[b, 0, c] = torch.max( + std_val, torch.tensor(self.min_scale, device=device) + ) + else: + iqrs[b, 0, c] = self.min_scale + + return {"median": medians, "iqr": iqrs} + + def scale( + self, data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply robust scaling: (data - median) / (iqr + epsilon). + """ + median = statistics["median"] + iqr = statistics["iqr"] + + denominator = torch.max( + iqr + self.epsilon, torch.tensor(self.min_scale, device=iqr.device) + ) + scaled_data = (data - median) / denominator + scaled_data = torch.clamp(scaled_data, -50.0, 50.0) + + return scaled_data + + def inverse_scale( + self, scaled_data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply inverse robust scaling, now compatible with 3D or 4D tensors. + """ + median = statistics["median"] + iqr = statistics["iqr"] + + denominator = torch.max( + iqr + self.epsilon, torch.tensor(self.min_scale, device=iqr.device) + ) + + if scaled_data.ndim == 4: + denominator = denominator.unsqueeze(-1) + median = median.unsqueeze(-1) + + return scaled_data * denominator + median + + +class MinMaxScaler(BaseScaler): + """ + Min-Max scaler that normalizes data to the range [-1, 1]. + """ + + def __init__(self, epsilon: float = 1e-8): + if epsilon <= 0: + raise ValueError("epsilon must be positive") + self.epsilon = epsilon + + def compute_statistics( + self, history_values: torch.Tensor, history_mask: Optional[torch.Tensor] = None + ) -> Dict[str, torch.Tensor]: + """ + Compute min and max statistics from historical data. + """ + batch_size, seq_len, num_channels = history_values.shape + device = history_values.device + + mins = torch.zeros(batch_size, 1, num_channels, device=device) + maxs = torch.ones(batch_size, 1, num_channels, device=device) + + for b in range(batch_size): + for c in range(num_channels): + channel_data = history_values[b, :, c] + + if history_mask is not None: + mask = history_mask[b, :].bool() + valid_data = channel_data[mask] + else: + valid_data = channel_data + + if len(valid_data) == 0: + continue + + min_val = torch.min(valid_data) + max_val = torch.max(valid_data) + + mins[b, 0, c] = min_val + maxs[b, 0, c] = max_val + + if torch.abs(max_val - min_val) < self.epsilon: + maxs[b, 0, c] = min_val + 1.0 + + return {"min": mins, "max": maxs} + + def scale( + self, data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply min-max scaling to range [-1, 1]. 
+ """ + min_val = statistics["min"] + max_val = statistics["max"] + + normalized = (data - min_val) / (max_val - min_val + self.epsilon) + return normalized * 2.0 - 1.0 + + def inverse_scale( + self, scaled_data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply inverse min-max scaling, now compatible with 3D or 4D tensors. + """ + min_val = statistics["min"] + max_val = statistics["max"] + + if scaled_data.ndim == 4: + min_val = min_val.unsqueeze(-1) + max_val = max_val.unsqueeze(-1) + + normalized = (scaled_data + 1.0) / 2.0 + return normalized * (max_val - min_val + self.epsilon) + min_val + + +class MeanScaler(BaseScaler): + """ + A scaler that centers the data by subtracting the channel-wise mean. + + This scaler only performs centering and does not affect the scale of the data. + """ + + def compute_statistics( + self, history_values: torch.Tensor, history_mask: Optional[torch.Tensor] = None + ) -> Dict[str, torch.Tensor]: + """ + Compute the mean for each channel from historical data. + """ + batch_size, seq_len, num_channels = history_values.shape + device = history_values.device + + # Initialize a tensor to store the mean for each channel in each batch item + means = torch.zeros(batch_size, 1, num_channels, device=device) + + for b in range(batch_size): + for c in range(num_channels): + channel_data = history_values[b, :, c] + + # Use the mask to select only valid (observed) data points + if history_mask is not None: + mask = history_mask[b, :].bool() + valid_data = channel_data[mask] + else: + valid_data = channel_data + + # Skip if there's no valid data for this channel + if len(valid_data) == 0: + continue + + # Filter out non-finite values like NaN or Inf before computing + valid_data = valid_data[torch.isfinite(valid_data)] + + if len(valid_data) == 0: + continue + + # Compute the mean and store it + means[b, 0, c] = torch.mean(valid_data) + + return {"mean": means} + + def scale( + self, data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply mean centering: data - mean. + """ + mean = statistics["mean"] + return data - mean + + def inverse_scale( + self, scaled_data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply inverse mean centering: scaled_data + mean. + + Handles both 3D (e.g., training input) and 4D (e.g., model output samples) tensors. + """ + mean = statistics["mean"] + + # Adjust shape for 4D tensors (batch, seq_len, channels, samples) + if scaled_data.ndim == 4: + mean = mean.unsqueeze(-1) + + return scaled_data + mean + + +class MedianScaler(BaseScaler): + """ + A scaler that centers the data by subtracting the channel-wise median. + + This scaler only performs centering and does not affect the scale of the data. + It is more robust to outliers than the MeanScaler. + """ + + def compute_statistics( + self, history_values: torch.Tensor, history_mask: Optional[torch.Tensor] = None + ) -> Dict[str, torch.Tensor]: + """ + Compute the median for each channel from historical data. 
+ """ + batch_size, seq_len, num_channels = history_values.shape + device = history_values.device + + # Initialize a tensor to store the median for each channel in each batch item + medians = torch.zeros(batch_size, 1, num_channels, device=device) + + for b in range(batch_size): + for c in range(num_channels): + channel_data = history_values[b, :, c] + + # Use the mask to select only valid (observed) data points + if history_mask is not None: + mask = history_mask[b, :].bool() + valid_data = channel_data[mask] + else: + valid_data = channel_data + + # Skip if there's no valid data for this channel + if len(valid_data) == 0: + continue + + # Filter out non-finite values like NaN or Inf before computing + valid_data = valid_data[torch.isfinite(valid_data)] + + if len(valid_data) == 0: + continue + + # Compute the median and store it + medians[b, 0, c] = torch.median(valid_data) + + return {"median": medians} + + def scale( + self, data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply median centering: data - median. + """ + median = statistics["median"] + return data - median + + def inverse_scale( + self, scaled_data: torch.Tensor, statistics: Dict[str, torch.Tensor] + ) -> torch.Tensor: + """ + Apply inverse median centering: scaled_data + median. + + Handles both 3D (e.g., training input) and 4D (e.g., model output samples) tensors. + """ + median = statistics["median"] + + # Adjust shape for 4D tensors (batch, seq_len, channels, samples) + if scaled_data.ndim == 4: + median = median.unsqueeze(-1) + + return scaled_data + median diff --git a/src/data/time_features.py b/src/data/time_features.py new file mode 100644 index 0000000000000000000000000000000000000000..2ce2d10efb4e45b66731d80390cd634aeeec57e3 --- /dev/null +++ b/src/data/time_features.py @@ -0,0 +1,564 @@ +import logging +from typing import Any, Dict, List, Optional + +import numpy as np +import pandas as pd +import scipy.fft as fft +import torch +from gluonts.time_feature import time_features_from_frequency_str +from gluonts.time_feature._base import ( + day_of_month, + day_of_month_index, + day_of_week, + day_of_week_index, + day_of_year, + hour_of_day, + hour_of_day_index, + minute_of_hour, + minute_of_hour_index, + month_of_year, + month_of_year_index, + second_of_minute, + second_of_minute_index, + week_of_year, + week_of_year_index, +) +from gluonts.time_feature.holiday import ( + BLACK_FRIDAY, + CHRISTMAS_DAY, + CHRISTMAS_EVE, + CYBER_MONDAY, + EASTER_MONDAY, + EASTER_SUNDAY, + GOOD_FRIDAY, + INDEPENDENCE_DAY, + LABOR_DAY, + MEMORIAL_DAY, + NEW_YEARS_DAY, + NEW_YEARS_EVE, + THANKSGIVING, + SpecialDateFeatureSet, + exponential_kernel, + squared_exponential_kernel, +) +from gluonts.time_feature.seasonality import get_seasonality +from scipy.signal import find_peaks + +from src.data.constants import BASE_END_DATE, BASE_START_DATE +from src.data.frequency import ( + Frequency, + validate_frequency_safety, +) +from src.utils.utils import device + +# Configure logging +logging.basicConfig( + level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger(__name__) + + +# Enhanced feature sets for different frequencies +ENHANCED_TIME_FEATURES = { + # High-frequency features (seconds, minutes) + "high_freq": { + "normalized": [ + second_of_minute, + minute_of_hour, + hour_of_day, + day_of_week, + day_of_month, + ], + "index": [ + second_of_minute_index, + minute_of_hour_index, + hour_of_day_index, + day_of_week_index, + ], + }, + # Medium-frequency 
features (hourly, daily) + "medium_freq": { + "normalized": [ + hour_of_day, + day_of_week, + day_of_month, + day_of_year, + month_of_year, + ], + "index": [ + hour_of_day_index, + day_of_week_index, + day_of_month_index, + week_of_year_index, + ], + }, + # Low-frequency features (weekly, monthly) + "low_freq": { + "normalized": [day_of_week, day_of_month, month_of_year, week_of_year], + "index": [day_of_week_index, month_of_year_index, week_of_year_index], + }, +} + +# Holiday features for different markets/regions +HOLIDAY_FEATURE_SETS = { + "us_business": [ + NEW_YEARS_DAY, + MEMORIAL_DAY, + INDEPENDENCE_DAY, + LABOR_DAY, + THANKSGIVING, + CHRISTMAS_EVE, + CHRISTMAS_DAY, + NEW_YEARS_EVE, + ], + "us_retail": [ + NEW_YEARS_DAY, + EASTER_SUNDAY, + MEMORIAL_DAY, + INDEPENDENCE_DAY, + LABOR_DAY, + THANKSGIVING, + BLACK_FRIDAY, + CYBER_MONDAY, + CHRISTMAS_EVE, + CHRISTMAS_DAY, + NEW_YEARS_EVE, + ], + "christian": [ + NEW_YEARS_DAY, + GOOD_FRIDAY, + EASTER_SUNDAY, + EASTER_MONDAY, + CHRISTMAS_EVE, + CHRISTMAS_DAY, + NEW_YEARS_EVE, + ], +} + + +class TimeFeatureGenerator: + """ + Enhanced time feature generator that leverages full GluonTS capabilities. + """ + + def __init__( + self, + use_enhanced_features: bool = True, + use_holiday_features: bool = True, + holiday_set: str = "us_business", + holiday_kernel: str = "exponential", + holiday_kernel_alpha: float = 1.0, + use_index_features: bool = True, + k_max: int = 15, + include_seasonality_info: bool = True, + use_auto_seasonality: bool = False, # New parameter + max_seasonal_periods: int = 3, # New parameter + ): + """ + Initialize enhanced time feature generator. + + Parameters + ---------- + use_enhanced_features : bool + Whether to use frequency-specific enhanced features + use_holiday_features : bool + Whether to include holiday features + holiday_set : str + Which holiday set to use ('us_business', 'us_retail', 'christian') + holiday_kernel : str + Holiday kernel type ('indicator', 'exponential', 'squared_exponential') + holiday_kernel_alpha : float + Kernel parameter for exponential kernels + use_index_features : bool + Whether to include index-based features alongside normalized ones + k_max : int + Maximum number of time features to pad to + include_seasonality_info : bool + Whether to include seasonality information as features + use_auto_seasonality : bool + Whether to use automatic FFT-based seasonality detection + max_seasonal_periods : int + Maximum number of seasonal periods to detect automatically + """ + self.use_enhanced_features = use_enhanced_features + self.use_holiday_features = use_holiday_features + self.holiday_set = holiday_set + self.use_index_features = use_index_features + self.k_max = k_max + self.include_seasonality_info = include_seasonality_info + self.use_auto_seasonality = use_auto_seasonality + self.max_seasonal_periods = max_seasonal_periods + + # Initialize holiday feature set + self.holiday_feature_set = None + if use_holiday_features and holiday_set in HOLIDAY_FEATURE_SETS: + kernel_func = self._get_holiday_kernel(holiday_kernel, holiday_kernel_alpha) + self.holiday_feature_set = SpecialDateFeatureSet( + HOLIDAY_FEATURE_SETS[holiday_set], kernel_func + ) + + def _get_holiday_kernel(self, kernel_type: str, alpha: float): + """Get holiday kernel function.""" + if kernel_type == "exponential": + return exponential_kernel(alpha) + elif kernel_type == "squared_exponential": + return squared_exponential_kernel(alpha) + else: + # Default indicator kernel + return lambda x: float(x == 0) + + def 
_get_feature_category(self, freq_str: str) -> str: + """Determine feature category based on frequency.""" + if freq_str in ["s", "1min", "5min", "10min", "15min"]: + return "high_freq" + elif freq_str in ["h", "D"]: + return "medium_freq" + else: + return "low_freq" + + def _compute_enhanced_features( + self, period_index: pd.PeriodIndex, freq_str: str + ) -> np.ndarray: + """Compute enhanced time features based on frequency.""" + if not self.use_enhanced_features: + return np.array([]).reshape(len(period_index), 0) + + category = self._get_feature_category(freq_str) + feature_config = ENHANCED_TIME_FEATURES[category] + + features = [] + + # Add normalized features + for feat_func in feature_config["normalized"]: + try: + feat_values = feat_func(period_index) + features.append(feat_values) + except Exception: + continue + + # Add index features if enabled + if self.use_index_features: + for feat_func in feature_config["index"]: + try: + feat_values = feat_func(period_index) + # Normalize index features to [0, 1] range + if feat_values.max() > 0: + feat_values = feat_values / feat_values.max() + features.append(feat_values) + except Exception: + continue + + if features: + return np.stack(features, axis=-1) + else: + return np.array([]).reshape(len(period_index), 0) + + def _compute_holiday_features(self, date_range: pd.DatetimeIndex) -> np.ndarray: + """Compute holiday features.""" + if not self.use_holiday_features or self.holiday_feature_set is None: + return np.array([]).reshape(len(date_range), 0) + + try: + holiday_features = self.holiday_feature_set(date_range) + return holiday_features.T # Transpose to get [time, features] shape + except Exception: + return np.array([]).reshape(len(date_range), 0) + + def _detect_auto_seasonality(self, time_series_values: np.ndarray) -> list: + """ + Detect seasonal periods automatically using FFT analysis. 
+ + Parameters + ---------- + time_series_values : np.ndarray + Time series values for seasonality detection + + Returns + ------- + list + List of detected seasonal periods + """ + if not self.use_auto_seasonality or len(time_series_values) < 10: + return [] + + try: + # Remove NaN values + values = time_series_values[~np.isnan(time_series_values)] + if len(values) < 10: + return [] + + # Simple linear detrending + x = np.arange(len(values)) + coeffs = np.polyfit(x, values, 1) + trend = np.polyval(coeffs, x) + detrended = values - trend + + # Apply Hann window to reduce spectral leakage + window = np.hanning(len(detrended)) + windowed = detrended * window + + # Zero padding for better frequency resolution + padded_length = len(windowed) * 2 + padded_values = np.zeros(padded_length) + padded_values[: len(windowed)] = windowed + + # Compute FFT + fft_values = fft.rfft(padded_values) + fft_magnitudes = np.abs(fft_values) + freqs = np.fft.rfftfreq(padded_length) + + # Exclude DC component + fft_magnitudes[0] = 0.0 + + # Find peaks with threshold (5% of max magnitude) + threshold = 0.05 * np.max(fft_magnitudes) + peak_indices, _ = find_peaks(fft_magnitudes, height=threshold) + + if len(peak_indices) == 0: + return [] + + # Sort by magnitude and take top periods + sorted_indices = peak_indices[ + np.argsort(fft_magnitudes[peak_indices])[::-1] + ] + top_indices = sorted_indices[: self.max_seasonal_periods] + + # Convert frequencies to periods + periods = [] + for idx in top_indices: + if freqs[idx] > 0: + period = 1.0 / freqs[idx] + # Scale back to original length and round + period = round(period / 2) # Account for zero padding + if 2 <= period <= len(values) // 2: # Reasonable period range + periods.append(period) + + return list(set(periods)) # Remove duplicates + + except Exception: + return [] + + def _compute_seasonality_features( + self, + period_index: pd.PeriodIndex, + freq_str: str, + time_series_values: np.ndarray = None, + ) -> np.ndarray: + """Compute seasonality-aware features.""" + if not self.include_seasonality_info: + return np.array([]).reshape(len(period_index), 0) + + all_seasonal_features = [] + + # Original frequency-based seasonality + try: + seasonality = get_seasonality(freq_str) + if seasonality > 1: + positions = np.arange(len(period_index)) + sin_feat = np.sin(2 * np.pi * positions / seasonality) + cos_feat = np.cos(2 * np.pi * positions / seasonality) + all_seasonal_features.extend([sin_feat, cos_feat]) + except Exception: + pass + + # Automatic seasonality detection + if self.use_auto_seasonality and time_series_values is not None: + auto_periods = self._detect_auto_seasonality(time_series_values) + for period in auto_periods: + try: + positions = np.arange(len(period_index)) + sin_feat = np.sin(2 * np.pi * positions / period) + cos_feat = np.cos(2 * np.pi * positions / period) + all_seasonal_features.extend([sin_feat, cos_feat]) + except Exception: + continue + + if all_seasonal_features: + return np.stack(all_seasonal_features, axis=-1) + else: + return np.array([]).reshape(len(period_index), 0) + + def compute_features( + self, + period_index: pd.PeriodIndex, + date_range: pd.DatetimeIndex, + freq_str: str, + time_series_values: np.ndarray = None, + ) -> np.ndarray: + """ + Compute all time features for given period index. 
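A light usage sketch of the generator above, shown before the method body for reference. Holiday features are disabled to keep it dependency-light, and the lowercase `"h"` strings mirror the ones produced by `to_pandas_freq` elsewhere in this diff.

```python
import pandas as pd
from src.data.time_features import TimeFeatureGenerator

gen = TimeFeatureGenerator(use_holiday_features=False)
dates = pd.date_range("2020-01-01", periods=48, freq="h")
feats = gen.compute_features(dates.to_period("h"), dates, "h")
# feats has shape (48, num_features); compute_batch_time_features later pads or
# truncates this to K_max columns.
assert feats.shape[0] == 48
```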
+ + Parameters + ---------- + period_index : pd.PeriodIndex + Period index for computing features + date_range : pd.DatetimeIndex + Corresponding datetime index for holiday features + freq_str : str + Frequency string + time_series_values : np.ndarray, optional + Time series values for automatic seasonality detection + + Returns + ------- + np.ndarray + Time features array of shape [time_steps, num_features] + """ + all_features = [] + + # Standard GluonTS features + try: + standard_features = time_features_from_frequency_str(freq_str) + if standard_features: + std_feat = np.stack( + [feat(period_index) for feat in standard_features], axis=-1 + ) + all_features.append(std_feat) + except Exception: + pass + + # Enhanced features + enhanced_feat = self._compute_enhanced_features(period_index, freq_str) + if enhanced_feat.shape[1] > 0: + all_features.append(enhanced_feat) + + # Holiday features + holiday_feat = self._compute_holiday_features(date_range) + if holiday_feat.shape[1] > 0: + all_features.append(holiday_feat) + + # Seasonality features (including auto-detected) + seasonality_feat = self._compute_seasonality_features( + period_index, freq_str, time_series_values + ) + if seasonality_feat.shape[1] > 0: + all_features.append(seasonality_feat) + + if all_features: + combined_features = np.concatenate(all_features, axis=-1) + else: + combined_features = np.zeros((len(period_index), 1)) + + return combined_features + + +def compute_batch_time_features( + start: List[np.datetime64], + history_length: int, + future_length: int, + batch_size: int, + frequency: List[Frequency], + K_max: int = 6, + time_feature_config: Optional[Dict[str, Any]] = None, +): + """ + Compute time features from start timestamps and frequency. + + Parameters + ---------- + start : array-like, shape (batch_size,) + Start timestamps for each batch item. + history_length : int + Length of history sequence. + future_length : int + Length of target sequence. + batch_size : int + Batch size. + frequency : array-like, shape (batch_size,) + Frequency of the time series. + K_max : int, optional + Maximum number of time features to pad to (default: 6). + time_feature_config : dict, optional + Configuration for enhanced time features. + + Returns + ------- + tuple + (history_time_features, target_time_features) where each is a torch.Tensor + of shape (batch_size, length, K_max). + """ + # Initialize enhanced feature generator + feature_config = time_feature_config or {} + feature_generator = TimeFeatureGenerator(**feature_config) + + # Generate timestamps and features + history_features_list = [] + future_features_list = [] + total_length = history_length + future_length + for i in range(batch_size): + frequency_i = frequency[i] + freq_str = frequency_i.to_pandas_freq(for_date_range=True) + period_freq_str = frequency_i.to_pandas_freq(for_date_range=False) + + # Validate start timestamp is within safe bounds + start_ts = pd.Timestamp(start[i]) + if not validate_frequency_safety(start_ts, total_length, frequency_i): + logger.debug( + f"Start date {start_ts} not safe for total_length={total_length}, frequency={frequency_i}. " + f"Using BASE_START_DATE instead." 
+ ) + start_ts = BASE_START_DATE + + # Create history range with bounds checking + history_range = pd.date_range( + start=start_ts, periods=history_length, freq=freq_str + ) + + # Check if history range goes beyond safe bounds + if history_range[-1] > BASE_END_DATE: + safe_start = BASE_END_DATE - pd.tseries.frequencies.to_offset(freq_str) * ( + history_length + future_length + ) + if safe_start < BASE_START_DATE: + safe_start = BASE_START_DATE + history_range = pd.date_range( + start=safe_start, periods=history_length, freq=freq_str + ) + + future_start = history_range[-1] + pd.tseries.frequencies.to_offset(freq_str) + future_range = pd.date_range( + start=future_start, periods=future_length, freq=freq_str + ) + + # Convert to period indices + history_period_idx = history_range.to_period(period_freq_str) + future_period_idx = future_range.to_period(period_freq_str) + + # Compute enhanced features + history_features = feature_generator.compute_features( + history_period_idx, history_range, freq_str + ) + future_features = feature_generator.compute_features( + future_period_idx, future_range, freq_str + ) + + # Pad or truncate to K_max + history_features = _pad_or_truncate_features(history_features, K_max) + future_features = _pad_or_truncate_features(future_features, K_max) + + history_features_list.append(history_features) + future_features_list.append(future_features) + + # Stack into batch tensors + history_time_features = np.stack(history_features_list, axis=0) + future_time_features = np.stack(future_features_list, axis=0) + + return ( + torch.from_numpy(history_time_features).float().to(device), + torch.from_numpy(future_time_features).float().to(device), + ) + + +def _pad_or_truncate_features(features: np.ndarray, K_max: int) -> np.ndarray: + """Pad with zeros or truncate features to K_max dimensions.""" + seq_len, num_features = features.shape + + if num_features < K_max: + # Pad with zeros + padding = np.zeros((seq_len, K_max - num_features)) + features = np.concatenate([features, padding], axis=-1) + elif num_features > K_max: + # Truncate to K_max (keep most important features first) + features = features[:, :K_max] + + return features diff --git a/src/data/utils.py b/src/data/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..eddcd9a35795d40a1577c658d1c35d1411380888 --- /dev/null +++ b/src/data/utils.py @@ -0,0 +1,75 @@ +import random +from typing import Optional, Tuple, Union + + +def sample_future_length( + range: Union[Tuple[int, int], str] = "gift_eval", + total_length: Optional[int] = None, +) -> int: + """ + Sample a forecast length. + + - If `range` is a tuple, uniformly sample in [min, max]. When `total_length` is + provided, enforce a cap so the result is at most floor(0.45 * total_length). + - If `range` is "gift_eval", sample from a pre-defined weighted set. When + `total_length` is provided, filter out candidates greater than + floor(0.45 * total_length) before sampling. 
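The capping rule described above can be sketched with two quick checks (illustrative only; note the first parameter is literally named `range`):

```python
from src.data.utils import sample_future_length

h = sample_future_length(range=(48, 720), total_length=1_000)
assert h <= 450                                    # cap = floor(0.45 * 1000)

h = sample_future_length("gift_eval", total_length=200)       # cap = 90
assert h in {6, 8, 12, 13, 14, 18, 30, 48, 60, 80}            # GIFT-eval candidates <= 90
```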
+ """ + # Compute the cap when total_length is provided + cap: Optional[int] = None + if total_length is not None: + cap = max(1, int(0.45 * int(total_length))) + + if isinstance(range, tuple): + min_len, max_len = range + if cap is not None: + effective_max_len = min(max_len, cap) + # Ensure valid bounds + if min_len > effective_max_len: + return effective_max_len + return random.randint(min_len, effective_max_len) + return random.randint(min_len, max_len) + elif range == "gift_eval": + # Gift eval forecast lengths with their frequencies + GIFT_EVAL_FORECAST_LENGTHS = { + 48: 5, + 720: 38, + 480: 38, + 30: 3, + 300: 16, + 8: 2, + 120: 3, + 450: 8, + 80: 8, + 12: 2, + 900: 10, + 180: 3, + 600: 10, + 60: 3, + 210: 3, + 195: 3, + 140: 3, + 130: 3, + 14: 1, + 18: 1, + 13: 1, + 6: 1, + } + + lengths = list(GIFT_EVAL_FORECAST_LENGTHS.keys()) + weights = list(GIFT_EVAL_FORECAST_LENGTHS.values()) + + if cap is not None: + filtered = [ + (length_candidate, weight) + for length_candidate, weight in zip(lengths, weights) + if length_candidate <= cap + ] + if filtered: + lengths, weights = zip(*filtered) + lengths = list(lengths) + weights = list(weights) + + return random.choices(lengths, weights=weights)[0] + else: + raise ValueError(f"Invalid range: {range}") diff --git a/src/gift_eval/__init__.py b/src/gift_eval/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a3571fe5ad1000abbb701c1e1e49a3f95420dff2 --- /dev/null +++ b/src/gift_eval/__init__.py @@ -0,0 +1,15 @@ +"""Public API for the GIFT-Eval utilities.""" + +from .core import DatasetMetadata, EvaluationItem, expand_datasets_arg +from .predictor import TimeSeriesPredictor +from .results import aggregate_results, get_all_datasets_full_name, write_results_to_disk + +__all__ = [ + "DatasetMetadata", + "EvaluationItem", + "TimeSeriesPredictor", + "aggregate_results", + "expand_datasets_arg", + "get_all_datasets_full_name", + "write_results_to_disk", +] diff --git a/src/gift_eval/constants.py b/src/gift_eval/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..4996299c8804641ae8020470019bb953ef6c5a11 --- /dev/null +++ b/src/gift_eval/constants.py @@ -0,0 +1,186 @@ +import json +import logging +import os +from pathlib import Path + +from gluonts.ev.metrics import ( + MAE, + MAPE, + MASE, + MSE, + MSIS, + ND, + NRMSE, + RMSE, + SMAPE, + MeanWeightedSumQuantileLoss, +) + + +logger = logging.getLogger(__name__) + + +# Environment setup +os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8" + + +# Use absolute path relative to the project root +_MODULE_DIR = Path(__file__).parent.parent.parent # Goes to project root +DATASET_PROPERTIES_PATH = _MODULE_DIR / "data" / "dataset_properties.json" + + +try: + with open(DATASET_PROPERTIES_PATH, "r") as f: + DATASET_PROPERTIES = json.load(f) +except Exception as exc: # pragma: no cover - logging path + DATASET_PROPERTIES = {} + logger.warning( + "Could not load dataset properties from %s: %s. 
Domain and num_variates will fall back to defaults.", + DATASET_PROPERTIES_PATH, + exc, + ) + + +# Datasets +SHORT_DATASETS = ( + "m4_yearly", + "m4_quarterly", + "m4_monthly", + "m4_weekly", + "m4_daily", + "m4_hourly", + "electricity/15T", + "electricity/H", + "electricity/D", + "electricity/W", + "solar/10T", + "solar/H", + "solar/D", + "solar/W", + "hospital", + "covid_deaths", + "us_births/D", + "us_births/M", + "us_births/W", + "saugeenday/D", + "saugeenday/M", + "saugeenday/W", + "temperature_rain_with_missing", + "kdd_cup_2018_with_missing/H", + "kdd_cup_2018_with_missing/D", + "car_parts_with_missing", + "restaurant", + "hierarchical_sales/D", + "hierarchical_sales/W", + "LOOP_SEATTLE/5T", + "LOOP_SEATTLE/H", + "LOOP_SEATTLE/D", + "SZ_TAXI/15T", + "SZ_TAXI/H", + "M_DENSE/H", + "M_DENSE/D", + "ett1/15T", + "ett1/H", + "ett1/D", + "ett1/W", + "ett2/15T", + "ett2/H", + "ett2/D", + "ett2/W", + "jena_weather/10T", + "jena_weather/H", + "jena_weather/D", + "bitbrains_fast_storage/5T", + "bitbrains_fast_storage/H", + "bitbrains_rnd/5T", + "bitbrains_rnd/H", + "bizitobs_application", + "bizitobs_service", + "bizitobs_l2c/5T", + "bizitobs_l2c/H", +) + +MED_LONG_DATASETS = ( + "electricity/15T", + "electricity/H", + "solar/10T", + "solar/H", + "kdd_cup_2018_with_missing/H", + "LOOP_SEATTLE/5T", + "LOOP_SEATTLE/H", + "SZ_TAXI/15T", + "M_DENSE/H", + "ett1/15T", + "ett1/H", + "ett2/15T", + "ett2/H", + "jena_weather/10T", + "jena_weather/H", + "bitbrains_fast_storage/5T", + "bitbrains_rnd/5T", + "bizitobs_application", + "bizitobs_service", + "bizitobs_l2c/5T", + "bizitobs_l2c/H", +) + +# Preserve insertion order from SHORT_DATASETS followed by MED_LONG_DATASETS +ALL_DATASETS = list(dict.fromkeys(SHORT_DATASETS + MED_LONG_DATASETS)) + + +# Evaluation terms +TERMS = ("short", "medium", "long") + + +# Pretty names mapping (following GIFT eval standard) +PRETTY_NAMES = { + "saugeenday": "saugeen", + "temperature_rain_with_missing": "temperature_rain", + "kdd_cup_2018_with_missing": "kdd_cup_2018", + "car_parts_with_missing": "car_parts", +} + + +METRICS = ( + MSE(forecast_type="mean"), + MSE(forecast_type=0.5), + MAE(), + MASE(), + MAPE(), + SMAPE(), + MSIS(), + RMSE(), + NRMSE(), + ND(), + MeanWeightedSumQuantileLoss( + quantile_levels=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9] + ), +) + + +STANDARD_METRIC_NAMES = ( + "MSE[mean]", + "MSE[0.5]", + "MAE[0.5]", + "MASE[0.5]", + "MAPE[0.5]", + "sMAPE[0.5]", + "MSIS", + "RMSE[mean]", + "NRMSE[mean]", + "ND[0.5]", + "mean_weighted_sum_quantile_loss", +) + + +__all__ = [ + "ALL_DATASETS", + "DATASET_PROPERTIES", + "DATASET_PROPERTIES_PATH", + "MED_LONG_DATASETS", + "METRICS", + "PRETTY_NAMES", + "SHORT_DATASETS", + "STANDARD_METRIC_NAMES", + "TERMS", +] diff --git a/src/gift_eval/core.py b/src/gift_eval/core.py new file mode 100644 index 0000000000000000000000000000000000000000..20372cfcfb753d072b29015c11e52ea6be3e7d05 --- /dev/null +++ b/src/gift_eval/core.py @@ -0,0 +1,64 @@ +"""Core data structures and helpers shared across GIFT-Eval modules.""" + +from dataclasses import dataclass +from typing import Dict, List, Optional, Tuple, Union + +from src.gift_eval.constants import ALL_DATASETS + + +@dataclass +class DatasetMetadata: + """Structured description of a dataset/term combination.""" + + full_name: str + key: str + freq: str + term: str + season_length: int + target_dim: int + to_univariate: bool + prediction_length: int + windows: int + + +@dataclass +class EvaluationItem: + """Container for evaluation results and optional figures.""" + + 
dataset_metadata: DatasetMetadata + metrics: Dict + figures: List[Tuple[object, str]] + + +DatasetSelection = Union[List[str], Tuple[str, ...], str] + + +def expand_datasets_arg(datasets: DatasetSelection) -> List[str]: + """Normalize dataset selection strings to explicit lists.""" + + if isinstance(datasets, str): + dataset_list = [datasets] + else: + dataset_list = list(datasets) + + if not dataset_list: + return [] + + if dataset_list[0] == "all": + return list(ALL_DATASETS) + + for dataset in dataset_list: + if dataset not in ALL_DATASETS: + raise ValueError(f"Invalid dataset: {dataset}. Use one of {ALL_DATASETS}") + + return dataset_list + + +__all__ = [ + "DatasetMetadata", + "EvaluationItem", + "DatasetSelection", + "expand_datasets_arg", +] + + diff --git a/src/gift_eval/data.py b/src/gift_eval/data.py new file mode 100644 index 0000000000000000000000000000000000000000..7906509654ac8c4a5d8d52ec64455e7e02d3ce87 --- /dev/null +++ b/src/gift_eval/data.py @@ -0,0 +1,234 @@ +# Copyright (c) 2023, Salesforce, Inc. +# SPDX-License-Identifier: Apache-2 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math +from collections.abc import Iterable, Iterator +from enum import Enum +from functools import cached_property +from pathlib import Path +from typing import Optional + +import datasets +import pyarrow.compute as pc +from gluonts.dataset import DataEntry +from gluonts.dataset.common import ProcessDataEntry +from gluonts.dataset.split import TestData, TrainingDataset, split +from gluonts.itertools import Map +from gluonts.time_feature import norm_freq_str +from gluonts.transform import Transformation +from pandas.tseries.frequencies import to_offset +from toolz import compose + +TEST_SPLIT = 0.1 +MAX_WINDOW = 20 + +M4_PRED_LENGTH_MAP = { + "A": 6, + "Q": 8, + "M": 18, + "W": 13, + "D": 14, + "H": 48, + "h": 48, + "Y": 6, +} + +PRED_LENGTH_MAP = { + "M": 12, + "W": 8, + "D": 30, + "H": 48, + "h": 48, + "T": 48, + "S": 60, + "s": 60, + "min": 48, +} + +TFB_PRED_LENGTH_MAP = { + "A": 6, + "Y": 6, + "H": 48, + "h": 48, + "Q": 8, + "D": 14, + "M": 18, + "W": 13, + "U": 8, + "T": 8, + "min": 8, + "us": 8, +} + + +class Term(Enum): + SHORT = "short" + MEDIUM = "medium" + LONG = "long" + + @property + def multiplier(self) -> int: + if self == Term.SHORT: + return 1 + elif self == Term.MEDIUM: + return 10 + elif self == Term.LONG: + return 15 + + +def itemize_start(data_entry: DataEntry) -> DataEntry: + data_entry["start"] = data_entry["start"].item() + return data_entry + + +class MultivariateToUnivariate(Transformation): + def __init__(self, field): + self.field = field + + def __call__( + self, data_it: Iterable[DataEntry], is_train: bool = False + ) -> Iterator: + for data_entry in data_it: + item_id = data_entry["item_id"] + val_ls = list(data_entry[self.field]) + for id, val in enumerate(val_ls): + univariate_entry = data_entry.copy() + univariate_entry[self.field] = val + univariate_entry["item_id"] = item_id + "_dim" + str(id) + yield univariate_entry + + +class Dataset: + def __init__( 
+ self, + name: str, + term: Term | str = Term.SHORT, + to_univariate: bool = False, + storage_path: str = None, + max_windows: Optional[int] = None, + ): + storage_path = Path(storage_path) + self.hf_dataset = datasets.load_from_disk(str(storage_path / name)).with_format( + "numpy" + ) + process = ProcessDataEntry( + self.freq, + one_dim_target=self.target_dim == 1, + ) + + self.gluonts_dataset = Map(compose(process, itemize_start), self.hf_dataset) + if to_univariate: + self.gluonts_dataset = MultivariateToUnivariate("target").apply( + self.gluonts_dataset + ) + + self.term = Term(term) + self.name = name + self.max_windows = max_windows if max_windows is not None else MAX_WINDOW + + @cached_property + def prediction_length(self) -> int: + freq = norm_freq_str(to_offset(self.freq).name) + if freq.endswith("E"): + freq = freq[:-1] + pred_len = ( + M4_PRED_LENGTH_MAP[freq] if "m4" in self.name else PRED_LENGTH_MAP[freq] + ) + return self.term.multiplier * pred_len + + @cached_property + def freq(self) -> str: + return self.hf_dataset[0]["freq"] + + @cached_property + def target_dim(self) -> int: + return ( + target.shape[0] + if len((target := self.hf_dataset[0]["target"]).shape) > 1 + else 1 + ) + + @cached_property + def past_feat_dynamic_real_dim(self) -> int: + if "past_feat_dynamic_real" not in self.hf_dataset[0]: + return 0 + elif ( + len( + ( + past_feat_dynamic_real := self.hf_dataset[0][ + "past_feat_dynamic_real" + ] + ).shape + ) + > 1 + ): + return past_feat_dynamic_real.shape[0] + else: + return 1 + + @cached_property + def windows(self) -> int: + if "m4" in self.name: + return 1 + w = math.ceil(TEST_SPLIT * self._min_series_length / self.prediction_length) + return min(max(1, w), self.max_windows) + + @cached_property + def _min_series_length(self) -> int: + if self.hf_dataset[0]["target"].ndim > 1: + lengths = pc.list_value_length( + pc.list_flatten( + pc.list_slice(self.hf_dataset.data.column("target"), 0, 1) + ) + ) + else: + lengths = pc.list_value_length(self.hf_dataset.data.column("target")) + return min(lengths.to_numpy()) + + @cached_property + def sum_series_length(self) -> int: + if self.hf_dataset[0]["target"].ndim > 1: + lengths = pc.list_value_length( + pc.list_flatten(self.hf_dataset.data.column("target")) + ) + else: + lengths = pc.list_value_length(self.hf_dataset.data.column("target")) + return sum(lengths.to_numpy()) + + @property + def training_dataset(self) -> TrainingDataset: + training_dataset, _ = split( + self.gluonts_dataset, offset=-self.prediction_length * (self.windows + 1) + ) + return training_dataset + + @property + def validation_dataset(self) -> TrainingDataset: + validation_dataset, _ = split( + self.gluonts_dataset, offset=-self.prediction_length * self.windows + ) + return validation_dataset + + @property + def test_data(self) -> TestData: + _, test_template = split( + self.gluonts_dataset, offset=-self.prediction_length * self.windows + ) + test_data = test_template.generate_instances( + prediction_length=self.prediction_length, + windows=self.windows, + distance=self.prediction_length, + ) + return test_data diff --git a/src/gift_eval/evaluate.py b/src/gift_eval/evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..8d55a42825e33ab509bb2bcb7d241d1ae8756fae --- /dev/null +++ b/src/gift_eval/evaluate.py @@ -0,0 +1,421 @@ +import argparse +import logging +import warnings +from pathlib import Path +from typing import List, Optional, Tuple + +import matplotlib +from gluonts.model.evaluation import evaluate_model +from 
gluonts.time_feature import get_seasonality +from linear_operator.utils.cholesky import NumericalWarning + +from src.gift_eval.constants import ( + DATASET_PROPERTIES, + MED_LONG_DATASETS, + METRICS, + PRETTY_NAMES, +) +from src.gift_eval.core import DatasetMetadata, EvaluationItem, expand_datasets_arg +from src.gift_eval.data import Dataset +from src.gift_eval.predictor import TimeSeriesPredictor +from src.gift_eval.results import write_results_to_disk +from src.plotting.gift_eval_utils import create_plots_for_dataset + +logger = logging.getLogger(__name__) + +# Warnings configuration +warnings.filterwarnings("ignore", category=NumericalWarning) +warnings.filterwarnings("ignore", category=FutureWarning) +warnings.filterwarnings("ignore", category=DeprecationWarning) +matplotlib.set_loglevel("WARNING") +logging.getLogger("matplotlib").setLevel(logging.WARNING) +logging.getLogger("matplotlib.font_manager").setLevel(logging.WARNING) +logging.getLogger("PIL").setLevel(logging.WARNING) + + +class WarningFilter(logging.Filter): + def __init__(self, text_to_filter: str) -> None: + super().__init__() + self.text_to_filter = text_to_filter + + def filter(self, record: logging.LogRecord) -> bool: + return self.text_to_filter not in record.getMessage() + + +# Filter out gluonts warnings about mean predictions +gts_logger = logging.getLogger("gluonts.model.forecast") +gts_logger.addFilter( + WarningFilter("The mean prediction is not stored in the forecast data") +) + + +def construct_evaluation_data( + dataset_name: str, + dataset_storage_path: str, + terms: List[str] = ["short", "medium", "long"], + max_windows: Optional[int] = None, +) -> List[Tuple[Dataset, DatasetMetadata]]: + """Build datasets and rich metadata per term for a dataset name.""" + sub_datasets: List[Tuple[Dataset, DatasetMetadata]] = [] + + if "/" in dataset_name: + ds_key, ds_freq = dataset_name.split("/") + ds_key = ds_key.lower() + ds_key = PRETTY_NAMES.get(ds_key, ds_key) + else: + ds_key = dataset_name.lower() + ds_key = PRETTY_NAMES.get(ds_key, ds_key) + ds_freq = DATASET_PROPERTIES.get(ds_key, {}).get("frequency") + + for term in terms: + # Skip medium/long terms for datasets that don't support them + if ( + term == "medium" or term == "long" + ) and dataset_name not in MED_LONG_DATASETS: + continue + + # Probe once to determine dimensionality + probe_dataset = Dataset( + name=dataset_name, + term=term, + to_univariate=False, + storage_path=dataset_storage_path, + max_windows=max_windows, + ) + + to_univariate = probe_dataset.target_dim > 1 + + dataset = Dataset( + name=dataset_name, + term=term, + to_univariate=to_univariate, + storage_path=dataset_storage_path, + max_windows=max_windows, + ) + + # Compute metadata + season_length = get_seasonality(dataset.freq) + actual_freq = ds_freq if ds_freq else dataset.freq + + metadata = DatasetMetadata( + full_name=f"{ds_key}/{actual_freq}/{term}", + key=ds_key, + freq=actual_freq, + term=term, + season_length=season_length, + target_dim=probe_dataset.target_dim, + to_univariate=to_univariate, + prediction_length=dataset.prediction_length, + windows=dataset.windows, + ) + + sub_datasets.append((dataset, metadata)) + + return sub_datasets + + +def evaluate_datasets( + predictor: TimeSeriesPredictor, + dataset: str, + dataset_storage_path: str, + terms: List[str] = ["short", "medium", "long"], + max_windows: Optional[int] = None, + batch_size: int = 48, + max_context_length: Optional[int] = 1024, + create_plots: bool = False, + max_plots_per_dataset: int = 10, +) -> 
List[EvaluationItem]: + """Evaluate predictor on one dataset across the requested terms.""" + sub_datasets = construct_evaluation_data( + dataset_name=dataset, + dataset_storage_path=dataset_storage_path, + terms=terms, + max_windows=max_windows, + ) + + results: List[EvaluationItem] = [] + for i, (sub_dataset, metadata) in enumerate(sub_datasets): + logger.info(f"Evaluating {i + 1}/{len(sub_datasets)}: {metadata.full_name}") + logger.info(f" Dataset size: {len(sub_dataset.test_data)}") + logger.info(f" Frequency: {sub_dataset.freq}") + logger.info(f" Term: {metadata.term}") + logger.info(f" Prediction length: {sub_dataset.prediction_length}") + logger.info(f" Target dimensions: {sub_dataset.target_dim}") + logger.info(f" Windows: {sub_dataset.windows}") + + # Update context on the reusable predictor + predictor.set_dataset_context( + prediction_length=sub_dataset.prediction_length, + freq=sub_dataset.freq, + batch_size=batch_size, + max_context_length=max_context_length, + ) + + res = evaluate_model( + model=predictor, + test_data=sub_dataset.test_data, + metrics=METRICS, + axis=None, + mask_invalid_label=True, + allow_nan_forecast=False, + seasonality=metadata.season_length, + ) + + figs: List[Tuple[object, str]] = [] + if create_plots: + forecasts = predictor.predict(sub_dataset.test_data.input) + figs = create_plots_for_dataset( + forecasts=forecasts, + test_data=sub_dataset.test_data, + dataset_metadata=metadata, + max_plots=max_plots_per_dataset, + max_context_length=max_context_length, + ) + + results.append( + EvaluationItem(dataset_metadata=metadata, metrics=res, figures=figs) + ) + + return results + + +def _run_evaluation( + predictor: TimeSeriesPredictor, + datasets: List[str] | str, + terms: List[str], + dataset_storage_path: str, + max_windows: Optional[int] = None, + batch_size: int = 48, + max_context_length: Optional[int] = 1024, + output_dir: str = "gift_eval_results", + model_name: str = "TimeSeriesModel", + create_plots: bool = False, + max_plots: int = 10, +) -> None: + """Shared evaluation workflow used by both entry points.""" + datasets_to_run = expand_datasets_arg(datasets) + results_root = Path(output_dir) + + for ds_name in datasets_to_run: + items = evaluate_datasets( + predictor=predictor, + dataset=ds_name, + dataset_storage_path=dataset_storage_path, + terms=terms, + max_windows=max_windows, + batch_size=batch_size, + max_context_length=max_context_length, + create_plots=create_plots, + max_plots_per_dataset=max_plots, + ) + write_results_to_disk( + items=items, + dataset_name=ds_name, + output_dir=results_root, + model_name=model_name, + create_plots=create_plots, + ) + + +def evaluate_from_paths( + model_path: str, + config_path: str, + datasets: List[str] | str, + terms: List[str], + dataset_storage_path: str, + max_windows: Optional[int] = None, + batch_size: int = 48, + max_context_length: Optional[int] = 1024, + output_dir: str = "gift_eval_results", + model_name: str = "TimeSeriesModel", + create_plots: bool = False, + max_plots: int = 10, +) -> None: + """Entry point: load model from disk and save metrics/plots to disk.""" + # Validate inputs early + if not Path(model_path).exists(): + raise FileNotFoundError(f"Model path does not exist: {model_path}") + if not Path(config_path).exists(): + raise FileNotFoundError(f"Config path does not exist: {config_path}") + + predictor = TimeSeriesPredictor.from_paths( + model_path=model_path, + config_path=config_path, + ds_prediction_length=1, # placeholder; set per dataset below + ds_freq="D", # placeholder; 
set per dataset below + batch_size=batch_size, + max_context_length=max_context_length, + ) + + _run_evaluation( + predictor=predictor, + datasets=datasets, + terms=terms, + dataset_storage_path=dataset_storage_path, + max_windows=max_windows, + batch_size=batch_size, + max_context_length=max_context_length, + output_dir=output_dir, + model_name=model_name, + create_plots=create_plots, + max_plots=max_plots, + ) + + +def evaluate_in_memory( + model, + config: dict, + datasets: List[str] | str, + terms: List[str], + dataset_storage_path: str, + max_windows: Optional[int] = None, + batch_size: int = 48, + max_context_length: Optional[int] = 1024, + output_dir: str = "gift_eval_results", + model_name: str = "TimeSeriesModel", + create_plots: bool = False, + max_plots: int = 10, +) -> None: + """Entry point: evaluate in-memory model and return results per dataset.""" + predictor = TimeSeriesPredictor.from_model( + model=model, + config=config, + ds_prediction_length=1, # placeholder; set per dataset below + ds_freq="D", # placeholder; set per dataset below + batch_size=batch_size, + max_context_length=max_context_length, + ) + + _run_evaluation( + predictor=predictor, + datasets=datasets, + terms=terms, + dataset_storage_path=dataset_storage_path, + max_windows=max_windows, + batch_size=batch_size, + max_context_length=max_context_length, + output_dir=output_dir, + model_name=model_name, + create_plots=create_plots, + max_plots=max_plots, + ) + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Evaluate TimeSeriesModel on GIFT-Eval datasets" + ) + + # Model configuration + parser.add_argument( + "--model_path", + type=str, + required=True, + help="Path to the trained model checkpoint", + ) + parser.add_argument( + "--config_path", + type=str, + required=True, + help="Path to the model configuration YAML file", + ) + parser.add_argument( + "--model_name", + type=str, + default="TimeSeriesModel", + help="Name identifier for the model", + ) + + # Dataset configuration + parser.add_argument( + "--datasets", + type=str, + default="all", + help="Comma-separated list of dataset names to evaluate (or 'all')", + ) + parser.add_argument( + "--dataset_storage_path", + type=str, + default="/work/dlclarge2/moroshav-GiftEvalPretrain/gift_eval", + help="Path to the dataset storage directory (default: GIFT_EVAL)", + ) + parser.add_argument( + "--terms", + type=str, + default="short,medium,long", + help="Comma-separated list of prediction terms to evaluate", + ) + parser.add_argument( + "--max_windows", + type=int, + default=None, + help="Maximum number of windows to use for evaluation", + ) + + # Inference configuration + parser.add_argument( + "--batch_size", type=int, default=48, help="Batch size for model inference" + ) + parser.add_argument( + "--max_context_length", + type=int, + default=1024, + help="Maximum context length to use (None for no limit)", + ) + + # Output configuration + parser.add_argument( + "--output_dir", + type=str, + default="gift_eval_results", + help="Directory to save evaluation results", + ) + + # Plotting configuration + parser.add_argument( + "--create_plots", + action="store_true", + help="Create and save plots for each evaluation window", + ) + parser.add_argument( + "--max_plots_per_dataset", + type=int, + default=10, + help="Maximum number of plots to create per dataset term", + ) + + args = parser.parse_args() + args.terms = args.terms.split(",") + args.datasets = args.datasets.split(",") + return args + + +def 
_configure_logging() -> None: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + +if __name__ == "__main__": + _configure_logging() + args = _parse_args() + logger.info(f"Command Line Arguments: {vars(args)}") + try: + evaluate_from_paths( + model_path=args.model_path, + config_path=args.config_path, + datasets=args.datasets, + terms=args.terms, + dataset_storage_path=args.dataset_storage_path, + max_windows=args.max_windows, + batch_size=args.batch_size, + max_context_length=args.max_context_length, + output_dir=args.output_dir, + model_name=args.model_name, + create_plots=args.create_plots, + max_plots=args.max_plots_per_dataset, + ) + except Exception as e: + logger.error(f"Evaluation failed: {str(e)}") + raise diff --git a/src/gift_eval/predictor.py b/src/gift_eval/predictor.py new file mode 100644 index 0000000000000000000000000000000000000000..11a2223842373fbdb294c7608f088a6ad86fa2c4 --- /dev/null +++ b/src/gift_eval/predictor.py @@ -0,0 +1,318 @@ +"""Predictor implementation wrapping the TimeSeriesModel for GIFT-Eval.""" + +import logging +from typing import Iterator, List, Optional + +import numpy as np +import torch +import yaml +from gluonts.model.forecast import QuantileForecast +from gluonts.model.predictor import Predictor +from torch.nn.parallel import DistributedDataParallel as DDP + +from src.data.containers import BatchTimeSeriesContainer +from src.data.frequency import parse_frequency +from src.data.scalers import RobustScaler +from src.models.model import TimeSeriesModel +from src.utils.utils import device + + +logger = logging.getLogger(__name__) + + +class TimeSeriesPredictor(Predictor): + """Unified predictor for TimeSeriesModel supporting flexible construction.""" + + def __init__( + self, + model: TimeSeriesModel, + config: dict, + ds_prediction_length: int, + ds_freq: str, + batch_size: int = 32, + max_context_length: Optional[int] = None, + debug: bool = False, + ) -> None: + # Dataset-specific context (can be updated per dataset/term) + self.ds_prediction_length = ds_prediction_length + self.ds_freq = ds_freq + self.batch_size = batch_size + self.max_context_length = max_context_length + self.debug = debug + + # Persistent model/config (unwrap DDP if needed) + self.model = model.module if isinstance(model, DDP) else model + self.model.eval() + self.config = config + + # Initialize scaler (using same type as model) + scaler_type = self.config.get("TimeSeriesModel", {}).get( + "scaler", "custom_robust" + ) + epsilon = self.config.get("TimeSeriesModel", {}).get("epsilon", 1e-3) + if scaler_type == "custom_robust": + self.scaler = RobustScaler(epsilon=epsilon) + else: + raise ValueError(f"Unsupported scaler type: {scaler_type}") + + def set_dataset_context( + self, + prediction_length: Optional[int] = None, + freq: Optional[str] = None, + batch_size: Optional[int] = None, + max_context_length: Optional[int] = None, + ) -> None: + """Update lightweight dataset-specific attributes without reloading the model.""" + + if prediction_length is not None: + self.ds_prediction_length = prediction_length + if freq is not None: + self.ds_freq = freq + if batch_size is not None: + self.batch_size = batch_size + if max_context_length is not None: + self.max_context_length = max_context_length + + @classmethod + def from_model( + cls, + model: TimeSeriesModel, + config: dict, + ds_prediction_length: int, + ds_freq: str, + batch_size: int = 32, + max_context_length: Optional[int] = None, + debug: bool = False, + ) 
-> "TimeSeriesPredictor": + return cls( + model=model, + config=config, + ds_prediction_length=ds_prediction_length, + ds_freq=ds_freq, + batch_size=batch_size, + max_context_length=max_context_length, + debug=debug, + ) + + @classmethod + def from_paths( + cls, + model_path: str, + config_path: str, + ds_prediction_length: int, + ds_freq: str, + batch_size: int = 32, + max_context_length: Optional[int] = None, + debug: bool = False, + ) -> "TimeSeriesPredictor": + with open(config_path, "r") as f: + config = yaml.safe_load(f) + model = cls._load_model_from_path(config=config, model_path=model_path) + return cls( + model=model, + config=config, + ds_prediction_length=ds_prediction_length, + ds_freq=ds_freq, + batch_size=batch_size, + max_context_length=max_context_length, + debug=debug, + ) + + @staticmethod + def _load_model_from_path(config: dict, model_path: str) -> TimeSeriesModel: + try: + model = TimeSeriesModel(**config["TimeSeriesModel"]).to(device) + checkpoint = torch.load(model_path, map_location=device) + model.load_state_dict(checkpoint["model_state_dict"]) + model.eval() + logger.info(f"Successfully loaded model from {model_path}") + return model + except Exception as exc: # pragma: no cover - logging path + logger.error(f"Failed to load model from {model_path}: {exc}") + raise + + def predict(self, test_data_input) -> Iterator[QuantileForecast]: + """Generate forecasts for the test data.""" + + if hasattr(test_data_input, "__iter__") and not isinstance(test_data_input, list): + test_data_input = list(test_data_input) + logger.debug(f"Processing {len(test_data_input)} time series") + + # Group series by their effective length (after optional truncation), + # then process each uniform-length group in sub-batches up to batch_size. + def _effective_length(entry) -> int: + target = entry["target"] + if target.ndim == 1: + seq_len = len(target) + else: + # target shape is [num_channels, seq_len] + seq_len = target.shape[1] + if self.max_context_length is not None: + seq_len = min(seq_len, self.max_context_length) + return seq_len + + length_to_items: dict[int, List[tuple[int, object]]] = {} + for idx, entry in enumerate(test_data_input): + seq_len = _effective_length(entry) + length_to_items.setdefault(seq_len, []).append((idx, entry)) + + total = len(test_data_input) + ordered_results: List[Optional[QuantileForecast]] = [None] * total + + for _, items in length_to_items.items(): + for i in range(0, len(items), self.batch_size): + chunk = items[i : i + self.batch_size] + entries = [entry for (_orig_idx, entry) in chunk] + batch_forecasts = self._predict_batch(entries) + for forecast_idx, (orig_idx, _entry) in enumerate(chunk): + ordered_results[orig_idx] = batch_forecasts[forecast_idx] + + return ordered_results # type: ignore[return-value] + + def _predict_batch(self, test_data_batch: List) -> List[QuantileForecast]: + """Generate predictions for a batch of time series.""" + + logger.debug(f"Processing batch of size: {len(test_data_batch)}") + + try: + batch_container = self._convert_to_batch_container(test_data_batch) + + if isinstance(device, torch.device): + device_type = device.type + else: + device_type = "cuda" if "cuda" in str(device).lower() else "cpu" + enable_autocast = device_type == "cuda" + + with torch.autocast( + device_type=device_type, + dtype=torch.bfloat16, + enabled=enable_autocast, + ): + with torch.no_grad(): + model_output = self.model(batch_container, drop_enc_allow=False) + + forecasts = self._convert_to_forecasts( + model_output, test_data_batch, 
batch_container + ) + + logger.debug(f"Generated {len(forecasts)} forecasts") + return forecasts + except Exception as exc: # pragma: no cover - logging path + logger.error(f"Error in batch prediction: {exc}") + raise + + def _convert_to_batch_container( + self, test_data_batch: List + ) -> BatchTimeSeriesContainer: + """Convert gluonts test data to BatchTimeSeriesContainer.""" + + batch_size = len(test_data_batch) + history_values_list = [] + start_dates = [] + frequencies = [] + + for entry in test_data_batch: + target = entry["target"] + + if target.ndim == 1: + target = target.reshape(-1, 1) + else: + target = target.T + + if ( + self.max_context_length is not None + and len(target) > self.max_context_length + ): + target = target[-self.max_context_length :] + + history_values_list.append(target) + start_dates.append(entry["start"].to_timestamp().to_datetime64()) + frequencies.append(parse_frequency(entry["freq"])) + + history_values_np = np.stack(history_values_list, axis=0) + num_channels = history_values_np.shape[2] + + history_values = torch.tensor( + history_values_np, dtype=torch.float32, device=device + ) + + future_values = torch.zeros( + (batch_size, self.ds_prediction_length, num_channels), + dtype=torch.float32, + device=device, + ) + + return BatchTimeSeriesContainer( + history_values=history_values, + future_values=future_values, + start=start_dates, + frequency=frequencies, + ) + + def _convert_to_forecasts( + self, + model_output: dict, + test_data_batch: List, + batch_container: BatchTimeSeriesContainer, + ) -> List[QuantileForecast]: + """Convert model predictions to QuantileForecast objects.""" + + predictions = model_output["result"] + scale_statistics = model_output["scale_statistics"] + + if predictions.ndim == 4: + predictions_unscaled = self.scaler.inverse_scale( + predictions, scale_statistics + ) + is_quantile = True + quantile_levels = self.model.quantiles + else: + predictions_unscaled = self.scaler.inverse_scale( + predictions, scale_statistics + ) + is_quantile = False + quantile_levels = [0.5] + + forecasts: List[QuantileForecast] = [] + for idx, entry in enumerate(test_data_batch): + history_length = int(batch_container.history_values.shape[1]) + start_date = entry["start"] + forecast_start = start_date + history_length + + if is_quantile: + pred_array = predictions_unscaled[idx].cpu().numpy() + + if pred_array.shape[1] == 1: + pred_array = pred_array.squeeze(1) + forecast_arrays = pred_array.T + else: + forecast_arrays = pred_array.transpose(2, 0, 1) + + forecast = QuantileForecast( + forecast_arrays=forecast_arrays, + forecast_keys=[str(q) for q in quantile_levels], + start_date=forecast_start, + ) + else: + pred_array = predictions_unscaled[idx].cpu().numpy() + + if pred_array.shape[1] == 1: + pred_array = pred_array.squeeze(1) + forecast_arrays = pred_array.reshape(1, -1) + else: + forecast_arrays = pred_array.reshape(1, *pred_array.shape) + + forecast = QuantileForecast( + forecast_arrays=forecast_arrays, + forecast_keys=["0.5"], + start_date=forecast_start, + ) + + forecasts.append(forecast) + + return forecasts + + +__all__ = ["TimeSeriesPredictor"] + + diff --git a/src/gift_eval/results.py b/src/gift_eval/results.py new file mode 100644 index 0000000000000000000000000000000000000000..b4038065e4cb07058cb8ebb8946ee3d20ed58651 --- /dev/null +++ b/src/gift_eval/results.py @@ -0,0 +1,243 @@ +"""Utilities for persisting and aggregating GIFT-Eval results.""" + +import argparse +import csv +import glob +import logging +from pathlib import Path +from 
typing import List, Optional + +import pandas as pd + +from src.gift_eval.constants import ( + ALL_DATASETS, + DATASET_PROPERTIES, + MED_LONG_DATASETS, + PRETTY_NAMES, + STANDARD_METRIC_NAMES, +) +from src.gift_eval.core import DatasetMetadata, EvaluationItem + + +logger = logging.getLogger(__name__) + + +def _ensure_results_csv(csv_file_path: Path) -> None: + if not csv_file_path.exists(): + csv_file_path.parent.mkdir(parents=True, exist_ok=True) + with open(csv_file_path, "w", newline="") as csvfile: + writer = csv.writer(csvfile) + header = ( + ["dataset", "model"] + + [f"eval_metrics/{name}" for name in STANDARD_METRIC_NAMES] + + ["domain", "num_variates"] + ) + writer.writerow(header) + + +def write_results_to_disk( + items: List[EvaluationItem], + dataset_name: str, + output_dir: Path, + model_name: str, + create_plots: bool, +) -> None: + output_dir = output_dir / dataset_name + output_dir.mkdir(parents=True, exist_ok=True) + output_csv_path = output_dir / "results.csv" + _ensure_results_csv(output_csv_path) + + try: + import matplotlib.pyplot as plt # Local import to avoid unnecessary dependency at module import time + except ImportError: # pragma: no cover - guard for optional dependency + plt = None + + with open(output_csv_path, "a", newline="") as csvfile: + writer = csv.writer(csvfile) + for item in items: + md: DatasetMetadata = item.dataset_metadata + metric_values: List[Optional[float]] = [] + for metric_name in STANDARD_METRIC_NAMES: + value = item.metrics.get(metric_name, None) + if value is None: + metric_values.append(None) + else: + if ( + hasattr(value, "__len__") + and not isinstance(value, (str, bytes)) + and len(value) == 1 + ): + value = value[0] + elif hasattr(value, "item"): + value = value.item() + metric_values.append(value) + + ds_key = md.key.lower() + props = DATASET_PROPERTIES.get(ds_key, {}) + domain = props.get("domain", "unknown") + num_variates = props.get( + "num_variates", 1 if md.to_univariate else md.target_dim + ) + + row = [md.full_name, model_name] + metric_values + [domain, num_variates] + writer.writerow(row) + + if create_plots and item.figures and plt is not None: + plots_dir = output_dir / "plots" / md.key / md.term + plots_dir.mkdir(parents=True, exist_ok=True) + for fig, filename in item.figures: + filepath = plots_dir / filename + fig.savefig(filepath, dpi=300, bbox_inches="tight") + plt.close(fig) + + logger.info( + "Evaluation complete for dataset '%s'. 
Results saved to %s", + dataset_name, + output_csv_path, + ) + if create_plots: + logger.info("Plots saved under %s", output_dir / "plots") + + +def get_all_datasets_full_name() -> List[str]: + """Get all possible dataset full names for validation.""" + + terms = ["short", "medium", "long"] + datasets_full_names: List[str] = [] + + for name in ALL_DATASETS: + for term in terms: + if term in ["medium", "long"] and name not in MED_LONG_DATASETS: + continue + + if "/" in name: + ds_key, ds_freq = name.split("/") + ds_key = ds_key.lower() + ds_key = PRETTY_NAMES.get(ds_key, ds_key) + else: + ds_key = name.lower() + ds_key = PRETTY_NAMES.get(ds_key, ds_key) + ds_freq = DATASET_PROPERTIES.get(ds_key, {}).get("frequency") + + datasets_full_names.append( + f"{ds_key}/{ds_freq if ds_freq else 'unknown'}/{term}" + ) + + return datasets_full_names + + +def aggregate_results(result_root_dir: str | Path) -> pd.DataFrame | None: + """Aggregate results from multiple CSV files into a single dataframe.""" + + result_root = Path(result_root_dir) + + logger.info("Aggregating results in: %s", result_root) + + result_files = glob.glob(f"{result_root}/**/results.csv", recursive=True) + + if not result_files: + logger.error("No result files found!") + return None + + dataframes: List[pd.DataFrame] = [] + for file in result_files: + try: + df = pd.read_csv(file) + if len(df) > 0: + dataframes.append(df) + else: + logger.warning("Empty file: %s", file) + except pd.errors.EmptyDataError: + logger.warning("Skipping empty file: %s", file) + except Exception as exc: + logger.error("Error reading %s: %s", file, exc) + + if not dataframes: + logger.warning("No valid CSV files found to combine") + return None + + combined_df = pd.concat(dataframes, ignore_index=True).sort_values("dataset") + + if len(combined_df) != len(set(combined_df.dataset)): + duplicate_datasets = combined_df.dataset[ + combined_df.dataset.duplicated() + ].tolist() + logger.warning("Warning: Duplicate datasets found: %s", duplicate_datasets) + combined_df = combined_df.drop_duplicates(subset=["dataset"], keep="first") + logger.info( + "Removed duplicates, %s unique datasets remaining", len(combined_df) + ) + + logger.info("Combined results: %s datasets", len(combined_df)) + + all_datasets_full_name = get_all_datasets_full_name() + completed_experiments = combined_df.dataset.tolist() + + completed_experiments_clean = [ + exp for exp in completed_experiments if exp in all_datasets_full_name + ] + missing_or_failed_experiments = [ + exp for exp in all_datasets_full_name if exp not in completed_experiments_clean + ] + + logger.info("=== EXPERIMENT SUMMARY ===") + logger.info("Total expected datasets: %s", len(all_datasets_full_name)) + logger.info("Completed experiments: %s", len(completed_experiments_clean)) + logger.info("Missing/failed experiments: %s", len(missing_or_failed_experiments)) + + logger.info("Completed experiments:") + for idx, exp in enumerate(completed_experiments_clean, start=1): + logger.info(" %3d: %s", idx, exp) + + if missing_or_failed_experiments: + logger.info("Missing or failed experiments:") + for idx, exp in enumerate(missing_or_failed_experiments, start=1): + logger.info(" %3d: %s", idx, exp) + + completion_rate = ( + len(completed_experiments_clean) / len(all_datasets_full_name) * 100 + if all_datasets_full_name + else 0.0 + ) + logger.info("Completion rate: %.1f%%", completion_rate) + + output_file = result_root / "all_results.csv" + combined_df.to_csv(output_file, index=False) + logger.info("Combined results saved to: 
%s", output_file) + + return combined_df + + +__all__ = [ + "aggregate_results", + "get_all_datasets_full_name", + "write_results_to_disk", +] + + +def main() -> None: + """CLI entry point for aggregating results from disk.""" + + parser = argparse.ArgumentParser( + description="Aggregate GIFT-Eval results from multiple CSV files" + ) + parser.add_argument( + "--result_root_dir", + type=str, + required=True, + help="Root directory containing result subdirectories", + ) + + args = parser.parse_args() + result_root_dir = Path(args.result_root_dir) + + logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" + ) + logger.info("Searching in directory: %s", result_root_dir) + + aggregate_results(result_root_dir=result_root_dir) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/src/models/__init__.py b/src/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/models/blocks.py b/src/models/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..7b920ad340ddaf7cd0597b126cdb099c4c5cf9f5 --- /dev/null +++ b/src/models/blocks.py @@ -0,0 +1,62 @@ +import torch +import torch.nn as nn + +from src.models.gated_deltaproduct import GatedDeltaProductConfig +from src.models.gated_deltaproduct.modeling_gated_deltaproduct import ( + GatedDeltaProductBlock, +) + + +class GatedDeltaProductEncoder(nn.Module): + """ + GatedDeltaNet encoder using GatedDeltaProductBlock for sequence modeling. + """ + + def __init__( + self, + layer_idx: int, + token_embed_dim: int, + num_heads: int = 4, + attn_mode: str = "chunk", + expand_v: float = 1.0, + use_gate: bool = False, + use_short_conv: bool = True, + conv_size: int = 4, + hidden_ratio: int = 1.0, + allow_neg_eigval: bool = True, + use_forget_gate: bool = True, + num_householder: int = 1, + **kwargs, + ): + super().__init__() + config = GatedDeltaProductConfig( + attn_mode=attn_mode, + hidden_size=token_embed_dim, + expand_v=expand_v, + use_gate=use_gate, + use_short_conv=use_short_conv, + conv_size=conv_size, + head_dim=token_embed_dim // num_heads, + hidden_ratio=hidden_ratio, + num_heads=num_heads, + allow_neg_eigval=allow_neg_eigval, + use_forget_gate=use_forget_gate, + num_householder=num_householder, + ) + + self.encoder_layer = GatedDeltaProductBlock(layer_idx=layer_idx, config=config) + + def forward(self, x, initial_state=None): + """ + Forward pass through the GatedDeltaProductBlock. + + Args: + x: Input tensor of shape [batch_size, seq_len, hidden_size] + + Returns: + Output tensor of same shape as input + """ + x, last_hidden_state, _ = self.encoder_layer( + x, output_attentions=True, initial_state=initial_state + ) + return x, last_hidden_state diff --git a/src/models/gated_deltaproduct/README.md b/src/models/gated_deltaproduct/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8eee97e640bfc0dce855595d4693b39b13445888 --- /dev/null +++ b/src/models/gated_deltaproduct/README.md @@ -0,0 +1,344 @@ +# Custom GatedDeltaProduct Implementation + +This directory contains a custom implementation of the GatedDeltaProduct layer, based on the [Flash Linear Attention (FLA)](https://github.com/fla-org/flash-linear-attention) library, with modifications specifically designed for **time series forecasting** tasks. 
+ +## Overview + +Our custom implementation adds **hidden state weaving** functionality that enables information to flow across encoder layers, maintaining temporal continuity - a crucial feature for time series forecasting that differs from the general-purpose language modeling focus of the official FLA implementation. + +## Reference + +This implementation is based on: +- **Official FLA Repository**: [https://github.com/fla-org/flash-linear-attention](https://github.com/fla-org/flash-linear-attention) +- **Original Paper**: [DeltaProduct: Improving State-Tracking in Linear RNNs via Householder Products](https://arxiv.org/html/2502.10297v3) (Siems et al., 2025) + +--- + +## What is DeltaProduct? + +DeltaProduct is a linear RNN architecture that uses **diagonal plus rank-nₕ** state-transition matrices, formed as products of `nₕ` generalized Householder transformations. This provides a tunable mechanism to balance expressivity and efficiency compared to diagonal-only architectures like Mamba or GLA. + +### Key Concepts + +- **Householder transformations**: Enable simultaneous token-channel mixing, overcoming the expressivity limitations of purely diagonal state-transition matrices +- **Rank-nₕ structure**: Allows better expressivity than rank-1 (DeltaNet) while maintaining training efficiency. The parameter `nₕ` (number of Householder transformations) provides a tunable trade-off between expressivity and computational cost +- **Gated variant**: Adds gating mechanisms for improved performance, allowing the model to control information flow through forget gates and output gates + +### Architecture Overview + +DeltaProduct improves upon earlier linear RNN architectures: + +- **Diagonal architectures** (Mamba, GLA, mLSTM): Use diagonal state-transition matrices for fast runtime but suffer from limited expressivity +- **Rank-1 architectures** (DeltaNet, RWKV-7): Use diagonal plus rank-1 structure, enabling simultaneous token-channel mixing with only a slight decrease in training efficiency +- **DeltaProduct**: Extends this to diagonal plus rank-nₕ structure, where multiple Householder transformations (nₕ ≥ 1) provide greater expressivity while maintaining computational efficiency + +The architecture interprets DeltaNet's recurrence as performing one step of online gradient descent per token on an associative recall loss. DeltaProduct instead takes multiple (`nₕ`) steps per token, naturally leading to the rank-nₕ structure. + +--- + +## State Weaving Mechanism + +Unlike DeltaProduct's original design for autoregressive language modeling, time series forecasting across a full horizon does not require causal masking. To exploit this property, we introduce **state weaving**, a mechanism that enables bidirectional information flow across the entire sequence length without additional parameters or computational overhead. + +
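+State weaving operates on the recurrent state that each DeltaProduct layer carries across tokens (the Hₜ matrices in the figure below). For reference, the per-token update described informally in the "What is DeltaProduct?" section above can be sketched as follows; this is our paraphrase of the recurrence in the paper, and the gated variant additionally rescales the transition with a scalar forget gate:
+
+```
+S(t, 0) = S(t-1)
+S(t, j) = (I − β(t,j) · k(t,j) k(t,j)ᵀ) · S(t, j−1) + β(t,j) · k(t,j) v(t,j)ᵀ,   j = 1 … nₕ
+S(t)    = S(t, nₕ)
+```
+
+Each token therefore applies nₕ delta-rule steps, so the effective state-transition matrix is a product of nₕ generalized Householder factors — the diagonal-plus-rank-nₕ structure described above.
+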
+
+[Image: State Weaving Architecture]
+ +*Figure: The TempoPFN architecture using stacked GatedDeltaProduct blocks with learnable initial states H₀ⁱ and state-weaving. The final hidden state of each layer Hₜⁱ is added to the learnable initial state of the next layer H₀ⁱ⁺¹, enabling bidirectional information flow.* + +### How State Weaving Works + +In our implementation, state weaving operates as follows: + +1. **Learnable Initial States**: Each encoder layer `i` has a learnable initial hidden state `H₀ⁱ` that is optimized during training. + +2. **State Propagation**: The final hidden state from layer `i`, denoted `Hₜⁱ`, is propagated forward and combined with the learnable initial state of the next layer: + ``` + H₀ⁱ⁺¹ = H₀ⁱ⁺¹ + Hₜⁱ + ``` + +3. **Bidirectional Information Flow**: This mechanism effectively lifts the causal constraint while maintaining computational efficiency. Information from later tokens can influence earlier layers through the accumulated hidden states, enabling the model to process the entire sequence (history + future horizon) coherently. + +4. **No Extra Overhead**: Unlike explicit bidirectional architectures, state weaving requires no additional parameters or computational overhead beyond the existing forward pass. + +This design is particularly powerful for time series forecasting, where: +- The full prediction horizon is known at inference time +- Coherent predictions across all future time steps are desired +- Historical context should inform all future predictions simultaneously + +--- + +## Key Differences from Official FLA + +### 1. **`initial_state` Parameter in Forward Method** + +#### Official FLA (`fla/layers/gated_deltaproduct.py`) +```python +def forward( + self, + hidden_states: torch.Tensor, + attention_mask: torch.Tensor | None = None, + past_key_values: Cache | None = None, + use_cache: bool | None = False, + output_attentions: bool | None = False, + **kwargs: Unpack[dict], +) -> tuple[torch.Tensor, torch.Tensor | None, Cache | None]: +``` +**No `initial_state` parameter** - The official implementation only uses `recurrent_state` from `past_key_values`. + +#### Our Custom Implementation (`gated_deltaproduct.py`) +```python +def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Cache] = None, + initial_state: Optional[torch.Tensor] = None, # ← ADDED + use_cache: Optional[bool] = False, + output_attentions: Optional[bool] = False, + **kwargs: Unpack[Dict], +) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Cache]]: +``` +**Added `initial_state` parameter** - Allows external control of the initial recurrent state, enabling layer-to-layer state propagation. + +--- + +### 2. 
**Usage of `initial_state` in Chunk Mode** + +#### Official FLA +```python +if mode == 'chunk': + o, recurrent_state = chunk_gated_delta_product( + q=q, k=k, v=v, g=g, beta=beta, + initial_state=recurrent_state, # ← Only from past_key_values + output_final_state=use_cache, + cu_seqlens=cu_seqlens, + num_householder=self.num_householder, + use_qk_l2norm_in_kernel=True, + ) +``` + +#### Our Custom Implementation +```python +if mode == "chunk": + o, recurrent_state = chunk_gated_delta_product( + q=q, k=k, v=v, g=g, beta=beta, + initial_state=initial_state, # ← Uses external initial_state if provided + output_final_state=output_attentions, + cu_seqlens=cu_seqlens, + num_householder=self.num_householder, + use_qk_l2norm_in_kernel=True, + ) +``` + +**Key Difference**: Our implementation prioritizes the externally provided `initial_state` over `recurrent_state` from `past_key_values`, enabling layer-to-layer state propagation. + +--- + +### 3. **Return Value: Hidden State Output** + +#### Official FLA (`fla/models/gated_deltaproduct/modeling_gated_deltaproduct.py`) +```python +def forward( + self, + hidden_states: torch.Tensor, + attention_mask: torch.Tensor | None = None, + past_key_values: Cache | list[torch.FloatTensor] | None = None, + use_cache: bool | None = False, + output_attentions: bool | None = False, + **kwargs: Unpack[dict], +) -> tuple[torch.FloatTensor, tuple[torch.FloatTensor, torch.FloatTensor] | None]: + # ... + return outputs # Returns (hidden_states, attentions, past_key_values) +``` + +**No `initial_state` parameter** - The block doesn't accept or return hidden states explicitly. + +#### Our Custom Implementation (`modeling_gated_deltaproduct.py`) +```python +def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Union[Cache, List[torch.FloatTensor]]] = None, + use_cache: Optional[bool] = False, + output_attentions: Optional[bool] = False, + initial_state: Optional[torch.FloatTensor] = None, # ← ADDED + **kwargs: Unpack[Dict], +) -> Tuple[ + torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]] +]: + # ... + hidden_states, attentions, past_key_values = self.attn( + # ... + initial_state=initial_state, # ← Passed through + **kwargs, + ) + # ... + return outputs # Returns (hidden_states, attentions, past_key_values) +``` + +**Added `initial_state` parameter** - The block accepts and forwards `initial_state` to the attention layer. + +--- + +### 4. 
**Hidden State Weaving Implementation** + +Our implementation supports two modes of hidden state weaving (controlled by the `weaving` parameter in encoder config): + +#### **Mode 1: Weaving Enabled (`weaving=True`)** - Default +```python +if self.encoder_config.get("weaving", True): + # initial hidden state is learnable + hidden_state = torch.zeros_like( + self.initial_hidden_state[0].repeat(batch_size * num_channels, 1, 1, 1) + ) + for layer_idx, encoder_layer in enumerate(self.encoder_layers): + x, hidden_state = encoder_layer( + x, + hidden_state + self.initial_hidden_state[layer_idx].repeat( + batch_size * num_channels, 1, 1, 1 + ), + ) +``` + +**Key Features**: +- Hidden state accumulates across layers +- Each layer receives: `previous_hidden_state + learnable_initial_state[layer_idx]` +- State persists between layers, allowing information to flow through the network + +#### **Mode 2: No Weaving (`weaving=False`)** +```python +else: + # initial hidden state is separately learnable for each layer + for layer_idx, encoder_layer in enumerate(self.encoder_layers): + initial_hidden_state = self.initial_hidden_state[layer_idx].repeat( + batch_size * num_channels, 1, 1, 1 + ) + x, _ = encoder_layer(x, initial_hidden_state) +``` + +**Key Features**: +- Each layer uses its own independent learnable initial state +- No accumulation between layers +- Hidden state is discarded after each layer + +--- + +### 5. **Learnable Initial Hidden States** + +Our implementation includes learnable initial states managed at the model level: + +```python +num_initial_hidden_states = self.num_encoder_layers +self.initial_hidden_state = nn.ParameterList( + [ + nn.Parameter( + torch.randn( + 1, self.encoder_config["num_heads"], head_k_dim, head_v_dim + ) + / head_k_dim, + requires_grad=True, + ) + for _ in range(num_initial_hidden_states) + ] +) +``` + +**Key Features**: +- One learnable parameter per encoder layer +- Shape: `[1, num_heads, head_k_dim, head_v_dim]` +- Initialized with small random values scaled by `head_k_dim` +- These are trainable parameters that can be optimized during training + +--- + +### 6. **Parameter Name Differences** + +- **Official FLA**: Uses `use_output_gate` parameter +- **Our Implementation**: Uses `use_gate` parameter (renamed for clarity) + +--- + +### 7. **Return Value Differences** + +#### Official FLA (`fla/layers/gated_deltaproduct.py`) +```python +return o, None, past_key_values # Returns (output, None, past_key_values) +``` + +#### Our Custom Implementation (`gated_deltaproduct.py`) +```python +return o, recurrent_state, past_key_values # Returns (output, recurrent_state, past_key_values) +``` + +**Key Difference**: Our implementation returns `recurrent_state` (the final hidden state) instead of `None`, enabling state propagation. + +--- + +### 8. **Encoder Wrapper Return Values** + +Our `GatedDeltaProductEncoder` (in `src/models/blocks.py`) returns both the output and hidden state: + +```python +x, last_hidden_state, _ = self.encoder_layer( + x, output_attentions=True, initial_state=initial_state +) +return x, last_hidden_state # ← Returns hidden state for weaving +``` + +This allows state propagation between layers in the `TimeSeriesModel`. 
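+
+Putting sections 4, 5 and 8 together, a minimal end-to-end sketch of driving a stack of encoder blocks with state weaving could look like the snippet below. It is illustrative only: the hyperparameters and tensor shapes are made up, the learnable per-layer initial states from section 5 are omitted (the real model adds `self.initial_hidden_state[layer_idx]` to the carried state before each call), and chunk mode relies on the FLA Triton kernels, so a CUDA device is assumed.
+
+```python
+import torch
+
+from src.models.blocks import GatedDeltaProductEncoder
+
+# Two stacked blocks; hyperparameters are illustrative.
+layers = torch.nn.ModuleList(
+    GatedDeltaProductEncoder(layer_idx=i, token_embed_dim=64, num_heads=4)
+    for i in range(2)
+).cuda()
+
+x = torch.randn(8, 128, 64, device="cuda")  # [batch, seq_len, hidden_size]
+
+hidden_state = None  # the first block starts from its (here omitted) initial state
+for layer in layers:
+    # Each block returns its output and its final recurrent state,
+    # which is "woven" into the next block as its initial state.
+    x, hidden_state = layer(x, initial_state=hidden_state)
+```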
+ +--- + +## Summary Table + +| Feature | Official FLA | Our Custom Implementation | +|---------|-------------|---------------------------| +| `initial_state` in `forward()` | ❌ No | ✅ Yes | +| `initial_state` in `GatedDeltaProductBlock.forward()` | ❌ No | ✅ Yes | +| Hidden state weaving | ❌ No | ✅ Yes (configurable) | +| Learnable initial states | ❌ No | ✅ Yes (`nn.ParameterList`) | +| Returns `recurrent_state` | ❌ No (returns `None`) | ✅ Yes | +| Layer-to-layer state propagation | ❌ No | ✅ Yes (when `weaving=True`) | +| Parameter name | `use_output_gate` | `use_gate` | + +--- + +## Why These Differences Matter for Time Series Forecasting + +1. **Temporal Continuity**: Hidden state weaving allows information to flow across layers, maintaining temporal patterns across the encoder stack. This is crucial for time series where historical context matters. + +2. **Learnable Initialization**: Learnable initial states allow the model to learn optimal starting points for the recurrent computation, which can be crucial for capturing time series patterns. + +3. **Flexible State Management**: The `weaving` parameter allows switching between: + - **Weaving mode**: Better for capturing long-term dependencies across layers + - **Independent mode**: Each layer processes independently, potentially more stable + +4. **State Propagation**: Returning and propagating hidden states enables the model to maintain context across multiple encoder layers, which is beneficial for time series forecasting where historical context matters. + +These modifications make our implementation better suited for time series forecasting tasks compared to the general-purpose language modeling focus of the official FLA implementation. + +--- + +## Files in This Directory + +- **`gated_deltaproduct.py`**: Core GatedDeltaProduct layer implementation with `initial_state` support +- **`modeling_gated_deltaproduct.py`**: GatedDeltaProductBlock wrapper that integrates the layer +- **`configuration_gated_deltaproduct.py`**: Configuration class for the model +- **`__init__.py`**: Module exports + +--- + +## Usage + +See `src/models/model.py` and `src/models/blocks.py` for examples of how to use this custom implementation with hidden state weaving. + +To enable/disable weaving, set the `weaving` parameter in your encoder configuration: +```python +encoder_config = { + "weaving": True, # Enable state propagation across layers + # ... 
other config parameters +} +``` + diff --git a/src/models/gated_deltaproduct/__init__.py b/src/models/gated_deltaproduct/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b58b09a4b41a9993251d80912aa37120cf3fc4f9 --- /dev/null +++ b/src/models/gated_deltaproduct/__init__.py @@ -0,0 +1,11 @@ +from src.models.gated_deltaproduct.configuration_gated_deltaproduct import ( + GatedDeltaProductConfig, +) +from src.models.gated_deltaproduct.modeling_gated_deltaproduct import ( + GatedDeltaProductBlock, +) + +__all__ = [ + "GatedDeltaProductConfig", + "GatedDeltaProductBlock", +] diff --git a/src/models/gated_deltaproduct/configuration_gated_deltaproduct.py b/src/models/gated_deltaproduct/configuration_gated_deltaproduct.py new file mode 100644 index 0000000000000000000000000000000000000000..83c95f1a47459a9fc754ee680962a090d8bec8df --- /dev/null +++ b/src/models/gated_deltaproduct/configuration_gated_deltaproduct.py @@ -0,0 +1,108 @@ +import warnings + +from transformers.configuration_utils import PretrainedConfig + + +class GatedDeltaProductConfig(PretrainedConfig): + model_type = "gated_deltaproduct" + keys_to_ignore_at_inference = ["past_key_values"] + + def __init__( + self, + attn_mode: str = "chunk", + conv_size: int = 4, + head_dim: int = 256, + num_heads: int = 6, + hidden_size: int = 2048, + expand_v: float = 2.0, + use_gate: bool = True, # Changed from use_output_gate to use_gate for custom implementation + use_short_conv: bool = True, + max_position_embeddings: int = 2048, + hidden_ratio: int | None = 4, + intermediate_size: int | None = None, + hidden_act: str = "swish", + num_hidden_layers: int = 21, + norm_eps: float = 1e-6, + attn: dict | None = None, + use_cache: bool = True, + pad_token_id: int = None, + bos_token_id: int = 1, + eos_token_id: int = 2, + tie_word_embeddings: bool = False, + initializer_range: float = 0.02, + fuse_norm: bool = True, + fuse_swiglu: bool = True, + fuse_cross_entropy: bool = True, + fuse_linear_cross_entropy: bool = False, + use_l2warp: bool = False, + vocab_size: int = 32000, + use_forget_gate: bool = False, + allow_neg_eigval: bool = False, + num_householder: int = 1, + **kwargs, + ): + self.attn_mode = attn_mode + self.conv_size = conv_size + self.head_dim = head_dim + self.num_heads = num_heads + self.hidden_size = hidden_size + self.expand_v = expand_v + self.use_gate = use_gate # Changed from use_output_gate to use_gate + self.use_short_conv = use_short_conv + self.max_position_embeddings = max_position_embeddings + + self.hidden_ratio = hidden_ratio + self.intermediate_size = intermediate_size + self.hidden_act = hidden_act + self.num_hidden_layers = num_hidden_layers + self.norm_eps = norm_eps + self.attn = attn + self.use_cache = use_cache + self.initializer_range = initializer_range + + self.fuse_norm = fuse_norm + self.fuse_swiglu = fuse_swiglu + self.fuse_cross_entropy = fuse_cross_entropy + self.fuse_linear_cross_entropy = fuse_linear_cross_entropy + self.use_l2warp = use_l2warp + self.vocab_size = vocab_size + + if fuse_cross_entropy and fuse_linear_cross_entropy: + raise ValueError( + "`fuse_cross_entropy` and `fuse_linear_cross_entropy` cannot be True at the same time.", + ) + if fuse_linear_cross_entropy: + warnings.warn( + "`fuse_linear_cross_entropy` is enabled, which can improves memory efficiency " + "at the potential cost of reduced precision. 
" + "If you observe issues like loss divergence, consider disabling this setting.", + ) + + # DeltaProduct specific + self.allow_neg_eigval = allow_neg_eigval + self.num_householder = num_householder + self.use_forget_gate = use_forget_gate + + if attn is not None: + if not isinstance(attn, dict): + raise ValueError("attn must be a dictionary") + if "layers" not in attn: + raise ValueError( + "Layer indices must be provided to initialize hybrid attention layers" + ) + if "num_heads" not in attn: + raise ValueError( + "Number of heads must be provided to initialize hybrid attention layers" + ) + attn["num_kv_heads"] = attn.get("num_kv_heads", attn["num_heads"]) + attn["qkv_bias"] = attn.get("qkv_bias", False) + attn["window_size"] = attn.get("window_size", None) + attn["rope_theta"] = attn.get("rope_theta", 10000.0) + + super().__init__( + pad_token_id=pad_token_id, + bos_token_id=bos_token_id, + eos_token_id=eos_token_id, + tie_word_embeddings=tie_word_embeddings, + **kwargs, + ) diff --git a/src/models/gated_deltaproduct/gated_deltaproduct.py b/src/models/gated_deltaproduct/gated_deltaproduct.py new file mode 100644 index 0000000000000000000000000000000000000000..f85222cc627f0000509e82451247916ca5cd67e5 --- /dev/null +++ b/src/models/gated_deltaproduct/gated_deltaproduct.py @@ -0,0 +1,351 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2023-2025, Songlin Yang, Yu Zhang + +from __future__ import annotations + +import math +import warnings +from typing import TYPE_CHECKING, Dict, Optional, Tuple + +import torch +import torch.nn as nn +from einops import rearrange, repeat +from fla.layers.utils import get_unpad_data, index_first_axis, pad_input +from fla.modules import FusedRMSNormGated, RMSNorm, ShortConvolution +from fla.ops.delta_rule import fused_recurrent_delta_rule +from fla.ops.gated_delta_product import chunk_gated_delta_product +from fla.ops.gated_delta_rule import fused_recurrent_gated_delta_rule +from torch.nn import functional as F + +if TYPE_CHECKING: + from fla.models.utils import Cache + from transformers.processing_utils import Unpack + + +class GatedDeltaProduct(nn.Module): + """ + Generalized version of GatedDoubleDeltaNet that supports arbitrary number of householder transformations. 
+ """ + + def __init__( + self, + hidden_size: int = 2048, + expand_v: float = 2, + head_dim: int = 256, + num_heads: int = 6, + num_v_heads: int = None, + mode: str = "chunk", + use_gate: bool = True, + use_short_conv: bool = True, + conv_size: int = 4, + conv_bias: bool = False, + layer_idx: int = None, + norm_eps: float = 1e-5, + use_forget_gate: bool = True, + allow_neg_eigval: bool = True, + num_householder: int = 2, + **kwargs, + ) -> GatedDeltaProduct: + super().__init__() + + self.mode = mode + + self.hidden_size = hidden_size + self.expand_v = expand_v + + self.use_forget_gate = use_forget_gate + self.allow_neg_eigval = allow_neg_eigval + self.num_householder = num_householder + self.use_gate = use_gate + self.use_short_conv = use_short_conv + self.conv_size = conv_size + self.conv_bias = conv_bias + + self.head_dim = head_dim + self.num_heads = num_heads + self.num_v_heads = num_v_heads if num_v_heads is not None else num_heads + + self.head_k_dim = head_dim + self.head_v_dim = int(self.head_dim * self.expand_v) + self.key_dim = int(self.num_heads * self.head_k_dim) + self.value_dim = int(self.num_v_heads * self.head_v_dim) + self.layer_idx = layer_idx + self.init_hidden_state = nn.Parameter( + torch.randn(self.num_heads, self.head_dim, self.head_dim) + ) + + # Consistency check: Ensure expand_v produces integer values + if not math.isclose( + self.num_v_heads * self.head_dim * expand_v, self.value_dim, rel_tol=1e-5 + ): + raise ValueError( + f"expand_v={expand_v} does not produce an integer value when multiplied by key_dim={self.key_dim}. " + f"Resulting value_dim would be {self.num_v_heads * self.head_dim * expand_v}, which is invalid for nn.Linear." + ) + if self.num_v_heads > self.num_heads and self.num_v_heads % self.num_heads != 0: + raise ValueError( + f"num_v_heads={self.num_v_heads} must be divisible by num_heads={self.num_heads}." + ) + + if not math.isclose(head_dim * expand_v, self.head_v_dim, rel_tol=1e-5): + raise ValueError( + f"expand_v={expand_v} does not produce an integer value when multiplied by head_dim={head_dim}. " + f"Resulting head_v_dim would be {head_dim * expand_v}, which is invalid for FusedRMSNormGated." + ) + assert mode in ["chunk", "fused_recurrent"], f"Not suppoerted mode `{mode}`." + + self.q_proj = nn.Linear(hidden_size, self.key_dim, bias=False) + self.k_proj = nn.Linear(hidden_size, self.key_dim * num_householder, bias=False) + self.v_proj = nn.Linear( + hidden_size, self.value_dim * num_householder, bias=False + ) + self.b_proj = nn.Linear( + hidden_size, self.num_v_heads * num_householder, bias=False + ) + + if self.use_forget_gate: + self.a_proj = nn.Linear(hidden_size, self.num_v_heads, bias=False) + A = torch.empty(self.num_v_heads, dtype=torch.float32).uniform_(0, 16) + self.A_log = nn.Parameter(torch.log(A)) + self.A_log._no_weight_decay = True + # hard coded for now + dt_min = 0.001 + dt_max = 0.1 + dt_init_floor = 1e-4 + dt = torch.exp( + torch.rand(self.num_v_heads) * (math.log(dt_max) - math.log(dt_min)) + + math.log(dt_min) + ) + dt = torch.clamp(dt, min=dt_init_floor) + # Inverse of softplus: https://github.com/pytorch/pytorch/issues/72759 + inv_dt = dt + torch.log(-torch.expm1(-dt)) + self.dt_bias = nn.Parameter(inv_dt) + # Just to be explicit. 
Without this we already don't put wd on dt_bias because of the check + # name.endswith("bias") in param_grouping.py + self.dt_bias._no_weight_decay = True + + if use_short_conv: + self.conv_size = conv_size + self.q_conv1d = ShortConvolution( + hidden_size=self.key_dim, + kernel_size=conv_size, + bias=conv_bias, + activation="silu", + ) + self.k_conv1d = ShortConvolution( + hidden_size=self.key_dim * num_householder, + kernel_size=conv_size, + bias=conv_bias, + activation="silu", + ) + self.v_conv1d = ShortConvolution( + hidden_size=self.value_dim * num_householder, + kernel_size=conv_size, + bias=conv_bias, + activation="silu", + ) + else: + warnings.warn( + "ShortConvolution is crucial to the performance. " + "Do not turn it off, i.e., setting `use_short_conv=False` unless you know what you are doing." + ) + if use_gate: + self.g_proj = nn.Linear(hidden_size, self.value_dim, bias=False) + self.o_norm = FusedRMSNormGated(self.head_v_dim, eps=norm_eps) + else: + self.o_norm = RMSNorm(self.head_v_dim, eps=norm_eps) + self.o_proj = nn.Linear(self.value_dim, hidden_size, bias=False) + + def _initialize_weights(self, module: nn.Module): + if getattr(module, "_is_hf_initialized", False): + return + if isinstance(module, nn.Linear): + nn.init.xavier_uniform_(module.weight, gain=2**-2.5) + if module.bias is not None: + nn.init.zeros_(module.bias) + module._is_hf_initialized = True + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Cache] = None, + initial_state: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = False, + output_attentions: Optional[bool] = False, + **kwargs: Unpack[Dict], + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Cache]]: + if attention_mask is not None: + assert len(attention_mask.shape) == 2, ( + "Expected attention_mask as a 0-1 matrix with shape [batch_size, seq_len] " + "for padding purposes (0 indicating padding). " + "Arbitrary attention masks of shape [batch_size, seq_len, seq_len] are not allowed." + ) + + batch_size, q_len, _ = hidden_states.shape + # change to inference mode. + mode = self.mode + + if self.training: + assert mode == "chunk", "Only chunk mode is supported in training." + + last_state = None + if past_key_values is not None and len(past_key_values) > self.layer_idx: + last_state = past_key_values[self.layer_idx] + + cu_seqlens = kwargs.get("cu_seqlens", None) + if attention_mask is not None: + indices, cu_seqlens, _ = get_unpad_data(attention_mask[:, -q_len:]) + hidden_states = index_first_axis( + rearrange(hidden_states, "b s ... -> (b s) ..."), indices + ).unsqueeze(0) + + if self.use_short_conv: + conv_state_q, conv_state_k, conv_state_v = None, None, None + if last_state is not None: + conv_state_q, conv_state_k, conv_state_v = last_state["conv_state"] + q, conv_state_q = self.q_conv1d( + x=self.q_proj(hidden_states), + cache=conv_state_q, + output_final_state=use_cache, + cu_seqlens=cu_seqlens, + ) + k, conv_state_k = self.k_conv1d( + x=self.k_proj(hidden_states), + cache=conv_state_k, + output_final_state=use_cache, + cu_seqlens=cu_seqlens, + ) + v, conv_state_v = self.v_conv1d( + x=self.v_proj(hidden_states), + cache=conv_state_v, + output_final_state=use_cache, + cu_seqlens=cu_seqlens, + ) + else: + q = F.silu(self.q_proj(hidden_states)) + k = F.silu(self.k_proj(hidden_states)) + v = F.silu(self.v_proj(hidden_states)) + + q = rearrange(q, "... (h d) -> ... h d", d=self.head_k_dim) + k = rearrange( + k, + "... l (n h d) -> ... 
(l n) h d", + n=self.num_householder, + d=self.head_k_dim, + ) + v = rearrange( + v, + "... l (n h d) -> ... (l n) h d", + n=self.num_householder, + d=self.head_v_dim, + ) + + if self.num_v_heads > self.num_heads: + q, k = map( + lambda x: repeat( + x, "... h d -> ... (h g) d", g=self.num_v_heads // self.num_heads + ), + (q, k), + ) + + beta = self.b_proj(hidden_states).sigmoid() + if self.allow_neg_eigval: + beta = beta * 2.0 + + beta = rearrange(beta, "... l (n h) -> ... (l n) h", n=self.num_householder) + if self.use_forget_gate: + g = -self.A_log.float().exp() * F.softplus( + self.a_proj(hidden_states).float() + self.dt_bias + ) + else: + g = None + + recurrent_state = ( + last_state["recurrent_state"] if last_state is not None else None + ) + if mode == "chunk": + o, recurrent_state = chunk_gated_delta_product( + q=q, + k=k, + v=v, + g=g, + beta=beta, + initial_state=initial_state, + output_final_state=output_attentions, + cu_seqlens=cu_seqlens, + num_householder=self.num_householder, + use_qk_l2norm_in_kernel=True, + ) + + elif mode == "fused_recurrent": + if self.use_forget_gate: + g_new = torch.zeros( + g.shape[0], + g.shape[1], + self.num_householder, + g.shape[2], + device=g.device, + dtype=torch.float32, + ) + g_new[:, :, 0] = g + g = rearrange(g_new, "... l n h -> ... (l n) h") + + q_new = q.new_zeros( + q.shape[0], q.shape[1], self.num_householder, q.shape[2], q.shape[3] + ) + q_new[:, :, -1] = q + q = rearrange(q_new, "... l n h d-> ... (l n) h d") + if self.use_forget_gate: + o, recurrent_state = fused_recurrent_gated_delta_rule( + q=q, + k=k, + v=v, + g=g, + beta=beta, + initial_state=recurrent_state, + output_final_state=use_cache, + cu_seqlens=cu_seqlens * self.num_householder + if cu_seqlens is not None + else None, + use_qk_l2norm_in_kernel=True, + ) + else: + o, recurrent_state = fused_recurrent_delta_rule( + q=q, + k=k, + v=v, + beta=beta, + initial_state=recurrent_state, + output_final_state=use_cache, + cu_seqlens=cu_seqlens * self.num_householder + if cu_seqlens is not None + else None, + use_qk_l2norm_in_kernel=True, + ) + o = rearrange(o, "... (l n) h d -> ... l n h d", n=self.num_householder)[ + ..., -1, :, : + ].contiguous() + + if past_key_values is not None: + past_key_values.update( + recurrent_state=recurrent_state, + conv_state=(conv_state_q, conv_state_k, conv_state_v) + if self.use_short_conv + else None, + layer_idx=self.layer_idx, + offset=q_len, + ) + + if self.use_gate: + g = rearrange( + self.g_proj(hidden_states), "... (h d) -> ... 
h d", d=self.head_v_dim + ) + o = self.o_norm(o, g) + else: + o = self.o_norm(o) + o = rearrange(o, "b t h d -> b t (h d)") + o = self.o_proj(o) + if attention_mask is not None: + o = pad_input(o.squeeze(0), indices, batch_size, q_len) + return o, recurrent_state, past_key_values diff --git a/src/models/gated_deltaproduct/modeling_gated_deltaproduct.py b/src/models/gated_deltaproduct/modeling_gated_deltaproduct.py new file mode 100644 index 0000000000000000000000000000000000000000..938169b086abec2f531633e8dd74fe7548666309 --- /dev/null +++ b/src/models/gated_deltaproduct/modeling_gated_deltaproduct.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- + +from __future__ import annotations + +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from fla.layers.attn import Attention +from fla.models.utils import Cache +from fla.modules import GatedMLP as GatedDeltaProductMLP +from fla.modules import RMSNorm + +from src.models.gated_deltaproduct.configuration_gated_deltaproduct import ( + GatedDeltaProductConfig, +) +from src.models.gated_deltaproduct.gated_deltaproduct import GatedDeltaProduct + +if TYPE_CHECKING: + from transformers.processing_utils import Unpack + + +class GatedDeltaProductBlock(nn.Module): + def __init__(self, config: GatedDeltaProductConfig, layer_idx: int): + super().__init__() + + self.config = config + self.layer_idx = layer_idx + + self.attn_norm = (RMSNorm if config.fuse_norm else nn.RMSNorm)( + config.hidden_size, eps=config.norm_eps + ) + if config.attn is not None and layer_idx in config.attn["layers"]: + self.attn = Attention( + hidden_size=config.hidden_size, + num_heads=config.attn["num_heads"], + num_kv_heads=config.attn["num_kv_heads"], + qkv_bias=config.attn["qkv_bias"], + window_size=config.attn["window_size"], + rope_theta=config.attn["rope_theta"], + max_position_embeddings=config.max_position_embeddings, + layer_idx=layer_idx, + ) + else: + self.attn = GatedDeltaProduct( + mode=config.attn_mode, + hidden_size=config.hidden_size, + expand_v=config.expand_v, + head_dim=config.head_dim, + num_heads=config.num_heads, + use_gate=config.use_gate, + use_forget_gate=config.use_forget_gate, + use_short_conv=config.use_short_conv, + conv_size=config.conv_size, + norm_eps=config.norm_eps, + allow_neg_eigval=config.allow_neg_eigval, + num_householder=config.num_householder, + layer_idx=layer_idx, + ) + self.mlp_norm = (RMSNorm if config.fuse_norm else nn.RMSNorm)( + config.hidden_size, eps=config.norm_eps + ) + self.mlp = GatedDeltaProductMLP( + hidden_size=config.hidden_size, + hidden_ratio=config.hidden_ratio, + intermediate_size=config.intermediate_size, + hidden_act=config.hidden_act, + fuse_swiglu=config.fuse_swiglu, + ) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Union[Cache, List[torch.FloatTensor]]] = None, + use_cache: Optional[bool] = False, + output_attentions: Optional[bool] = False, + initial_state: Optional[torch.FloatTensor] = None, + **kwargs: Unpack[Dict], + ) -> Tuple[ + torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]] + ]: + residual = hidden_states + hidden_states = self.attn_norm(hidden_states) + hidden_states, attentions, past_key_values = self.attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + initial_state=initial_state, + **kwargs, + ) + if self.config.fuse_norm: + 
hidden_states, residual = self.mlp_norm(hidden_states, residual, True) + else: + hidden_states = residual + hidden_states + residual = hidden_states + hidden_states = self.mlp_norm(hidden_states) + hidden_states = self.mlp(hidden_states, **kwargs) + hidden_states = residual + hidden_states + + outputs = (hidden_states, attentions, past_key_values) + + return outputs diff --git a/src/models/model.py b/src/models/model.py new file mode 100644 index 0000000000000000000000000000000000000000..45cc97694c0f719f87f3886b4c1f099c9d55f4f5 --- /dev/null +++ b/src/models/model.py @@ -0,0 +1,427 @@ +import torch +import torch.nn as nn +from fla.modules import GatedMLP + +from src.data.containers import BatchTimeSeriesContainer +from src.data.scalers import MinMaxScaler, RobustScaler +from src.data.time_features import compute_batch_time_features +from src.models.blocks import GatedDeltaProductEncoder +from src.utils.utils import device + + +def create_scaler(scaler_type: str, epsilon: float = 1e-3): + """Create scaler instance based on type.""" + if scaler_type == "custom_robust": + return RobustScaler(epsilon=epsilon) + elif scaler_type == "min_max": + return MinMaxScaler(epsilon=epsilon) + else: + raise ValueError(f"Unknown scaler: {scaler_type}") + + +def apply_channel_noise(values: torch.Tensor, noise_scale: float = 0.1): + """Add noise to constant channels to prevent model instability.""" + is_constant = torch.all(values == values[:, 0:1, :], dim=1) + noise = torch.randn_like(values) * noise_scale * is_constant.unsqueeze(1) + return values + noise + + +class TimeSeriesModel(nn.Module): + """Time series forecasting model combining embedding, encoding, and prediction.""" + + def __init__( + self, + # Core architecture + embed_size: int = 128, + num_encoder_layers: int = 2, + # Scaling and preprocessing + scaler: str = "custom_robust", + epsilon: float = 1e-3, + scaler_clamp_value: float = None, + handle_constants: bool = False, + # Time features + K_max: int = 6, + time_feature_config: dict = None, + encoding_dropout: float = 0.0, + # Encoder configuration + encoder_config: dict = None, + # Loss configuration + loss_type: str = "huber", # "huber", "quantile" + quantiles: list[float] = None, + **kwargs, + ): + super().__init__() + + # Core parameters + self.embed_size = embed_size + self.num_encoder_layers = num_encoder_layers + self.epsilon = epsilon + self.scaler_clamp_value = scaler_clamp_value + self.handle_constants = handle_constants + self.encoding_dropout = encoding_dropout + self.K_max = K_max + self.time_feature_config = time_feature_config or {} + self.encoder_config = encoder_config or {} + + # Store loss parameters + self.loss_type = loss_type + self.quantiles = quantiles + if self.loss_type == "quantile" and self.quantiles is None: + raise ValueError("Quantiles must be provided for quantile loss.") + if self.quantiles: + self.register_buffer( + "qt", torch.tensor(self.quantiles, device=device).view(1, 1, 1, -1) + ) + + # Validate configuration before initialization + self._validate_configuration() + + # Initialize components + self.scaler = create_scaler(scaler, epsilon) + self._init_embedding_layers() + self._init_encoder_layers(self.encoder_config, num_encoder_layers) + self._init_projection_layers() + + def _validate_configuration(self): + """Validate essential model configuration parameters.""" + if "num_heads" not in self.encoder_config: + raise ValueError("encoder_config must contain 'num_heads' parameter") + + if self.embed_size % self.encoder_config["num_heads"] != 0: + raise 
ValueError( + f"embed_size ({self.embed_size}) must be divisible by " + f"num_heads ({self.encoder_config['num_heads']})" + ) + + def _init_embedding_layers(self): + """Initialize value and time feature embedding layers.""" + self.expand_values = nn.Linear(1, self.embed_size, bias=True) + self.nan_embedding = nn.Parameter( + torch.randn(1, 1, 1, self.embed_size) / self.embed_size, + requires_grad=True, + ) + self.time_feature_projection = nn.Linear(self.K_max, self.embed_size) + + def _init_encoder_layers(self, encoder_config: dict, num_encoder_layers: int): + """Initialize encoder layers.""" + self.num_encoder_layers = num_encoder_layers + + # Ensure encoder_config has token_embed_dim + encoder_config = encoder_config.copy() + encoder_config["token_embed_dim"] = self.embed_size + self.encoder_layers = nn.ModuleList( + [ + GatedDeltaProductEncoder(layer_idx=layer_idx, **encoder_config) + for layer_idx in range(self.num_encoder_layers) + ] + ) + + def _init_projection_layers(self): + if self.loss_type == "quantile": + output_dim = len(self.quantiles) + else: + output_dim = 1 + self.final_output_layer = nn.Linear(self.embed_size, output_dim) + + self.mlp = GatedMLP( + hidden_size=self.embed_size, + hidden_ratio=4, + hidden_act="swish", + fuse_swiglu=True, + ) + # Initialize learnable initial hidden state for the first encoder layer + # This will be expanded to match batch size during forward pass + head_k_dim = self.embed_size // self.encoder_config["num_heads"] + + # Get expand_v from encoder_config, default to 1.0 if not present + expand_v = self.encoder_config.get("expand_v", 1.0) + head_v_dim = int(head_k_dim * expand_v) + + num_initial_hidden_states = self.num_encoder_layers + self.initial_hidden_state = nn.ParameterList( + [ + nn.Parameter( + torch.randn( + 1, self.encoder_config["num_heads"], head_k_dim, head_v_dim + ) + / head_k_dim, + requires_grad=True, + ) + for _ in range(num_initial_hidden_states) + ] + ) + + def _preprocess_data(self, data_container: BatchTimeSeriesContainer): + """Extract data shapes and handle constants without padding.""" + history_values = data_container.history_values + future_values = data_container.future_values + history_mask = data_container.history_mask + + batch_size, history_length, num_channels = history_values.shape + future_length = future_values.shape[1] if future_values is not None else 0 + + # Handle constants + if self.handle_constants: + history_values = apply_channel_noise(history_values) + + return { + "history_values": history_values, + "future_values": future_values, + "history_mask": history_mask, + "num_channels": num_channels, + "history_length": history_length, + "future_length": future_length, + "batch_size": batch_size, + } + + def _compute_scaling( + self, history_values: torch.Tensor, history_mask: torch.Tensor = None + ): + """Compute scaling statistics and apply scaling.""" + scale_statistics = self.scaler.compute_statistics(history_values, history_mask) + return scale_statistics + + def _apply_scaling_and_masking( + self, values: torch.Tensor, scale_statistics: dict, mask: torch.Tensor = None + ): + """Apply scaling and optional masking to values.""" + scaled_values = self.scaler.scale(values, scale_statistics) + + if mask is not None: + scaled_values = scaled_values * mask.unsqueeze(-1).float() + + if self.scaler_clamp_value is not None: + scaled_values = torch.clamp( + scaled_values, -self.scaler_clamp_value, self.scaler_clamp_value + ) + + return scaled_values + + def _get_positional_embeddings( + self, + time_features: 
torch.Tensor, + num_channels: int, + batch_size: int, + drop_enc_allow: bool = False, + ): + """Generate positional embeddings from time features.""" + seq_len = time_features.shape[1] + + if (torch.rand(1).item() < self.encoding_dropout) and drop_enc_allow: + return torch.zeros( + batch_size, seq_len, num_channels, self.embed_size, device=device + ).to(torch.float32) + + pos_embed = self.time_feature_projection(time_features) + return pos_embed.unsqueeze(2).expand(-1, -1, num_channels, -1) + + def _compute_embeddings( + self, + scaled_history: torch.Tensor, + history_pos_embed: torch.Tensor, + history_mask: torch.Tensor | None = None, + ): + """Compute value embeddings and combine with positional embeddings.""" + + nan_mask = torch.isnan(scaled_history) + history_for_embedding = torch.nan_to_num(scaled_history, nan=0.0) + channel_embeddings = self.expand_values(history_for_embedding.unsqueeze(-1)) + channel_embeddings[nan_mask] = self.nan_embedding.to(channel_embeddings.dtype) + channel_embeddings = channel_embeddings + history_pos_embed + + # Suppress padded time steps completely so padding is a pure batching artifact + # history_mask: [B, S] -> broadcast to [B, S, 1, 1] + if history_mask is not None: + mask_broadcast = ( + history_mask.unsqueeze(-1).unsqueeze(-1).to(channel_embeddings.dtype) + ) + channel_embeddings = channel_embeddings * mask_broadcast + + batch_size, seq_len = scaled_history.shape[:2] + all_channels_embedded = channel_embeddings.view(batch_size, seq_len, -1) + + return all_channels_embedded + + def _generate_predictions( + self, + embedded: torch.Tensor, + target_pos_embed: torch.Tensor, + prediction_length: int, + num_channels: int, + history_mask: torch.Tensor = None, + ): + """ + Generate predictions for all channels using vectorized operations. + """ + batch_size, seq_len, _ = embedded.shape + # embedded shape: [B, S, N*E] -> Reshape to [B, S, N, E] + embedded = embedded.view(batch_size, seq_len, num_channels, self.embed_size) + + # Vectorize across channels by merging the batch and channel dimensions. 
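+        # Each channel is treated as an independent sequence: after the reshape below
+        # the encoder sees B*N sequences of length S, which is also why the learnable
+        # initial hidden states are repeated batch_size * num_channels times.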
+ # [B, S, N, E] -> [B*N, S, E] + channel_embedded = ( + embedded.permute(0, 2, 1, 3) + .contiguous() + .view(batch_size * num_channels, seq_len, self.embed_size) + ) + + # Reshape target positional embeddings similarly: [B, P, N, E] -> [B*N, P, E] + target_pos_embed = ( + target_pos_embed.permute(0, 2, 1, 3) + .contiguous() + .view(batch_size * num_channels, prediction_length, self.embed_size) + ) + x = channel_embedded + target_repr = target_pos_embed + x = torch.concatenate([x, target_repr], dim=1) + if self.encoder_config.get("weaving", True): + # initial hidden state is learnable + hidden_state = torch.zeros_like( + self.initial_hidden_state[0].repeat(batch_size * num_channels, 1, 1, 1) + ) + for layer_idx, encoder_layer in enumerate(self.encoder_layers): + x, hidden_state = encoder_layer( + x, + hidden_state + + self.initial_hidden_state[layer_idx].repeat( + batch_size * num_channels, 1, 1, 1 + ), + ) + else: + # initial hidden state is separately learnable for each layer + for layer_idx, encoder_layer in enumerate(self.encoder_layers): + initial_hidden_state = self.initial_hidden_state[layer_idx].repeat( + batch_size * num_channels, 1, 1, 1 + ) + x, _ = encoder_layer(x, initial_hidden_state) + + # Use the last prediction_length positions + prediction_embeddings = x[:, -prediction_length:, :] + + predictions = self.final_output_layer(self.mlp(prediction_embeddings)) + + # Reshape output to handle quantiles + # Original shape: [B*N, P, Q] where Q is num_quantiles or 1 + # Reshape the output back to [B, P, N, Q] + output_dim = len(self.quantiles) if self.loss_type == "quantile" else 1 + predictions = predictions.view( + batch_size, num_channels, prediction_length, output_dim + ) + predictions = predictions.permute(0, 2, 1, 3) # [B, P, N, Q] + # Squeeze the last dimension if not in quantile mode for backward compatibility + if self.loss_type != "quantile": + predictions = predictions.squeeze(-1) # [B, P, N] + return predictions + + def forward( + self, data_container: BatchTimeSeriesContainer, drop_enc_allow: bool = False + ): + """Main forward pass.""" + # Preprocess data + preprocessed = self._preprocess_data(data_container) + + # Compute time features dynamically based on actual lengths + history_time_features, target_time_features = compute_batch_time_features( + start=data_container.start, + history_length=preprocessed["history_length"], + future_length=preprocessed["future_length"], + batch_size=preprocessed["batch_size"], + frequency=data_container.frequency, + K_max=self.K_max, + time_feature_config=self.time_feature_config, + ) + + # Compute scaling + scale_statistics = self._compute_scaling( + preprocessed["history_values"], preprocessed["history_mask"] + ) + + # Apply scaling + history_scaled = self._apply_scaling_and_masking( + preprocessed["history_values"], + scale_statistics, + preprocessed["history_mask"], + ) + + # Scale future values if present + future_scaled = None + if preprocessed["future_values"] is not None: + future_scaled = self.scaler.scale( + preprocessed["future_values"], scale_statistics + ) + + # Get positional embeddings + history_pos_embed = self._get_positional_embeddings( + history_time_features, + preprocessed["num_channels"], + preprocessed["batch_size"], + drop_enc_allow, + ) + target_pos_embed = self._get_positional_embeddings( + target_time_features, + preprocessed["num_channels"], + preprocessed["batch_size"], + drop_enc_allow, + ) + + # Compute embeddings + history_embed = self._compute_embeddings( + history_scaled, history_pos_embed, 
preprocessed["history_mask"] + ) + + # Generate predictions + predictions = self._generate_predictions( + history_embed, + target_pos_embed, + preprocessed["future_length"], + preprocessed["num_channels"], + preprocessed["history_mask"], + ) + + return { + "result": predictions, + "scale_statistics": scale_statistics, + "future_scaled": future_scaled, + "history_length": preprocessed["history_length"], + "future_length": preprocessed["future_length"], + } + + def _quantile_loss(self, y_true: torch.Tensor, y_pred: torch.Tensor): + """ + Compute the quantile loss. + y_true: [B, P, N] + y_pred: [B, P, N, Q] + """ + # Add a dimension to y_true to match y_pred: [B, P, N] -> [B, P, N, 1] + y_true = y_true.unsqueeze(-1) + + # Calculate errors + errors = y_true - y_pred + + # Calculate quantile loss + # The max operator implements the two cases of the quantile loss formula + loss = torch.max((self.qt - 1) * errors, self.qt * errors) + + # Average the loss across all dimensions + return loss.mean() + + def compute_loss(self, y_true: torch.Tensor, y_pred: dict): + """Compute loss between predictions and scaled ground truth.""" + predictions = y_pred["result"] + scale_statistics = y_pred["scale_statistics"] + + if y_true is None: + return torch.tensor(0.0, device=predictions.device) + + future_scaled = self.scaler.scale(y_true, scale_statistics) + + if self.loss_type == "huber": + if predictions.shape != future_scaled.shape: + raise ValueError( + f"Shape mismatch for Huber loss: predictions {predictions.shape} vs future_scaled {future_scaled.shape}" + ) + return nn.functional.huber_loss(predictions, future_scaled) + elif self.loss_type == "quantile": + return self._quantile_loss(future_scaled, predictions) + else: + raise ValueError(f"Unknown loss type: {self.loss_type}") diff --git a/src/optim/lr_scheduler.py b/src/optim/lr_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..28a6338f79f0c6a599437f60c623050fa0ff1439 --- /dev/null +++ b/src/optim/lr_scheduler.py @@ -0,0 +1,360 @@ +# src/utils/lr_scheduler.py + +import math +from enum import Enum +from functools import partial +from typing import Optional + +from torch.optim import Optimizer +from torch.optim.lr_scheduler import LambdaLR + + +class SchedulerType(Enum): + """Enumeration of available learning rate schedulers.""" + + COSINE = "cosine" + COSINE_WITH_WARMUP = "cosine_with_warmup" + COSINE_WITH_RESTARTS = "cosine_with_restarts" + WARMUP_STABLE_DECAY = "warmup_stable_decay" + POLYNOMIAL_WITH_WARMUP = "polynomial_with_warmup" + LINEAR_WITH_WARMUP = "linear_with_warmup" + CONSTANT_WITH_WARMUP = "constant_with_warmup" + INVERSE_SQRT = "inverse_sqrt" + + +def _get_warmup_stable_decay_lr_lambda( + current_step: int, + *, + num_warmup_steps: int, + num_stable_steps: int, + num_training_steps: int, + min_lr_ratio: float = 0.001, + decay_type: str = "cosine", +): + """ + Learning rate lambda function for Warmup-Stable-Decay (WSD) schedule. + + This scheduler implements three phases: + 1. Warmup: Linear increase from 0 to peak learning rate + 2. Stable: Constant learning rate for majority of training + 3. 
Decay: Gradual decrease using cosine or linear decay + + Args: + current_step: Current training step + num_warmup_steps: Number of warmup steps + num_stable_steps: Number of stable learning rate steps + num_training_steps: Total number of training steps + min_lr_ratio: Minimum learning rate as ratio of peak learning rate + decay_type: Type of decay schedule ("cosine" or "linear") + """ + if current_step < num_warmup_steps: + # Warmup phase: linear increase + return float(current_step) / float(max(1, num_warmup_steps)) + + elif current_step < num_warmup_steps + num_stable_steps: + # Stable phase: constant learning rate + return 1.0 + + else: + # Decay phase + decay_steps = num_training_steps - num_warmup_steps - num_stable_steps + if decay_steps <= 0: + return max(min_lr_ratio, 1.0) + + progress = (current_step - num_warmup_steps - num_stable_steps) / decay_steps + progress = min(progress, 1.0) # Clamp to [0, 1] + + if decay_type == "cosine": + # Cosine decay + decay_factor = 0.5 * (1.0 + math.cos(math.pi * progress)) + return max(min_lr_ratio, decay_factor) + elif decay_type == "linear": + # Linear decay + decay_factor = 1.0 - progress + return max(min_lr_ratio, decay_factor) + else: + raise ValueError(f"Unknown decay_type: {decay_type}") + + +def get_warmup_stable_decay_schedule( + optimizer: Optimizer, + num_warmup_steps: int, + num_stable_steps: int, + num_training_steps: int, + min_lr_ratio: float = 0.01, + decay_type: str = "cosine", + last_epoch: int = -1, +): + """ + Create a Warmup-Stable-Decay learning rate schedule. + + This scheduler is particularly well-suited for foundation model training as it: + - Provides stable learning during the majority of training + - Doesn't require pre-committing to exact training duration + - Allows for extended training without aggressive decay + + Args: + optimizer: The optimizer for which to schedule the learning rate + num_warmup_steps: Number of steps for warmup phase + num_stable_steps: Number of steps for stable learning rate phase + num_training_steps: Total number of training steps + min_lr_ratio: Minimum learning rate as fraction of peak learning rate + decay_type: Type of decay ("cosine" or "linear") + last_epoch: The index of the last epoch when resuming training + + Returns: + torch.optim.lr_scheduler.LambdaLR with the WSD schedule + """ + lr_lambda = partial( + _get_warmup_stable_decay_lr_lambda, + num_warmup_steps=num_warmup_steps, + num_stable_steps=num_stable_steps, + num_training_steps=num_training_steps, + min_lr_ratio=min_lr_ratio, + decay_type=decay_type, + ) + return LambdaLR(optimizer, lr_lambda, last_epoch=last_epoch) + + +def _get_cosine_schedule_with_warmup_lr_lambda( + current_step: int, + *, + num_warmup_steps: int, + num_training_steps: int, + num_cycles: float = 0.5, + min_lr_ratio: float = 0.0, +): + """Enhanced cosine schedule with configurable minimum learning rate.""" + if current_step < num_warmup_steps: + return float(current_step) / float(max(1, num_warmup_steps)) + + progress = float(current_step - num_warmup_steps) / float( + max(1, num_training_steps - num_warmup_steps) + ) + cosine_factor = 0.5 * (1.0 + math.cos(math.pi * float(num_cycles) * 2.0 * progress)) + return max(min_lr_ratio, cosine_factor) + + +def get_enhanced_cosine_schedule_with_warmup( + optimizer: Optimizer, + num_warmup_steps: int, + num_training_steps: int, + num_cycles: float = 0.5, + min_lr_ratio: float = 0.01, + last_epoch: int = -1, +): + """ + Enhanced cosine schedule with warmup and configurable minimum learning rate. 
+ + Args: + optimizer: The optimizer for which to schedule the learning rate + num_warmup_steps: Number of steps for warmup phase + num_training_steps: Total number of training steps + num_cycles: Number of cosine cycles (0.5 = half cosine) + min_lr_ratio: Minimum learning rate as fraction of peak learning rate + last_epoch: The index of the last epoch when resuming training + """ + lr_lambda = partial( + _get_cosine_schedule_with_warmup_lr_lambda, + num_warmup_steps=num_warmup_steps, + num_training_steps=num_training_steps, + num_cycles=num_cycles, + min_lr_ratio=min_lr_ratio, + ) + return LambdaLR(optimizer, lr_lambda, last_epoch=last_epoch) + + +def _get_cosine_with_restarts_lr_lambda( + current_step: int, + *, + num_warmup_steps: int, + num_training_steps: int, + num_cycles: int = 1, + min_lr_ratio: float = 0.0, +): + """Cosine schedule with hard restarts and configurable minimum learning rate.""" + if current_step < num_warmup_steps: + return float(current_step) / float(max(1, num_warmup_steps)) + + progress = float(current_step - num_warmup_steps) / float( + max(1, num_training_steps - num_warmup_steps) + ) + if progress >= 1.0: + return min_lr_ratio + + cosine_factor = 0.5 * ( + 1.0 + math.cos(math.pi * ((float(num_cycles) * progress) % 1.0)) + ) + return max(min_lr_ratio, cosine_factor) + + +def get_cosine_with_restarts_schedule( + optimizer: Optimizer, + num_warmup_steps: int, + num_training_steps: int, + num_cycles: int = 4, + min_lr_ratio: float = 0.01, + last_epoch: int = -1, +): + """ + Cosine schedule with hard restarts. + + Args: + optimizer: The optimizer for which to schedule the learning rate + num_warmup_steps: Number of steps for warmup phase + num_training_steps: Total number of training steps + num_cycles: Number of restart cycles + min_lr_ratio: Minimum learning rate as fraction of peak learning rate + last_epoch: The index of the last epoch when resuming training + """ + lr_lambda = partial( + _get_cosine_with_restarts_lr_lambda, + num_warmup_steps=num_warmup_steps, + num_training_steps=num_training_steps, + num_cycles=num_cycles, + min_lr_ratio=min_lr_ratio, + ) + return LambdaLR(optimizer, lr_lambda, last_epoch=last_epoch) + + +# Scheduler registry for easy lookup +SCHEDULER_REGISTRY = { + SchedulerType.WARMUP_STABLE_DECAY: get_warmup_stable_decay_schedule, + SchedulerType.COSINE_WITH_WARMUP: get_enhanced_cosine_schedule_with_warmup, + SchedulerType.COSINE_WITH_RESTARTS: get_cosine_with_restarts_schedule, +} + + +def get_scheduler( + scheduler_type: str | SchedulerType, + optimizer: Optimizer, + num_warmup_steps: int, + num_training_steps: int, + scheduler_kwargs: Optional[dict] = None, +): + """ + Unified interface to create learning rate schedulers. 
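+
+    A minimal illustrative call (the step counts are hypothetical; options specific
+    to one schedule, such as num_stable_steps for warmup-stable-decay, go inside
+    scheduler_kwargs):
+
+        scheduler = get_scheduler(
+            "warmup_stable_decay",
+            optimizer,
+            num_warmup_steps=1_000,
+            num_training_steps=100_000,
+            scheduler_kwargs={"num_stable_steps": 80_000},
+        )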
+ + Args: + scheduler_type: Type of scheduler to create + optimizer: The optimizer to schedule + num_warmup_steps: Number of warmup steps + num_training_steps: Total training steps + scheduler_kwargs: Additional scheduler-specific parameters + + Returns: + Configured learning rate scheduler + """ + if isinstance(scheduler_type, str): + scheduler_type = SchedulerType(scheduler_type) + + if scheduler_kwargs is None: + scheduler_kwargs = {} + + if scheduler_type not in SCHEDULER_REGISTRY: + raise ValueError(f"Unsupported scheduler type: {scheduler_type}") + + scheduler_func = SCHEDULER_REGISTRY[scheduler_type] + return scheduler_func( + optimizer=optimizer, + num_warmup_steps=num_warmup_steps, + num_training_steps=num_training_steps, + **scheduler_kwargs, + ) + + +class WarmupStableDecayScheduler: + """ + Alternative implementation as a standalone scheduler class. + + This provides more flexibility and better state management for + complex training scenarios with checkpointing. + """ + + def __init__( + self, + optimizer: Optimizer, + num_warmup_steps: int, + num_stable_steps: int, + total_steps: int, + min_lr_ratio: float = 0.01, + decay_type: str = "cosine", + verbose: bool = False, + ): + self.optimizer = optimizer + self.num_warmup_steps = num_warmup_steps + self.num_stable_steps = num_stable_steps + self.total_steps = total_steps + self.min_lr_ratio = min_lr_ratio + self.decay_type = decay_type + self.verbose = verbose + + # Store initial learning rates + self.base_lrs = [group["lr"] for group in optimizer.param_groups] + self.current_step = 0 + + def get_lr_factor(self, step: int) -> float: + """Calculate the learning rate multiplication factor for given step.""" + if step < self.num_warmup_steps: + # Warmup phase + return step / max(1, self.num_warmup_steps) + elif step < self.num_warmup_steps + self.num_stable_steps: + # Stable phase + return 1.0 + else: + # Decay phase + decay_steps = ( + self.total_steps - self.num_warmup_steps - self.num_stable_steps + ) + if decay_steps <= 0: + return max(self.min_lr_ratio, 1.0) + + progress = ( + step - self.num_warmup_steps - self.num_stable_steps + ) / decay_steps + progress = min(progress, 1.0) + + if self.decay_type == "cosine": + decay_factor = 0.5 * (1.0 + math.cos(math.pi * progress)) + elif self.decay_type == "linear": + decay_factor = 1.0 - progress + else: + raise ValueError(f"Unknown decay_type: {self.decay_type}") + + return max(self.min_lr_ratio, decay_factor) + + def step(self): + """Update learning rates for all parameter groups.""" + lr_factor = self.get_lr_factor(self.current_step) + + for param_group, base_lr in zip(self.optimizer.param_groups, self.base_lrs): + param_group["lr"] = base_lr * lr_factor + + if self.verbose and self.current_step % 1000 == 0: + phase = self.get_phase() + print( + f"Step {self.current_step}: LR factor = {lr_factor:.6f}, Phase = {phase}" + ) + + self.current_step += 1 + + def get_phase(self) -> str: + """Get current training phase.""" + if self.current_step < self.num_warmup_steps: + return "warmup" + elif self.current_step < self.num_warmup_steps + self.num_stable_steps: + return "stable" + else: + return "decay" + + def state_dict(self) -> dict: + """Return scheduler state for checkpointing.""" + return { + "current_step": self.current_step, + "base_lrs": self.base_lrs, + } + + def load_state_dict(self, state_dict: dict): + """Load scheduler state from checkpoint.""" + self.current_step = state_dict["current_step"] + self.base_lrs = state_dict["base_lrs"] diff --git a/src/plotting/__init__.py 
b/src/plotting/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/plotting/gift_eval_utils.py b/src/plotting/gift_eval_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b6049797b8a777c483526014d69a385eede9a642 --- /dev/null +++ b/src/plotting/gift_eval_utils.py @@ -0,0 +1,215 @@ +import logging +from typing import List, Optional, Tuple + +import numpy as np +import pandas as pd +from gluonts.model.forecast import QuantileForecast + +from src.data.frequency import parse_frequency +from src.plotting.plot_timeseries import ( + plot_multivariate_timeseries, +) + +logger = logging.getLogger(__name__) + + +def _prepare_data_for_plotting( + input_data: dict, label_data: dict, max_context_length: int +): + history_values = np.asarray(input_data["target"], dtype=np.float32) + future_values = np.asarray(label_data["target"], dtype=np.float32) + start_period = input_data["start"] + + def ensure_time_first(arr: np.ndarray) -> np.ndarray: + if arr.ndim == 1: + return arr.reshape(-1, 1) + elif arr.ndim == 2: + if arr.shape[0] < arr.shape[1]: + return arr.T + return arr + else: + return arr.reshape(arr.shape[-1], -1).T + + history_values = ensure_time_first(history_values) + future_values = ensure_time_first(future_values) + + if max_context_length is not None and history_values.shape[0] > max_context_length: + history_values = history_values[-max_context_length:] + + # Convert Period to Timestamp if needed + start_timestamp = ( + start_period.to_timestamp() + if hasattr(start_period, "to_timestamp") + else pd.Timestamp(start_period) + ) + return history_values, future_values, start_timestamp + + +def _extract_quantile_predictions( + forecast, +) -> Tuple[Optional[np.ndarray], Optional[np.ndarray], Optional[np.ndarray]]: + def ensure_2d_time_first(arr): + if arr is None: + return None + arr = np.asarray(arr) + if arr.ndim == 1: + return arr.reshape(-1, 1) + elif arr.ndim == 2: + return arr + else: + return arr.reshape(arr.shape[0], -1) + + if isinstance(forecast, QuantileForecast): + try: + median_pred = forecast.quantile(0.5) + try: + lower_bound = forecast.quantile(0.1) + upper_bound = forecast.quantile(0.9) + except (KeyError, ValueError): + lower_bound = None + upper_bound = None + median_pred = ensure_2d_time_first(median_pred) + lower_bound = ensure_2d_time_first(lower_bound) + upper_bound = ensure_2d_time_first(upper_bound) + return median_pred, lower_bound, upper_bound + except Exception: + try: + median_pred = forecast.quantile(0.5) + median_pred = ensure_2d_time_first(median_pred) + return median_pred, None, None + except Exception: + return None, None, None + else: + try: + samples = forecast.samples + if samples.ndim == 1: + median_pred = samples + elif samples.ndim == 2: + if samples.shape[0] == 1: + median_pred = samples[0] + else: + median_pred = np.median(samples, axis=0) + elif samples.ndim == 3: + median_pred = np.median(samples, axis=0) + else: + median_pred = samples[0] if len(samples) > 0 else samples + median_pred = ensure_2d_time_first(median_pred) + return median_pred, None, None + except Exception: + return None, None, None + + +def _create_plot( + input_data: dict, + label_data: dict, + forecast, + dataset_full_name: str, + dataset_freq: str, + max_context_length: int, + title: Optional[str] = None, +): + try: + history_values, future_values, start_timestamp = _prepare_data_for_plotting( + input_data, label_data, max_context_length + ) + median_pred, lower_bound, 
upper_bound = _extract_quantile_predictions(forecast) + if median_pred is None: + logger.warning(f"Could not extract predictions for {dataset_full_name}") + return None + + def ensure_compatible_shape(pred_arr, target_arr): + if pred_arr is None: + return None + pred_arr = np.asarray(pred_arr) + target_arr = np.asarray(target_arr) + if pred_arr.ndim == 1: + pred_arr = pred_arr.reshape(-1, 1) + if target_arr.ndim == 1: + target_arr = target_arr.reshape(-1, 1) + if pred_arr.shape != target_arr.shape: + if pred_arr.shape[0] == target_arr.shape[0]: + if pred_arr.shape[1] == 1 and target_arr.shape[1] > 1: + pred_arr = np.broadcast_to(pred_arr, target_arr.shape) + elif pred_arr.shape[1] > 1 and target_arr.shape[1] == 1: + pred_arr = pred_arr[:, :1] + elif pred_arr.shape[1] == target_arr.shape[1]: + min_time = min(pred_arr.shape[0], target_arr.shape[0]) + pred_arr = pred_arr[:min_time] + else: + if pred_arr.T.shape == target_arr.shape: + pred_arr = pred_arr.T + else: + if pred_arr.size >= target_arr.shape[0]: + pred_arr = pred_arr.flatten()[ + : target_arr.shape[0] + ].reshape(-1, 1) + if target_arr.shape[1] > 1: + pred_arr = np.broadcast_to(pred_arr, target_arr.shape) + return pred_arr + + median_pred = ensure_compatible_shape(median_pred, future_values) + lower_bound = ensure_compatible_shape(lower_bound, future_values) + upper_bound = ensure_compatible_shape(upper_bound, future_values) + + title = title or f"GIFT-Eval: {dataset_full_name}" + frequency = parse_frequency(dataset_freq) + fig = plot_multivariate_timeseries( + history_values=history_values, + future_values=future_values, + predicted_values=median_pred, + lower_bound=lower_bound, + upper_bound=upper_bound, + start=start_timestamp, + frequency=frequency, + title=title, + show=False, + ) + return fig + except Exception as e: + logger.warning(f"Failed to create plot for {dataset_full_name}: {e}") + return None + + +def create_plots_for_dataset( + forecasts: List, + test_data, + dataset_metadata, + max_plots: int, + max_context_length: int, +) -> List[Tuple[object, str]]: + input_data_list = list(test_data.input) + label_data_list = list(test_data.label) + num_plots = min(len(forecasts), max_plots) + logger.info( + f"Creating {num_plots} plots for {getattr(dataset_metadata, 'full_name', str(dataset_metadata))}" + ) + + figures_with_names: List[Tuple[object, str]] = [] + for i in range(num_plots): + try: + forecast = forecasts[i] + input_data = input_data_list[i] + label_data = label_data_list[i] + title = ( + f"GIFT-Eval: {dataset_metadata.full_name} - Window {i + 1}/{num_plots}" + if hasattr(dataset_metadata, "full_name") + else f"Window {i + 1}/{num_plots}" + ) + fig = _create_plot( + input_data=input_data, + label_data=label_data, + forecast=forecast, + dataset_full_name=getattr(dataset_metadata, "full_name", "dataset"), + dataset_freq=getattr(dataset_metadata, "freq", "D"), + max_context_length=max_context_length, + title=title, + ) + if fig is not None: + filename = ( + f"{getattr(dataset_metadata, 'freq', 'D')}_window_{i + 1:03d}.png" + ) + figures_with_names.append((fig, filename)) + except Exception as e: + logger.warning(f"Error creating plot for window {i + 1}: {e}") + continue + return figures_with_names diff --git a/src/plotting/plot_timeseries.py b/src/plotting/plot_timeseries.py new file mode 100644 index 0000000000000000000000000000000000000000..275cc00dc80727f536a1a3deeed9a364eebad164 --- /dev/null +++ b/src/plotting/plot_timeseries.py @@ -0,0 +1,292 @@ +import logging +from typing import List, Optional, Tuple, Union + 
+import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import torch +import torchmetrics +from matplotlib.figure import Figure + +from src.data.containers import BatchTimeSeriesContainer +from src.data.frequency import Frequency + +logger = logging.getLogger(__name__) + + +def calculate_smape(y_true: np.ndarray, y_pred: np.ndarray) -> float: + """Calculate Symmetric Mean Absolute Percentage Error (SMAPE).""" + pred_tensor = torch.from_numpy(y_pred).float() + true_tensor = torch.from_numpy(y_true).float() + return torchmetrics.SymmetricMeanAbsolutePercentageError()( + pred_tensor, true_tensor + ).item() + + +def _create_date_ranges( + start: Optional[Union[np.datetime64, pd.Timestamp]], + frequency: Optional[Union[Frequency, str]], + history_length: int, + prediction_length: int, +) -> Tuple[pd.DatetimeIndex, pd.DatetimeIndex]: + """Create date ranges for history and future periods.""" + if start is not None and frequency is not None: + start_timestamp = pd.Timestamp(start) + pandas_freq = frequency.to_pandas_freq(for_date_range=True) + + history_dates = pd.date_range( + start=start_timestamp, periods=history_length, freq=pandas_freq + ) + + if prediction_length > 0: + next_timestamp = history_dates[-1] + pd.tseries.frequencies.to_offset( + pandas_freq + ) + future_dates = pd.date_range( + start=next_timestamp, periods=prediction_length, freq=pandas_freq + ) + else: + future_dates = pd.DatetimeIndex([]) + else: + # Fallback to default daily frequency + history_dates = pd.date_range( + end=pd.Timestamp.now(), periods=history_length, freq="D" + ) + + if prediction_length > 0: + future_dates = pd.date_range( + start=history_dates[-1] + pd.Timedelta(days=1), + periods=prediction_length, + freq="D", + ) + else: + future_dates = pd.DatetimeIndex([]) + + return history_dates, future_dates + + +def _plot_single_channel( + ax: plt.Axes, + channel_idx: int, + history_dates: pd.DatetimeIndex, + future_dates: pd.DatetimeIndex, + history_values: np.ndarray, + future_values: Optional[np.ndarray] = None, + predicted_values: Optional[np.ndarray] = None, + lower_bound: Optional[np.ndarray] = None, + upper_bound: Optional[np.ndarray] = None, +) -> None: + """Plot a single channel's time series data.""" + # Plot history + ax.plot( + history_dates, history_values[:, channel_idx], color="black", label="History" + ) + + # Plot ground truth future + if future_values is not None: + ax.plot( + future_dates, + future_values[:, channel_idx], + color="blue", + label="Ground Truth", + ) + + # Plot predictions + if predicted_values is not None: + ax.plot( + future_dates, + predicted_values[:, channel_idx], + color="orange", + linestyle="--", + label="Prediction (Median)", + ) + + # Plot uncertainty band + if lower_bound is not None and upper_bound is not None: + ax.fill_between( + future_dates, + lower_bound[:, channel_idx], + upper_bound[:, channel_idx], + color="orange", + alpha=0.2, + label="Uncertainty Band", + ) + + ax.set_title(f"Channel {channel_idx + 1}") + ax.grid(True, which="both", linestyle="--", linewidth=0.5) + + +def _setup_figure(num_channels: int) -> Tuple[Figure, List[plt.Axes]]: + """Create and configure the matplotlib figure and axes.""" + fig, axes = plt.subplots( + num_channels, 1, figsize=(15, 3 * num_channels), sharex=True + ) + if num_channels == 1: + axes = [axes] + return fig, axes + + +def _finalize_plot( + fig: Figure, + axes: List[plt.Axes], + title: Optional[str] = None, + smape_value: Optional[float] = None, + output_file: Optional[str] = None, + show: bool = True, +) 
-> None: + """Add legend, title, and save/show the plot.""" + # Create legend from first axis + handles, labels = axes[0].get_legend_handles_labels() + fig.legend(handles, labels, loc="upper right") + + # Set title with optional SMAPE + if title: + if smape_value is not None: + title = f"{title} | SMAPE: {smape_value:.4f}" + fig.suptitle(title, fontsize=16) + + # Adjust layout + plt.tight_layout(rect=[0, 0.03, 1, 0.95] if title else None) + + # Save and/or show + if output_file: + plt.savefig(output_file, dpi=300) + if show: + plt.show() + else: + plt.close(fig) + + +def plot_multivariate_timeseries( + history_values: np.ndarray, + future_values: Optional[np.ndarray] = None, + predicted_values: Optional[np.ndarray] = None, + start: Optional[Union[np.datetime64, pd.Timestamp]] = None, + frequency: Optional[Union[Frequency, str]] = None, + title: Optional[str] = None, + output_file: Optional[str] = None, + show: bool = True, + lower_bound: Optional[np.ndarray] = None, + upper_bound: Optional[np.ndarray] = None, +) -> Figure: + """Plot a multivariate time series with history, future, predictions, and uncertainty bands.""" + # Calculate SMAPE if both predicted and true values are available + smape_value = None + if predicted_values is not None and future_values is not None: + try: + smape_value = calculate_smape(future_values, predicted_values) + except Exception as e: + logger.warning(f"Failed to calculate SMAPE: {str(e)}") + + # Extract dimensions + num_channels = history_values.shape[1] + history_length = history_values.shape[0] + prediction_length = ( + predicted_values.shape[0] + if predicted_values is not None + else (future_values.shape[0] if future_values is not None else 0) + ) + + # Create date ranges + history_dates, future_dates = _create_date_ranges( + start, frequency, history_length, prediction_length + ) + + # Setup figure + fig, axes = _setup_figure(num_channels) + + # Plot each channel + for i in range(num_channels): + _plot_single_channel( + ax=axes[i], + channel_idx=i, + history_dates=history_dates, + future_dates=future_dates, + history_values=history_values, + future_values=future_values, + predicted_values=predicted_values, + lower_bound=lower_bound, + upper_bound=upper_bound, + ) + + # Finalize plot + _finalize_plot(fig, axes, title, smape_value, output_file, show) + + return fig + + +def _extract_quantile_predictions( + predicted_values: np.ndarray, + model_quantiles: List[float], +) -> Tuple[Optional[np.ndarray], Optional[np.ndarray], Optional[np.ndarray]]: + """Extract median, lower, and upper bound predictions from quantile output.""" + try: + median_idx = model_quantiles.index(0.5) + lower_idx = model_quantiles.index(0.1) + upper_idx = model_quantiles.index(0.9) + + median_preds = predicted_values[..., median_idx] + lower_bound = predicted_values[..., lower_idx] + upper_bound = predicted_values[..., upper_idx] + + return median_preds, lower_bound, upper_bound + except (ValueError, IndexError): + logger.warning( + "Could not find 0.1, 0.5, 0.9 quantiles for plotting. Using median of available quantiles." 
+ ) + median_preds = predicted_values[..., predicted_values.shape[-1] // 2] + return median_preds, None, None + + +def plot_from_container( + batch: BatchTimeSeriesContainer, + sample_idx: int, + predicted_values: Optional[np.ndarray] = None, + model_quantiles: Optional[List[float]] = None, + title: Optional[str] = None, + output_file: Optional[str] = None, + show: bool = True, +) -> Figure: + """Plot a single sample from a BatchTimeSeriesContainer with proper quantile handling.""" + # Extract data for the specific sample + history_values = batch.history_values[sample_idx].cpu().numpy() + future_values = batch.future_values[sample_idx].cpu().numpy() + + # Process predictions + if predicted_values is not None: + # Handle batch vs single sample predictions + if predicted_values.ndim >= 3 or ( + predicted_values.ndim == 2 + and predicted_values.shape[0] > future_values.shape[0] + ): + sample_preds = predicted_values[sample_idx] + else: + sample_preds = predicted_values + + # Extract quantile information if available + if model_quantiles: + median_preds, lower_bound, upper_bound = _extract_quantile_predictions( + sample_preds, model_quantiles + ) + else: + median_preds = sample_preds + lower_bound = None + upper_bound = None + else: + median_preds = None + lower_bound = None + upper_bound = None + + # Create the plot + return plot_multivariate_timeseries( + history_values=history_values, + future_values=future_values, + predicted_values=median_preds, + start=batch.start[sample_idx], + frequency=batch.frequency[sample_idx], + title=title, + output_file=output_file, + show=show, + lower_bound=lower_bound, + upper_bound=upper_bound, + ) diff --git a/src/synthetic_generation/__init__.py b/src/synthetic_generation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/synthetic_generation/abstract_classes.py b/src/synthetic_generation/abstract_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..129e9832a25d2a162a8fcb0474ea9f52fc88046f --- /dev/null +++ b/src/synthetic_generation/abstract_classes.py @@ -0,0 +1,97 @@ +from abc import ABC, abstractmethod +from typing import Any, Dict, Optional + +import numpy as np +import torch + +from src.data.containers import TimeSeriesContainer +from src.data.frequency import ( + select_safe_random_frequency, + select_safe_start_date, +) +from src.synthetic_generation.generator_params import GeneratorParams + + +class AbstractTimeSeriesGenerator(ABC): + """ + Abstract base class for synthetic time series generators. + """ + + @abstractmethod + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """ + Generate synthetic time series data. + + Parameters + ---------- + random_seed : int, optional + Random seed for reproducibility. + + Returns + ------- + np.ndarray + Time series values of shape (length,) for univariate or + (length, num_channels) for multivariate time series. + """ + pass + + +class GeneratorWrapper: + """ + Unified base class for all generator wrappers, using a GeneratorParams dataclass + for configuration. Provides parameter sampling, validation, and batch formatting utilities. + """ + + def __init__(self, params: GeneratorParams): + """ + Initialize the GeneratorWrapper with a GeneratorParams dataclass. + + Parameters + ---------- + params : GeneratorParams + Dataclass instance containing all generator configuration parameters. 
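+
+        Notes
+        -----
+        Parameter sampling uses a per-class offset of ``global_seed`` (see
+        ``_set_random_seeds``), while the global numpy and torch seeds are set
+        to ``global_seed`` directly so that the wrapped generators behave
+        deterministically for a given seed.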
+ """ + self.params = params + self._set_random_seeds(self.params.global_seed) + + def _set_random_seeds(self, seed: int) -> None: + # For parameter sampling, we want diversity across batches even with similar seeds + # Use a hash of the generator class name to ensure different generators get different parameter sequences + param_seed = seed + hash(self.__class__.__name__) % 2**31 + self.rng = np.random.default_rng(param_seed) + + # Set global numpy and torch seeds for deterministic behavior in underlying generators + np.random.seed(seed) + torch.manual_seed(seed) + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + """ + Sample parameters with total_length fixed and history_length calculated. + + Returns + ------- + Dict[str, Any] + Dictionary containing sampled parameter values where + history_length = total_length - future_length. + """ + + # Select a suitable frequency based on the total length + frequency = [ + select_safe_random_frequency(self.params.length, self.rng) + for _ in range(batch_size) + ] + start = [ + select_safe_start_date(self.params.length, frequency[i], self.rng) + for i in range(batch_size) + ] + + return { + "frequency": frequency, + "start": start, + } + + @abstractmethod + def generate_batch( + self, batch_size: int, seed: Optional[int] = None, **kwargs + ) -> TimeSeriesContainer: + raise NotImplementedError("Subclasses must implement generate_batch()") diff --git a/src/synthetic_generation/anomalies/anomaly_generator.py b/src/synthetic_generation/anomalies/anomaly_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..daa81aa5f24f667256eb2359e93f844a60aa88b2 --- /dev/null +++ b/src/synthetic_generation/anomalies/anomaly_generator.py @@ -0,0 +1,293 @@ +from typing import List, Optional, Set + +import numpy as np + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.generator_params import ( + AnomalyGeneratorParams, + AnomalyType, + MagnitudePattern, +) + + +class AnomalyGenerator(AbstractTimeSeriesGenerator): + """ + Generator for synthetic time series with realistic spike anomalies. + + Creates clean constant baseline signals with periodic spike patterns that + resemble real-world time series behavior, including clustering and magnitude patterns. + """ + + def __init__(self, params: AnomalyGeneratorParams): + """ + Initialize the AnomalyGenerator. + + Parameters + ---------- + params : AnomalyGeneratorParams + Configuration parameters for anomaly generation. + """ + self.params = params + + def _determine_spike_direction(self) -> AnomalyType: + """ + Determine if this series will have only up or only down spikes. + + Returns + ------- + AnomalyType + Either SPIKE_UP or SPIKE_DOWN for the entire series. + """ + if np.random.random() < self.params.spike_direction_probability: + return AnomalyType.SPIKE_UP + else: + return AnomalyType.SPIKE_DOWN + + def _generate_spike_positions(self) -> List[List[int]]: + """ + Generate spike positions: + - Always create uniformly spaced single spikes (base schedule) + - With 25% probability: add clusters (1-3 extra spikes) near a fraction of base spikes + - With 25% probability: add single random spikes across the series + + Returns + ------- + List[List[int]] + List of spike events, where each event is a list of positions + (single spike = [pos], cluster = [pos, pos+offset, ...]). 
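+
+        Notes
+        -----
+        Illustrative return value (positions are made up): ``[[50], [150, 152],
+        [250]]``, i.e. two single-spike events and one two-spike cluster around
+        position 150.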
+ """ + # Base uniform schedule (no jitter/variance) + base_period = np.random.randint(*self.params.base_period_range) + start_position = base_period // 2 + base_positions = list(range(start_position, self.params.length, base_period)) + + # Start with single-spike events at base positions + spike_events: List[List[int]] = [[pos] for pos in base_positions] + + if not base_positions: + return spike_events + + # Decide series type + series_draw = np.random.random() + + # 25%: augment with clusters near some base spikes + if series_draw < self.params.cluster_series_probability: + num_base_events = len(base_positions) + num_to_augment = max( + 1, int(round(self.params.cluster_event_fraction * num_base_events)) + ) + num_to_augment = min(num_to_augment, num_base_events) + + chosen_indices = ( + np.random.choice(num_base_events, size=num_to_augment, replace=False) + if num_to_augment > 0 + else np.array([], dtype=int) + ) + + for idx in chosen_indices: + base_pos = base_positions[int(idx)] + # Number of additional spikes (1..3) per selected event + num_additional = np.random.randint( + *self.params.cluster_additional_spikes_range + ) + if num_additional <= 0: + continue + + # Draw offsets around base spike and exclude zero to avoid duplicates + offsets = np.random.randint( + self.params.cluster_offset_range[0], + self.params.cluster_offset_range[1], + size=num_additional, + ) + offsets = [int(off) for off in offsets if off != 0] + + cluster_positions: Set[int] = set([base_pos]) + for off in offsets: + pos = base_pos + off + if 0 <= pos < self.params.length: + cluster_positions.add(pos) + + spike_events[int(idx)] = sorted(cluster_positions) + + # Next 25%: add random single spikes across the series + elif series_draw < ( + self.params.cluster_series_probability + + self.params.random_series_probability + ): + num_base_events = len(base_positions) + num_random = int( + round(self.params.random_spike_fraction_of_base * num_base_events) + ) + if num_random > 0: + all_indices = np.arange(self.params.length) + base_array = np.array(base_positions, dtype=int) + candidates = np.setdiff1d(all_indices, base_array, assume_unique=False) + if candidates.size > 0: + choose_n = min(num_random, candidates.size) + rand_positions = np.random.choice( + candidates, size=choose_n, replace=False + ) + for pos in rand_positions: + spike_events.append([int(pos)]) + + # Else: 50% clean series (uniform singles only) + + return spike_events + + def _generate_spike_magnitudes(self, total_spikes: int) -> np.ndarray: + """ + Generate spike magnitudes based on the configured pattern. + + Parameters + ---------- + total_spikes : int + Total number of individual spikes to generate magnitudes for. + + Returns + ------- + np.ndarray + Array of spike magnitudes. 
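+
+        Notes
+        -----
+        Whatever the pattern, Gaussian noise scaled by ``magnitude_noise`` is
+        added and the result is clipped to ``[0.1 * base, 3.0 * base]``, where
+        ``base`` is drawn from ``base_magnitude_range``.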
+ """ + base_magnitude = np.random.uniform(*self.params.base_magnitude_range) + magnitudes = np.zeros(total_spikes) + + if self.params.magnitude_pattern == MagnitudePattern.CONSTANT: + # All spikes have similar magnitude with small noise + magnitudes = np.full(total_spikes, base_magnitude) + noise = np.random.normal( + 0, self.params.magnitude_noise * base_magnitude, total_spikes + ) + magnitudes += noise + + elif self.params.magnitude_pattern == MagnitudePattern.INCREASING: + # Magnitude increases over time + trend = np.linspace( + 0, + self.params.magnitude_trend_strength * base_magnitude * total_spikes, + total_spikes, + ) + magnitudes = base_magnitude + trend + + elif self.params.magnitude_pattern == MagnitudePattern.DECREASING: + # Magnitude decreases over time + trend = np.linspace( + 0, + -self.params.magnitude_trend_strength * base_magnitude * total_spikes, + total_spikes, + ) + magnitudes = base_magnitude + trend + + elif self.params.magnitude_pattern == MagnitudePattern.CYCLICAL: + # Cyclical magnitude pattern + cycle_length = int(total_spikes * self.params.cyclical_period_ratio) + if cycle_length == 0: + cycle_length = max(1, total_spikes // 4) + + phase = np.linspace( + 0, 2 * np.pi * total_spikes / cycle_length, total_spikes + ) + cyclical_component = 0.3 * base_magnitude * np.sin(phase) + magnitudes = base_magnitude + cyclical_component + + elif self.params.magnitude_pattern == MagnitudePattern.RANDOM_BOUNDED: + # Random with correlation between consecutive spikes + magnitudes[0] = base_magnitude + + for i in range(1, total_spikes): + # Correlated random walk + prev_magnitude = magnitudes[i - 1] + random_component = np.random.normal(0, 0.2 * base_magnitude) + + magnitudes[i] = ( + self.params.magnitude_correlation * prev_magnitude + + (1 - self.params.magnitude_correlation) * base_magnitude + + random_component + ) + + # Add noise to all patterns + noise = np.random.normal( + 0, self.params.magnitude_noise * base_magnitude, total_spikes + ) + magnitudes += noise + + # Ensure magnitudes are positive and within reasonable bounds + min_magnitude = 0.1 * base_magnitude + max_magnitude = 3.0 * base_magnitude + magnitudes = np.clip(magnitudes, min_magnitude, max_magnitude) + + return magnitudes + + def _inject_spike_anomalies( + self, signal: np.ndarray, spike_direction: AnomalyType + ) -> np.ndarray: + """ + Inject spike anomalies into the clean signal using realistic patterns. + + Parameters + ---------- + signal : np.ndarray + Clean baseline signal to inject spikes into. + spike_direction : AnomalyType + Direction of spikes for this series (all up or all down). + + Returns + ------- + np.ndarray + Signal with injected spike anomalies. 
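+
+        Notes
+        -----
+        For example, a flat baseline of ``10.0`` with a single up-spike of
+        magnitude ``2.0`` at index 3 yields ``10.0`` everywhere except index 3,
+        which becomes ``12.0`` (down-spikes subtract instead).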
+ """ + anomalous_signal = signal.copy() + + # Generate spike positions based on pattern + spike_events = self._generate_spike_positions() + + # Flatten spike events to get total number of individual spikes + all_positions = [] + for event in spike_events: + all_positions.extend(event) + + if not all_positions: + return anomalous_signal + + # Generate magnitudes for all spikes + magnitudes = self._generate_spike_magnitudes(len(all_positions)) + + # Inject spikes + for i, position in enumerate(all_positions): + if position < len(anomalous_signal): + magnitude = magnitudes[i] + + if spike_direction == AnomalyType.SPIKE_UP: + anomalous_signal[position] += magnitude + else: # SPIKE_DOWN + anomalous_signal[position] -= magnitude + + return anomalous_signal + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """ + Generate a synthetic time series with realistic spike anomalies. + + Parameters + ---------- + random_seed : int, optional + Random seed for reproducibility. + + Returns + ------- + np.ndarray + Generated time series of shape (length,) - clean baseline with periodic spikes. + """ + if random_seed is not None: + np.random.seed(random_seed) + + # Generate clean baseline signal (constant level) + baseline_level = np.random.uniform(*self.params.base_level_range) + signal = np.full(self.params.length, baseline_level) + + # Determine spike direction for this series (all up or all down) + spike_direction = self._determine_spike_direction() + + # Inject spike anomalies with realistic patterns + anomalous_signal = self._inject_spike_anomalies(signal, spike_direction) + + return anomalous_signal diff --git a/src/synthetic_generation/anomalies/anomaly_generator_wrapper.py b/src/synthetic_generation/anomalies/anomaly_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..97ec66bdb8fd1c917f1234a8485ab10bc7fe275b --- /dev/null +++ b/src/synthetic_generation/anomalies/anomaly_generator_wrapper.py @@ -0,0 +1,64 @@ +from typing import Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.anomalies.anomaly_generator import AnomalyGenerator +from src.synthetic_generation.generator_params import AnomalyGeneratorParams + + +class AnomalyGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for AnomalyGenerator that handles batch generation and formatting. + """ + + def __init__(self, params: AnomalyGeneratorParams): + """ + Initialize the AnomalyGeneratorWrapper. + + Parameters + ---------- + params : AnomalyGeneratorParams + Parameters for the anomaly generator. + """ + super().__init__(params) + self.generator = AnomalyGenerator(params) + + def generate_batch( + self, batch_size: int, seed: Optional[int] = None + ) -> TimeSeriesContainer: + """ + Generate a batch of anomaly time series. + + Parameters + ---------- + batch_size : int + Number of time series to generate. + seed : int, optional + Random seed for reproducibility. + + Returns + ------- + TimeSeriesContainer + TimeSeriesContainer containing the generated time series. 
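+
+        Examples
+        --------
+        Minimal sketch; the exact fields of ``AnomalyGeneratorParams`` and
+        ``TimeSeriesContainer`` are assumed here, so the snippet is not run as
+        a doctest:
+
+        >>> params = AnomalyGeneratorParams(length=256)  # doctest: +SKIP
+        >>> wrapper = AnomalyGeneratorWrapper(params)  # doctest: +SKIP
+        >>> container = wrapper.generate_batch(batch_size=8, seed=0)  # doctest: +SKIP
+        >>> container.values.shape  # doctest: +SKIP
+        (8, 256)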
+ """ + if seed is not None: + self._set_random_seeds(seed) + + # Sample parameters for the batch + sampled_params = self._sample_parameters(batch_size) + + # Generate time series + values = [] + for i in range(batch_size): + # Use a different seed for each series in the batch + series_seed = (seed + i) if seed is not None else None + series = self.generator.generate_time_series(series_seed) + values.append(series) + + return TimeSeriesContainer( + values=np.array(values), + start=sampled_params["start"], + frequency=sampled_params["frequency"], + ) diff --git a/src/synthetic_generation/audio_generators/financial_volatility_generator.py b/src/synthetic_generation/audio_generators/financial_volatility_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..69f002b6f97b1890d4ded82f26f72b5ea0fa30a1 --- /dev/null +++ b/src/synthetic_generation/audio_generators/financial_volatility_generator.py @@ -0,0 +1,103 @@ +from typing import Optional + +import numpy as np +from pyo import LFO, BrownNoise, Follower, Metro, Mix, Sine, TrigExpseg + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.audio_generators.utils import ( + normalize_waveform, + run_offline_pyo, +) + + +class FinancialVolatilityAudioGenerator(AbstractTimeSeriesGenerator): + """ + Generate synthetic univariate time series that mimics financial market + behavior with volatility clustering and occasional jumps. + """ + + def __init__( + self, + length: int, + server_duration: float, + sample_rate: int, + normalize_output: bool, + # Trend LFO + trend_lfo_freq_range: tuple[float, float], + trend_lfo_mul_range: tuple[float, float], + # Volatility clustering + volatility_carrier_freq_range: tuple[float, float], + follower_freq_range: tuple[float, float], + volatility_range: tuple[float, float], + # Jumps + jump_metro_time_range: tuple[float, float], + jump_env_start_range: tuple[float, float], + jump_env_decay_time_range: tuple[float, float], + jump_freq_range: tuple[float, float], + jump_direction_up_probability: float, + random_seed: Optional[int] = None, + ): + self.length = length + self.server_duration = server_duration + self.sample_rate = sample_rate + self.normalize_output = normalize_output + + self.trend_lfo_freq_range = trend_lfo_freq_range + self.trend_lfo_mul_range = trend_lfo_mul_range + self.volatility_carrier_freq_range = volatility_carrier_freq_range + self.follower_freq_range = follower_freq_range + self.volatility_range = volatility_range + self.jump_metro_time_range = jump_metro_time_range + self.jump_env_start_range = jump_env_start_range + self.jump_env_decay_time_range = jump_env_decay_time_range + self.jump_freq_range = jump_freq_range + self.jump_direction_up_probability = jump_direction_up_probability + + self.rng = np.random.default_rng(random_seed) + + def _build_synth(self): + # Trend + trend_freq = self.rng.uniform(*self.trend_lfo_freq_range) + trend_mul = self.rng.uniform(*self.trend_lfo_mul_range) + trend = LFO(freq=trend_freq, type=0, mul=trend_mul) + + # Volatility clustering + carrier_freq = self.rng.uniform(*self.volatility_carrier_freq_range) + follower_freq = self.rng.uniform(*self.follower_freq_range) + volatility_min, volatility_max = self.volatility_range + volatility_osc = Sine(freq=carrier_freq) + volatility = Follower(volatility_osc, freq=follower_freq).range( + volatility_min, volatility_max + ) + market_noise = BrownNoise(mul=volatility) + + # Jumps + jump_time = 
self.rng.uniform(*self.jump_metro_time_range) + jump_env_start = self.rng.uniform(*self.jump_env_start_range) + jump_env_decay = self.rng.uniform(*self.jump_env_decay_time_range) + jump_freq = self.rng.uniform(*self.jump_freq_range) + direction = ( + 1.0 if self.rng.random() < self.jump_direction_up_probability else -1.0 + ) + + jump_trigger = Metro(time=jump_time).play() + jump_env = TrigExpseg( + jump_trigger, list=[(0.0, jump_env_start), (jump_env_decay, 0.0)] + ) + jumps = Sine(freq=jump_freq, mul=jump_env * direction) + + return Mix([trend, market_noise, jumps], voices=1) + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + waveform = run_offline_pyo( + synth_builder=self._build_synth, + server_duration=self.server_duration, + sample_rate=self.sample_rate, + length=self.length, + ) + if self.normalize_output: + waveform = normalize_waveform(waveform) + return waveform diff --git a/src/synthetic_generation/audio_generators/financial_volatility_wrapper.py b/src/synthetic_generation/audio_generators/financial_volatility_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..f12a7221212b3604ca3bcc2efcfcb453e1ab60c7 --- /dev/null +++ b/src/synthetic_generation/audio_generators/financial_volatility_wrapper.py @@ -0,0 +1,91 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.audio_generators.financial_volatility_generator import ( + FinancialVolatilityAudioGenerator, +) +from src.synthetic_generation.generator_params import FinancialVolatilityAudioParams + + +class FinancialVolatilityAudioWrapper(GeneratorWrapper): + def __init__(self, params: FinancialVolatilityAudioParams): + super().__init__(params) + self.params: FinancialVolatilityAudioParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + params = super()._sample_parameters(batch_size) + params.update( + { + "length": self.params.length, + "server_duration": self.params.server_duration, + "sample_rate": self.params.sample_rate, + "normalize_output": self.params.normalize_output, + # Trend LFO + "trend_lfo_freq_range": self.params.trend_lfo_freq_range, + "trend_lfo_mul_range": self.params.trend_lfo_mul_range, + # Volatility clustering + "volatility_carrier_freq_range": self.params.volatility_carrier_freq_range, + "follower_freq_range": self.params.follower_freq_range, + "volatility_range": self.params.volatility_range, + # Jumps + "jump_metro_time_range": self.params.jump_metro_time_range, + "jump_env_start_range": self.params.jump_env_start_range, + "jump_env_decay_time_range": self.params.jump_env_decay_time_range, + "jump_freq_range": self.params.jump_freq_range, + "jump_direction_up_probability": self.params.jump_direction_up_probability, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = FinancialVolatilityAudioGenerator( + length=params["length"], + server_duration=params["server_duration"], + sample_rate=params["sample_rate"], + normalize_output=params["normalize_output"], + trend_lfo_freq_range=params["trend_lfo_freq_range"], + 
trend_lfo_mul_range=params["trend_lfo_mul_range"], + volatility_carrier_freq_range=params["volatility_carrier_freq_range"], + follower_freq_range=params["follower_freq_range"], + volatility_range=params["volatility_range"], + jump_metro_time_range=params["jump_metro_time_range"], + jump_env_start_range=params["jump_env_start_range"], + jump_env_decay_time_range=params["jump_env_decay_time_range"], + jump_freq_range=params["jump_freq_range"], + jump_direction_up_probability=params["jump_direction_up_probability"], + random_seed=seed, + ) + + def _derive_series_seed(base_seed: int, index: int) -> int: + # Mix base seed with index and class hash to decorrelate adjacent seeds + mixed = ( + (base_seed & 0x7FFFFFFF) + ^ ((index * 0x9E3779B1) & 0x7FFFFFFF) + ^ (hash(self.__class__.__name__) & 0x7FFFFFFF) + ) + return int(mixed) + + batch_values = [] + for i in range(batch_size): + series_seed = None if seed is None else _derive_series_seed(seed, i) + values = generator.generate_time_series(random_seed=series_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/audio_generators/multi_scale_fractal_generator.py b/src/synthetic_generation/audio_generators/multi_scale_fractal_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..9d4af55082f22fd3eb2e3f8ec1df32504f4f7b6e --- /dev/null +++ b/src/synthetic_generation/audio_generators/multi_scale_fractal_generator.py @@ -0,0 +1,75 @@ +from typing import Optional + +import numpy as np +from pyo import Biquad, BrownNoise, Mix + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.audio_generators.utils import ( + normalize_waveform, + run_offline_pyo, +) + + +class MultiScaleFractalAudioGenerator(AbstractTimeSeriesGenerator): + """ + Generate multi-scale fractal-like patterns by filtering noise at + multiple frequency bands with varying Q and attenuation per scale. 
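+
+    Each scale ``i`` filters the shared BrownNoise source at a frequency drawn
+    from ``scale_freq_base_range`` and scaled by ``0.5**i``, then weights the
+    result by ``per_scale_attenuation**i`` before all scales are mixed into a
+    single voice.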
+ """ + + def __init__( + self, + length: int, + server_duration: float, + sample_rate: int, + normalize_output: bool, + base_noise_mul_range: tuple[float, float], + num_scales_range: tuple[int, int], + scale_freq_base_range: tuple[float, float], + q_factor_range: tuple[float, float], + per_scale_attenuation_range: tuple[float, float], + random_seed: Optional[int] = None, + ): + self.length = length + self.server_duration = server_duration + self.sample_rate = sample_rate + self.normalize_output = normalize_output + + self.base_noise_mul_range = base_noise_mul_range + self.num_scales_range = num_scales_range + self.scale_freq_base_range = scale_freq_base_range + self.q_factor_range = q_factor_range + self.per_scale_attenuation_range = per_scale_attenuation_range + + self.rng = np.random.default_rng(random_seed) + + def _build_synth(self): + base_mul = self.rng.uniform(*self.base_noise_mul_range) + base = BrownNoise(mul=base_mul) + + num_scales = int( + self.rng.integers(self.num_scales_range[0], self.num_scales_range[1] + 1) + ) + + scales = [] + for i in range(num_scales): + scale_freq = self.rng.uniform(*self.scale_freq_base_range) * (0.5**i) + q_factor = self.rng.uniform(*self.q_factor_range) + per_scale_att = self.rng.uniform(*self.per_scale_attenuation_range) + filtered = Biquad(base, freq=scale_freq, q=q_factor, type=0) + scales.append(filtered * (per_scale_att**i)) + + return Mix(scales, voices=1) + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + waveform = run_offline_pyo( + synth_builder=self._build_synth, + server_duration=self.server_duration, + sample_rate=self.sample_rate, + length=self.length, + ) + if self.normalize_output: + waveform = normalize_waveform(waveform) + return waveform diff --git a/src/synthetic_generation/audio_generators/multi_scale_fractal_wrapper.py b/src/synthetic_generation/audio_generators/multi_scale_fractal_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..c2e5ab4dca6a6594c4b7f4ce3714b02c7358feba --- /dev/null +++ b/src/synthetic_generation/audio_generators/multi_scale_fractal_wrapper.py @@ -0,0 +1,77 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.audio_generators.multi_scale_fractal_generator import ( + MultiScaleFractalAudioGenerator, +) +from src.synthetic_generation.generator_params import MultiScaleFractalAudioParams + + +class MultiScaleFractalAudioWrapper(GeneratorWrapper): + def __init__(self, params: MultiScaleFractalAudioParams): + super().__init__(params) + self.params: MultiScaleFractalAudioParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + params = super()._sample_parameters(batch_size) + params.update( + { + "length": self.params.length, + "server_duration": self.params.server_duration, + "sample_rate": self.params.sample_rate, + "normalize_output": self.params.normalize_output, + "base_noise_mul_range": self.params.base_noise_mul_range, + "num_scales_range": self.params.num_scales_range, + "scale_freq_base_range": self.params.scale_freq_base_range, + "q_factor_range": self.params.q_factor_range, + "per_scale_attenuation_range": self.params.per_scale_attenuation_range, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: 
Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = MultiScaleFractalAudioGenerator( + length=params["length"], + server_duration=params["server_duration"], + sample_rate=params["sample_rate"], + normalize_output=params["normalize_output"], + base_noise_mul_range=params["base_noise_mul_range"], + num_scales_range=params["num_scales_range"], + scale_freq_base_range=params["scale_freq_base_range"], + q_factor_range=params["q_factor_range"], + per_scale_attenuation_range=params["per_scale_attenuation_range"], + random_seed=seed, + ) + + def _derive_series_seed(base_seed: int, index: int) -> int: + mixed = ( + (base_seed & 0x7FFFFFFF) + ^ ((index * 0x9E3779B1) & 0x7FFFFFFF) + ^ (hash(self.__class__.__name__) & 0x7FFFFFFF) + ) + return int(mixed) + + batch_values = [] + for i in range(batch_size): + series_seed = None if seed is None else _derive_series_seed(seed, i) + values = generator.generate_time_series(random_seed=series_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/audio_generators/network_topology_generator.py b/src/synthetic_generation/audio_generators/network_topology_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..648f983c3823a5e83c8fde7144e1e3d785110216 --- /dev/null +++ b/src/synthetic_generation/audio_generators/network_topology_generator.py @@ -0,0 +1,113 @@ +from typing import Optional, Tuple + +import numpy as np +from pyo import LFO, BrownNoise, Metro, Mix, Noise, TrigExpseg + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.audio_generators.utils import ( + normalize_waveform, + run_offline_pyo, +) + + +class NetworkTopologyAudioGenerator(AbstractTimeSeriesGenerator): + """ + Simulate network traffic with base flow, packet bursts, periodic congestion, + protocol overhead, and DDoS-like attacks. Parameters are sampled per series. 
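+
+    The rendered signal mixes five components built in ``_build_synth``: a slow
+    traffic LFO, Metro-triggered packet-burst noise, a periodic negative
+    congestion envelope, a small protocol-overhead LFO, and envelope-gated
+    BrownNoise modelling attack episodes.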
+ """ + + def __init__( + self, + length: int, + server_duration: float, + sample_rate: int, + normalize_output: bool, + traffic_lfo_freq_range: tuple[float, float], + traffic_lfo_mul_range: tuple[float, float], + burst_rate_hz_range: tuple[float, float], + burst_duration_range: tuple[float, float], + burst_mul_range: tuple[float, float], + congestion_period_range: tuple[float, float], + congestion_depth_range: tuple[float, float], + congestion_release_time_range: tuple[float, float], + overhead_lfo_freq_range: tuple[float, float], + overhead_mul_range: tuple[float, float], + attack_period_range: tuple[float, float], + attack_env_points: Tuple[ + Tuple[float, float], Tuple[float, float], Tuple[float, float] + ], + attack_mul_range: tuple[float, float], + random_seed: Optional[int] = None, + ): + self.length = length + self.server_duration = server_duration + self.sample_rate = sample_rate + self.normalize_output = normalize_output + + self.traffic_lfo_freq_range = traffic_lfo_freq_range + self.traffic_lfo_mul_range = traffic_lfo_mul_range + self.burst_rate_hz_range = burst_rate_hz_range + self.burst_duration_range = burst_duration_range + self.burst_mul_range = burst_mul_range + self.congestion_period_range = congestion_period_range + self.congestion_depth_range = congestion_depth_range + self.congestion_release_time_range = congestion_release_time_range + self.overhead_lfo_freq_range = overhead_lfo_freq_range + self.overhead_mul_range = overhead_mul_range + self.attack_period_range = attack_period_range + self.attack_env_points = attack_env_points + self.attack_mul_range = attack_mul_range + + self.rng = np.random.default_rng(random_seed) + + def _build_synth(self): + # Base traffic flow + traffic_freq = self.rng.uniform(*self.traffic_lfo_freq_range) + traffic_mul = self.rng.uniform(*self.traffic_lfo_mul_range) + traffic_base = LFO(freq=traffic_freq, type=0, mul=traffic_mul) + + # Packet bursts + burst_rate = self.rng.uniform(*self.burst_rate_hz_range) + burst_trigger = Metro(time=1.0 / burst_rate).play() + burst_duration = self.rng.uniform(*self.burst_duration_range) + burst_env = TrigExpseg(burst_trigger, list=[(0.0, 0.8), (burst_duration, 0.0)]) + burst_mul = self.rng.uniform(*self.burst_mul_range) + bursts = Noise(mul=burst_env * burst_mul) + + # Periodic congestion (negative amplitude dip) + congestion_period = self.rng.uniform(*self.congestion_period_range) + congestion_trigger = Metro(time=congestion_period).play() + congestion_depth = self.rng.uniform(*self.congestion_depth_range) # negative + congestion_release = self.rng.uniform(*self.congestion_release_time_range) + congestion_env = TrigExpseg( + congestion_trigger, + list=[(0.0, congestion_depth), (congestion_release, 0.0)], + ) + + # Protocol overhead + overhead_freq = self.rng.uniform(*self.overhead_lfo_freq_range) + overhead_mul = self.rng.uniform(*self.overhead_mul_range) + overhead = LFO(freq=overhead_freq, type=1, mul=overhead_mul) + + # DDoS-like attacks + attack_period = self.rng.uniform(*self.attack_period_range) + attack_trigger = Metro(time=attack_period).play() + attack_env = TrigExpseg(attack_trigger, list=list(self.attack_env_points)) + attack_mul = self.rng.uniform(*self.attack_mul_range) + attacks = BrownNoise(mul=attack_env * attack_mul) + + return Mix([traffic_base, bursts, congestion_env, overhead, attacks], voices=1) + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + waveform = 
run_offline_pyo( + synth_builder=self._build_synth, + server_duration=self.server_duration, + sample_rate=self.sample_rate, + length=self.length, + ) + if self.normalize_output: + waveform = normalize_waveform(waveform) + return waveform diff --git a/src/synthetic_generation/audio_generators/network_topology_wrapper.py b/src/synthetic_generation/audio_generators/network_topology_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..9c976fb5a68f2a4d72ce8d6edb8f923837a8fb2b --- /dev/null +++ b/src/synthetic_generation/audio_generators/network_topology_wrapper.py @@ -0,0 +1,93 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.audio_generators.network_topology_generator import ( + NetworkTopologyAudioGenerator, +) +from src.synthetic_generation.generator_params import NetworkTopologyAudioParams + + +class NetworkTopologyAudioWrapper(GeneratorWrapper): + def __init__(self, params: NetworkTopologyAudioParams): + super().__init__(params) + self.params: NetworkTopologyAudioParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + params = super()._sample_parameters(batch_size) + params.update( + { + "length": self.params.length, + "server_duration": self.params.server_duration, + "sample_rate": self.params.sample_rate, + "normalize_output": self.params.normalize_output, + "traffic_lfo_freq_range": self.params.traffic_lfo_freq_range, + "traffic_lfo_mul_range": self.params.traffic_lfo_mul_range, + "burst_rate_hz_range": self.params.burst_rate_hz_range, + "burst_duration_range": self.params.burst_duration_range, + "burst_mul_range": self.params.burst_mul_range, + "congestion_period_range": self.params.congestion_period_range, + "congestion_depth_range": self.params.congestion_depth_range, + "congestion_release_time_range": self.params.congestion_release_time_range, + "overhead_lfo_freq_range": self.params.overhead_lfo_freq_range, + "overhead_mul_range": self.params.overhead_mul_range, + "attack_period_range": self.params.attack_period_range, + "attack_env_points": self.params.attack_env_points, + "attack_mul_range": self.params.attack_mul_range, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = NetworkTopologyAudioGenerator( + length=params["length"], + server_duration=params["server_duration"], + sample_rate=params["sample_rate"], + normalize_output=params["normalize_output"], + traffic_lfo_freq_range=params["traffic_lfo_freq_range"], + traffic_lfo_mul_range=params["traffic_lfo_mul_range"], + burst_rate_hz_range=params["burst_rate_hz_range"], + burst_duration_range=params["burst_duration_range"], + burst_mul_range=params["burst_mul_range"], + congestion_period_range=params["congestion_period_range"], + congestion_depth_range=params["congestion_depth_range"], + congestion_release_time_range=params["congestion_release_time_range"], + overhead_lfo_freq_range=params["overhead_lfo_freq_range"], + overhead_mul_range=params["overhead_mul_range"], + attack_period_range=params["attack_period_range"], + attack_env_points=params["attack_env_points"], + attack_mul_range=params["attack_mul_range"], + random_seed=seed, + ) + + def 
_derive_series_seed(base_seed: int, index: int) -> int: + mixed = ( + (base_seed & 0x7FFFFFFF) + ^ ((index * 0x9E3779B1) & 0x7FFFFFFF) + ^ (hash(self.__class__.__name__) & 0x7FFFFFFF) + ) + return int(mixed) + + batch_values = [] + for i in range(batch_size): + series_seed = None if seed is None else _derive_series_seed(seed, i) + values = generator.generate_time_series(random_seed=series_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/audio_generators/stochastic_rhythm_generator.py b/src/synthetic_generation/audio_generators/stochastic_rhythm_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..b46159e0bbed5aa46d9c200c78e322767cc0c619 --- /dev/null +++ b/src/synthetic_generation/audio_generators/stochastic_rhythm_generator.py @@ -0,0 +1,86 @@ +from typing import Optional + +import numpy as np +from pyo import Metro, Mix, Sine, TrigExpseg + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.audio_generators.utils import ( + normalize_waveform, + run_offline_pyo, +) + + +class StochasticRhythmAudioGenerator(AbstractTimeSeriesGenerator): + """ + Generate rhythmic patterns with layered triggers, per-layer envelopes + and tones. Parameters are sampled per series for diversity. + """ + + def __init__( + self, + length: int, + server_duration: float, + sample_rate: int, + normalize_output: bool, + base_tempo_hz_range: tuple[float, float], + num_layers_range: tuple[int, int], + subdivisions: tuple[int, ...], + attack_range: tuple[float, float], + decay_range: tuple[float, float], + tone_freq_range: tuple[float, float], + tone_mul_range: tuple[float, float], + random_seed: Optional[int] = None, + ): + self.length = length + self.server_duration = server_duration + self.sample_rate = sample_rate + self.normalize_output = normalize_output + + self.base_tempo_hz_range = base_tempo_hz_range + self.num_layers_range = num_layers_range + self.subdivisions = subdivisions + self.attack_range = attack_range + self.decay_range = decay_range + self.tone_freq_range = tone_freq_range + self.tone_mul_range = tone_mul_range + + self.rng = np.random.default_rng(random_seed) + + def _build_synth(self): + base_tempo = self.rng.uniform(*self.base_tempo_hz_range) + num_layers = int( + self.rng.integers(self.num_layers_range[0], self.num_layers_range[1] + 1) + ) + + layers = [] + for _ in range(num_layers): + subdivision = self.subdivisions[ + int(self.rng.integers(0, len(self.subdivisions))) + ] + rhythm_freq = base_tempo * subdivision + trigger = Metro(time=1.0 / rhythm_freq).play() + + attack = self.rng.uniform(*self.attack_range) + decay = self.rng.uniform(*self.decay_range) + env = TrigExpseg(trigger, list=[(0.0, 1.0), (attack, 0.8), (decay, 0.0)]) + + tone_freq = self.rng.uniform(*self.tone_freq_range) + tone_mul = self.rng.uniform(*self.tone_mul_range) + tone = Sine(freq=tone_freq, mul=env * tone_mul) + layers.append(tone) + + return Mix(layers, voices=1) + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + waveform = run_offline_pyo( + synth_builder=self._build_synth, + server_duration=self.server_duration, + sample_rate=self.sample_rate, + length=self.length, + ) + if self.normalize_output: + waveform = normalize_waveform(waveform) + return waveform diff 
--git a/src/synthetic_generation/audio_generators/stochastic_rhythm_wrapper.py b/src/synthetic_generation/audio_generators/stochastic_rhythm_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..1915f38d852c21245762c6e36afdb3c443a7c3b8 --- /dev/null +++ b/src/synthetic_generation/audio_generators/stochastic_rhythm_wrapper.py @@ -0,0 +1,81 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.audio_generators.stochastic_rhythm_generator import ( + StochasticRhythmAudioGenerator, +) +from src.synthetic_generation.generator_params import StochasticRhythmAudioParams + + +class StochasticRhythmAudioWrapper(GeneratorWrapper): + def __init__(self, params: StochasticRhythmAudioParams): + super().__init__(params) + self.params: StochasticRhythmAudioParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + params = super()._sample_parameters(batch_size) + params.update( + { + "length": self.params.length, + "server_duration": self.params.server_duration, + "sample_rate": self.params.sample_rate, + "normalize_output": self.params.normalize_output, + "base_tempo_hz_range": self.params.base_tempo_hz_range, + "num_layers_range": self.params.num_layers_range, + "subdivisions": self.params.subdivisions, + "attack_range": self.params.attack_range, + "decay_range": self.params.decay_range, + "tone_freq_range": self.params.tone_freq_range, + "tone_mul_range": self.params.tone_mul_range, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = StochasticRhythmAudioGenerator( + length=params["length"], + server_duration=params["server_duration"], + sample_rate=params["sample_rate"], + normalize_output=params["normalize_output"], + base_tempo_hz_range=params["base_tempo_hz_range"], + num_layers_range=params["num_layers_range"], + subdivisions=params["subdivisions"], + attack_range=params["attack_range"], + decay_range=params["decay_range"], + tone_freq_range=params["tone_freq_range"], + tone_mul_range=params["tone_mul_range"], + random_seed=seed, + ) + + def _derive_series_seed(base_seed: int, index: int) -> int: + mixed = ( + (base_seed & 0x7FFFFFFF) + ^ ((index * 0x9E3779B1) & 0x7FFFFFFF) + ^ (hash(self.__class__.__name__) & 0x7FFFFFFF) + ) + return int(mixed) + + batch_values = [] + for i in range(batch_size): + series_seed = None if seed is None else _derive_series_seed(seed, i) + values = generator.generate_time_series(random_seed=series_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/audio_generators/utils.py b/src/synthetic_generation/audio_generators/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2d048fd1c60bd7ac1a2e08ba0473e6af6713ff56 --- /dev/null +++ b/src/synthetic_generation/audio_generators/utils.py @@ -0,0 +1,88 @@ +import os +import tempfile +import time +from contextlib import redirect_stderr, redirect_stdout +from typing import Callable + +import numpy as np +from pyo import NewTable, Server, TableRec + + +def run_offline_pyo( + 
synth_builder: Callable[[], object], + server_duration: float, + sample_rate: int, + length: int, +) -> np.ndarray: + """ + Render a pyo synthesis graph offline and return a numpy waveform. + + Parameters + ---------- + synth_builder : Callable[[], object] + Function that builds and returns a pyo object representing the synth graph. + server_duration : float + Duration in seconds to run the offline server. + sample_rate : int + Sample rate for the offline server. + length : int + Number of samples to return. + + Returns + ------- + np.ndarray + Waveform of shape (length,). + """ + # Suppress pyo console messages during offline rendering + with ( + open(os.devnull, "w") as devnull, + redirect_stdout(devnull), + redirect_stderr(devnull), + ): + s = Server(sr=sample_rate, nchnls=1, duplex=0, audio="offline") + # Use a unique temp filename to avoid clashes across concurrent jobs + tmp_wav = os.path.join( + tempfile.gettempdir(), + f"pyo_offline_{os.getpid()}_{int(time.time_ns())}.wav", + ) + # The filename is required by pyo's offline server even if we record to a table + s.recordOptions(dur=server_duration, filename=tmp_wav, fileformat=0) + s.boot() + + table = NewTable(length=server_duration, chnls=1) + + synth_obj = synth_builder() + + # Record the output of the synth object to the table + _ = TableRec(synth_obj, table, fadetime=0.01).play() + + s.start() + # Offline mode runs immediately to completion; no need for sleep + s.stop() + s.shutdown() + try: + if os.path.exists(tmp_wav): + os.remove(tmp_wav) + except Exception: + # Best-effort cleanup; ignore errors + pass + + waveform = np.array(table.getTable()) + if waveform.size > length: + waveform = waveform[:length] + elif waveform.size < length: + # Pad with zeros if the rendered buffer is shorter than requested + pad = np.zeros(length - waveform.size, dtype=waveform.dtype) + waveform = np.concatenate([waveform, pad], axis=0) + + return waveform + + +def normalize_waveform(values: np.ndarray) -> np.ndarray: + """ + Normalize a waveform to have max absolute value of 1 (if nonzero). 
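+
+    For example, ``[0.0, -2.0, 1.0]`` is divided by its peak magnitude ``2.0``
+    and becomes ``[0.0, -1.0, 0.5]``; an all-zero or empty input is returned
+    unchanged.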
+ """ + max_abs = np.max(np.abs(values)) if values.size > 0 else 0.0 + if max_abs > 0: + return values / max_abs + return values diff --git a/src/synthetic_generation/augmentations/offline_per_sample_iid_augmentations.py b/src/synthetic_generation/augmentations/offline_per_sample_iid_augmentations.py new file mode 100644 index 0000000000000000000000000000000000000000..f15e31f1454159b2072949106a5ead8dbf0ce8f0 --- /dev/null +++ b/src/synthetic_generation/augmentations/offline_per_sample_iid_augmentations.py @@ -0,0 +1,1482 @@ +import argparse +import logging +import sys +import time +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple + +import numpy as np +import pandas as pd +import pyarrow as pa +import pyarrow.feather as feather +import torch + +from src.data.augmentations import ( + CensorAugmenter, + DifferentialAugmenter, + MixUpAugmenter, + QuantizationAugmenter, + RandomConvAugmenter, + TimeFlipAugmenter, + YFlipAugmenter, +) +from src.data.constants import LENGTH_CHOICES +from src.data.datasets import CyclicalBatchDataset +from src.data.filter import is_low_quality +from src.data.frequency import Frequency, parse_frequency +from src.data.scalers import MeanScaler, MedianScaler, MinMaxScaler, RobustScaler + + +class TimeSeriesDatasetManager: + def __init__(self, output_path: str, batch_size: int = 2**13): + self.output_path = Path(output_path) + self.output_path.mkdir(parents=True, exist_ok=True) + self.batches_dir = self.output_path + self.batches_dir.mkdir(exist_ok=True) + self.batch_size = batch_size + self.batch_counter = 0 + self.series_counter = 0 + + self.schema = pa.schema( + [ + ("series_id", pa.int64()), + ("values", pa.list_(pa.list_(pa.float64()))), + ("length", pa.int32()), + ("num_channels", pa.int32()), + ("generator_type", pa.string()), + ("start", pa.timestamp("ns")), + ("frequency", pa.string()), + ("generation_timestamp", pa.timestamp("ns")), + ] + ) + + self._initialize_state() + + def _initialize_state(self) -> None: + existing_batches = sorted(self.batches_dir.glob("batch_*.arrow")) + if not existing_batches: + logging.info("No existing batches found. 
Starting from scratch.") + return + + batch_numbers = [] + total_series = 0 + for batch_file in existing_batches: + try: + batch_num = int(batch_file.stem.split("_")[1]) + batch_numbers.append(batch_num) + batch_table = feather.read_table(batch_file) + total_series += len(batch_table) + except Exception as e: + logging.warning(f"Error reading batch {batch_file}: {e}") + continue + + if batch_numbers: + max_batch_num = max(batch_numbers) + self.batch_counter = max_batch_num + 1 + self.series_counter = total_series + + last_batch_file = self.batches_dir / f"batch_{max_batch_num:08d}.arrow" + if last_batch_file.exists(): + try: + last_batch_table = feather.read_table(last_batch_file) + if len(last_batch_table) < self.batch_size: + self.batch_counter = max_batch_num + logging.info( + f"Found incomplete last batch {max_batch_num} with {len(last_batch_table)} series" + ) + except Exception as e: + logging.warning(f"Error checking last batch: {e}") + + logging.info( + f"Resuming from: batch_counter={self.batch_counter}, series_counter={self.series_counter}" + ) + + def append_batch(self, batch_data: List[Dict[str, Any]]) -> None: + if not batch_data: + return + + try: + arrays = [] + for field in self.schema: + field_name = field.name + if field_name in ["start", "generation_timestamp"]: + timestamps = [row[field_name] for row in batch_data] + arrays.append( + pa.array( + [ts.value for ts in timestamps], type=pa.timestamp("ns") + ) + ) + else: + arrays.append(pa.array([row[field_name] for row in batch_data])) + + new_table = pa.Table.from_arrays(arrays, schema=self.schema) + batch_filename = f"batch_{self.batch_counter:08d}.arrow" + batch_filepath = self.batches_dir / batch_filename + feather.write_feather(new_table, batch_filepath) + + self.series_counter += len(batch_data) + self.batch_counter += 1 + + except Exception as e: + logging.error(f"Error writing batch: {e}") + raise + + +class UnivariateOfflineAugmentor: + def __init__( + self, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + global_seed: int = 42, + ): + self.global_seed = global_seed + np.random.seed(global_seed) + torch.manual_seed(global_seed) + self.rng = np.random.default_rng(global_seed) + self.augmentation_probabilities = augmentation_probabilities + self.augmentations = augmentations + self.apply_augmentations = any(self.augmentations.values()) + + self.time_flip_augmenter = None + if self.augmentations["time_flip_augmentation"]: + self.time_flip_augmenter = TimeFlipAugmenter( + p_flip=self.augmentation_probabilities["time_flip_augmentation"] + ) + + self.yflip_augmenter = None + if self.augmentations["yflip_augmentation"]: + self.yflip_augmenter = YFlipAugmenter( + p_flip=self.augmentation_probabilities["yflip_augmentation"] + ) + + self.censor_augmenter = None + if self.augmentations["censor_augmentation"]: + self.censor_augmenter = CensorAugmenter() + + self.quantization_augmenter = None + if self.augmentations["quantization_augmentation"]: + self.quantization_augmenter = QuantizationAugmenter( + p_quantize=self.augmentation_probabilities[ + "censor_or_quantization_augmentation" + ], + level_range=(5, 15), + ) + + if self.augmentations["differential_augmentation"]: + self.differential_augmentor = DifferentialAugmenter( + p_transform=self.augmentation_probabilities["differential_augmentation"] + ) + + def apply( + self, + history_values: torch.Tensor, + starts: Optional[List[pd.Timestamp]] = None, + frequencies: Optional[List[str]] = None, + ) -> 
torch.Tensor: + if not self.apply_augmentations: + return history_values + + batch_size = int(history_values.shape[0]) + + # 0) Combination (MixUp) – handled early at batch level due to dependency on other series + if ( + self.augmentations.get("mixup_augmentation", False) + and self.mixup_augmenter is not None + ): + history_values = self.mixup_augmenter.transform(history_values) + + # Per-series plan: sample categories and apply in fixed order per series + # Categories (max one op per category): + # invariances, structure, seasonality, artifacts, analytic, discrete + for b in range(batch_size): + series = history_values[b : b + 1].clone() + + # Determine eligible categories and weights for this series + categories = [ + "invariances", + "structure", + "seasonality", + "artifacts", + "analytic", + "discrete", + ] + weights = { + "invariances": 0.6, + "structure": 0.6, + "seasonality": 0.5, + "artifacts": 0.3, + "analytic": 0.4, + "discrete": 0.6, + } + + # Remove disabled categories + if not ( + self.augmentations.get("time_flip_augmentation", False) + or self.augmentations.get("yflip_augmentation", False) + ): + weights["invariances"] = 0.0 + if not ( + self.augmentations.get("regime_change_augmentation", False) + or self.augmentations.get("shock_recovery_augmentation", False) + ): + weights["structure"] = 0.0 + if not ( + self.augmentations.get("calendar_augmentation", False) + or self.augmentations.get("amplitude_modulation_augmentation", False) + ): + weights["seasonality"] = 0.0 + if not self.augmentations.get("differential_augmentation", False): + weights["analytic"] = 0.0 + if not ( + self.augmentations.get("quantization_augmentation", False) + or self.augmentations.get("censor_augmentation", False) + ): + weights["discrete"] = 0.0 + + # Sample number of operations in [2, 5] + num_ops = int(self.rng.integers(2, 6)) + + # Build candidate list and normalized probabilities + candidates = [c for c in categories if weights[c] > 0.0] + if not candidates: + # Nothing to do for this series + history_values[b : b + 1] = series + continue + num_ops = min(num_ops, len(candidates)) + probs = np.array([weights[c] for c in candidates], dtype=float) + probs = probs / probs.sum() + chosen_categories = list( + self.rng.choice(candidates, size=num_ops, replace=False, p=probs) + ) + + # Apply in the fixed global order, only if selected + # 1) Invariances + if "invariances" in chosen_categories: + # Choose one: time_flip or yflip + choices = [] + if self.augmentations.get("time_flip_augmentation", False): + choices.append("time_flip") + if self.augmentations.get("yflip_augmentation", False): + choices.append("yflip") + if choices: + pick = str(self.rng.choice(choices)) + if pick == "time_flip": + series = torch.flip(series, dims=[1]) + elif pick == "yflip": + series = -series + + # 2) Structural edits + if "structure" in chosen_categories: + choices = [] + if self.augmentations.get("regime_change_augmentation", False): + choices.append("regime") + if self.augmentations.get("shock_recovery_augmentation", False): + choices.append("shock") + if choices: + pick = str(self.rng.choice(choices)) + if pick == "regime": + series = self._apply_regime_change(series, p_apply=1.0) + else: + series = self._apply_shock_recovery(series, p_apply=1.0) + + # 3) Seasonality/context + if "seasonality" in chosen_categories: + choices = [] + if self.augmentations.get("calendar_augmentation", False): + choices.append("calendar") + if self.augmentations.get("amplitude_modulation_augmentation", False): + 
choices.append("amplitude") + if choices: + pick = str(self.rng.choice(choices)) + if pick == "calendar": + series = self._apply_calendar_injections( + series, + [starts[b]] + if (starts is not None and b < len(starts)) + else None, + [frequencies[b]] + if (frequencies is not None and b < len(frequencies)) + else None, + p_apply=1.0, + ) + else: + series = self._apply_seasonality_amplitude_modulation( + series, p_apply=1.0 + ) + + # 4) Sampling artifacts + if "artifacts" in chosen_categories and self.augmentations.get( + "resample_artifacts_augmentation", False + ): + series = self._apply_resample_artifacts(series, p_apply=1.0) + + # 5) Analytic transforms + if ( + "analytic" in chosen_categories + and self.augmentations.get("differential_augmentation", False) + and hasattr(self, "differential_augmentor") + ): + series = self.differential_augmentor.transform(series) + + # 6) Discretization/clipping (mutually exclusive) + if "discrete" in chosen_categories: + can_quant = ( + self.augmentations.get("quantization_augmentation", False) + and self.quantization_augmenter is not None + ) + can_cens = ( + self.augmentations.get("censor_augmentation", False) + and self.censor_augmenter is not None + ) + if can_quant and can_cens: + method = self.rng.choice(["quantize", "censor"], p=[0.6, 0.4]) + if method == "quantize": + series = self.quantization_augmenter.transform(series) + else: + series = self.censor_augmenter.transform(series) + elif can_quant: + series = self.quantization_augmenter.transform(series) + elif can_cens: + series = self.censor_augmenter.transform(series) + + # Write back series + history_values[b : b + 1] = series + + # 7) Scaling then Noise (last, optional, batch-level) + if self.augmentations.get("scaling_augmentation", False): + if self.rng.random() < self.augmentation_probabilities.get( + "scaling_augmentation", 0.0 + ): + scale_factor = float(self.rng.uniform(0.95, 1.05)) + history_values = history_values * scale_factor + + if self.augmentations.get("noise_augmentation", False): + if self.rng.random() < self.augmentation_probabilities.get( + "noise_augmentation", 0.0 + ): + noise_std = 0.01 * torch.std(history_values) + if torch.isfinite(noise_std) and (noise_std > 0): + noise = torch.normal(0, noise_std, size=history_values.shape) + history_values = history_values + noise + + return history_values + + def apply_per_series_only( + self, + series: torch.Tensor, + start: Optional[pd.Timestamp] = None, + frequency: Optional[str] = None, + ) -> torch.Tensor: + """ + Apply all per-series augmentations (excluding mixup) to a single series tensor, + preserving ordering and probabilities used in apply(). 
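+        As in ``apply``, between two and five categories are drawn (capped at
+        the number of eligible categories), with at most one operation applied
+        per category.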
+ + Args: + series: Tensor of shape [1, length, 1] + start: Optional pandas.Timestamp for calendar injections + frequency: Optional frequency string for calendar injections + """ + if not self.apply_augmentations: + return series + + categories = [ + "invariances", + "structure", + "seasonality", + "artifacts", + "analytic", + "discrete", + ] + weights = { + "invariances": 0.6, + "structure": 0.6, + "seasonality": 0.5, + "artifacts": 0.3, + "analytic": 0.4, + "discrete": 0.6, + } + + # Disable categories not enabled + if not ( + self.augmentations.get("time_flip_augmentation", False) + or self.augmentations.get("yflip_augmentation", False) + ): + weights["invariances"] = 0.0 + if not ( + self.augmentations.get("regime_change_augmentation", False) + or self.augmentations.get("shock_recovery_augmentation", False) + ): + weights["structure"] = 0.0 + if not ( + self.augmentations.get("calendar_augmentation", False) + or self.augmentations.get("amplitude_modulation_augmentation", False) + ): + weights["seasonality"] = 0.0 + if not self.augmentations.get("differential_augmentation", False): + weights["analytic"] = 0.0 + if not ( + self.augmentations.get("quantization_augmentation", False) + or self.augmentations.get("censor_augmentation", False) + ): + weights["discrete"] = 0.0 + + # Sample number of operations in [2, 5] + num_ops = int(self.rng.integers(2, 6)) + candidates = [c for c in categories if weights[c] > 0.0] + if not candidates: + result = series + else: + num_ops = min(num_ops, len(candidates)) + probs = np.array([weights[c] for c in candidates], dtype=float) + probs = probs / probs.sum() + chosen_categories = list( + self.rng.choice(candidates, size=num_ops, replace=False, p=probs) + ) + + result = series.clone() + + # 1) Invariances + if "invariances" in chosen_categories: + choices = [] + if self.augmentations.get("time_flip_augmentation", False): + choices.append("time_flip") + if self.augmentations.get("yflip_augmentation", False): + choices.append("yflip") + if choices: + pick = str(self.rng.choice(choices)) + if pick == "time_flip": + result = torch.flip(result, dims=[1]) + elif pick == "yflip": + result = -result + + # 2) Structural edits + if "structure" in chosen_categories: + choices = [] + if self.augmentations.get("regime_change_augmentation", False): + choices.append("regime") + if self.augmentations.get("shock_recovery_augmentation", False): + choices.append("shock") + if choices: + pick = str(self.rng.choice(choices)) + if pick == "regime": + result = self._apply_regime_change(result, p_apply=1.0) + else: + result = self._apply_shock_recovery(result, p_apply=1.0) + + # 3) Seasonality/context + if "seasonality" in chosen_categories: + choices = [] + if self.augmentations.get("calendar_augmentation", False): + choices.append("calendar") + if self.augmentations.get("amplitude_modulation_augmentation", False): + choices.append("amplitude") + if choices: + pick = str(self.rng.choice(choices)) + if pick == "calendar": + result = self._apply_calendar_injections( + result, + [start] if start is not None else None, + [frequency] if frequency is not None else None, + p_apply=1.0, + ) + else: + result = self._apply_seasonality_amplitude_modulation( + result, p_apply=1.0 + ) + + # 4) Sampling artifacts + if "artifacts" in chosen_categories and self.augmentations.get( + "resample_artifacts_augmentation", False + ): + result = self._apply_resample_artifacts(result, p_apply=1.0) + + # 5) Analytic transforms + if ( + "analytic" in chosen_categories + and 
self.augmentations.get("differential_augmentation", False) + and hasattr(self, "differential_augmentor") + ): + result = self.differential_augmentor.transform(result) + + # 6) Discretization/clipping (mutually exclusive) + if "discrete" in chosen_categories: + can_quant = ( + self.augmentations.get("quantization_augmentation", False) + and self.quantization_augmenter is not None + ) + can_cens = ( + self.augmentations.get("censor_augmentation", False) + and self.censor_augmenter is not None + ) + if can_quant and can_cens: + method = self.rng.choice(["quantize", "censor"], p=[0.6, 0.4]) + if method == "quantize": + result = self.quantization_augmenter.transform(result) + else: + result = self.censor_augmenter.transform(result) + elif can_quant: + result = self.quantization_augmenter.transform(result) + elif can_cens: + result = self.censor_augmenter.transform(result) + + # Optional scaling and noise (applied to this single series) + if self.augmentations.get("scaling_augmentation", False): + if self.rng.random() < self.augmentation_probabilities.get( + "scaling_augmentation", 0.0 + ): + scale_factor = float(self.rng.uniform(0.95, 1.05)) + result = result * scale_factor + + if self.augmentations.get("noise_augmentation", False): + if self.rng.random() < self.augmentation_probabilities.get( + "noise_augmentation", 0.0 + ): + noise_std = 0.01 * torch.std(result) + if torch.isfinite(noise_std) and (noise_std > 0): + noise = torch.normal(0, noise_std, size=result.shape) + result = result + noise + + return result + + @property + def mixup_augmenter(self) -> Optional[MixUpAugmenter]: + if not hasattr(self, "_mixup_augmenter"): + self._mixup_augmenter = ( + MixUpAugmenter( + p_combine=self.augmentation_probabilities["mixup_augmentation"] + ) + if self.augmentations["mixup_augmentation"] + else None + ) + return self._mixup_augmenter + + def _apply_regime_change( + self, series: torch.Tensor, p_apply: float + ) -> torch.Tensor: + """ + Apply piecewise affine transforms with 1-3 change-points per series. 
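+        Each segment is transformed as (x - segment_mean) * scale + segment_mean + shift, with scale drawn from U(0.8, 1.25) and shift from N(0, 0.15 * overall_std).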
+ series shape: [batch, length, 1] + """ + if series.numel() == 0: + return series + batch_size, length, _ = series.shape + result = series.clone() + + # Iterate per-series to allow different change-points + for b in range(batch_size): + if self.rng.random() >= p_apply: + continue + # sample number of change points and ensure minimum segment length + num_cp = int(self.rng.integers(1, 4)) + min_seg = max(8, length // 32) + if length <= (num_cp + 1) * min_seg: + num_cp = max(1, length // (2 * min_seg) - 1) + if num_cp <= 0: + num_cp = 1 + # pick change-point indices + valid_positions = np.arange(min_seg, length - min_seg) + if valid_positions.size == 0: + continue + cp = np.sort(self.rng.choice(valid_positions, size=num_cp, replace=False)) + boundaries = np.concatenate([[0], cp, [length]]) + + # compute per-segment scale/shift + series_b = result[b, :, 0] + seg_scales = [] + seg_shifts = [] + overall_std = torch.std(series_b).item() + if not np.isfinite(overall_std) or overall_std == 0: + overall_std = 1.0 + for _ in range(len(boundaries) - 1): + scale = float(self.rng.uniform(0.8, 1.25)) + shift = float(self.rng.normal(0.0, 0.15 * overall_std)) + seg_scales.append(scale) + seg_shifts.append(shift) + + # apply per segment + for i in range(len(boundaries) - 1): + s, e = int(boundaries[i]), int(boundaries[i + 1]) + if e <= s: + continue + segment = series_b[s:e] + # preserve segment mean roughly while scaling deviations + seg_mean = torch.mean(segment) + transformed = ( + (segment - seg_mean) * seg_scales[i] + seg_mean + seg_shifts[i] + ) + result[b, s:e, 0] = transformed + return result + + def _apply_shock_recovery( + self, series: torch.Tensor, p_apply: float + ) -> torch.Tensor: + """ + Add an impulse at a random time and exponentially decay to baseline. 
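+        The shock magnitude is +/- U(0.5, 2.0) times the series std and decays as exp(-(t - t0) / half_life) for t >= t0, with half_life drawn from U(0.03, 0.25) * length.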
+ series shape: [batch, length, 1] + """ + if series.numel() == 0: + return series + batch_size, length, _ = series.shape + device = series.device + result = series.clone() + + time_idx = torch.arange(length, device=device).float() + + for b in range(batch_size): + if self.rng.random() >= p_apply: + continue + # choose shock time away from edges + t0 = int( + self.rng.integers( + low=max(1, length // 16), high=max(2, length - length // 16) + ) + ) + # magnitude relative to series std + s_b = result[b, :, 0] + std_b = torch.std(s_b).item() + if not np.isfinite(std_b) or std_b == 0: + std_b = 1.0 + mag = float(self.rng.uniform(0.5, 2.0) * std_b) + if self.rng.random() < 0.5: + mag = -mag + # decay constant + half_life = float(self.rng.uniform(0.03, 0.25) * length) + decay = torch.exp(-(time_idx - t0).clamp(min=0) / max(1.0, half_life)) + effect = mag * decay + result[b, :, 0] = s_b + effect + return result + + def _apply_calendar_injections( + self, + series: torch.Tensor, + starts: Optional[List[pd.Timestamp]], + frequencies: Optional[List[str]], + p_apply: float, + ) -> torch.Tensor: + if series.numel() == 0: + return series + if starts is None or frequencies is None: + return series + batch_size, length, _ = series.shape + result = series.clone() + + for b in range(batch_size): + if b >= len(starts) or b >= len(frequencies): + continue + if self.rng.random() >= p_apply: + continue + start_ts = starts[b] + try: + freq_enum = parse_frequency(str(frequencies[b])) + freq_alias = freq_enum.to_pandas_freq(for_date_range=True) + except Exception: + freq_alias = "D" + try: + index = pd.date_range(start=start_ts, periods=length, freq=freq_alias) + except Exception: + index = pd.date_range(start=start_ts, periods=length, freq="D") + + factors = np.ones(length, dtype=np.float32) + # Weekend dips (for daily/hourly-like) + try: + freq_enum_check = parse_frequency(str(frequencies[b])) + except Exception: + freq_enum_check = Frequency.D + if freq_enum_check in [ + Frequency.H, + Frequency.D, + Frequency.S, + Frequency.T1, + Frequency.T5, + Frequency.T10, + Frequency.T15, + Frequency.T30, + ]: + dow = index.dayofweek + if (dow >= 5).any(): + dip = float(self.rng.uniform(0.7, 0.95)) + factors[dow >= 5] *= dip + + # Month-end bumps + if hasattr(index, "is_month_end"): + me = np.asarray(index.is_month_end, dtype=bool) + if me.any(): + bump = float(self.rng.uniform(1.05, 1.3)) + factors[me] *= bump + + # Holiday-like one-off effects (1-2 random impulses) + n_imp = int(self.rng.integers(1, 3)) + imp_positions = self.rng.integers(0, length, size=n_imp) + for pos in np.atleast_1d(imp_positions): + if 0 <= pos < length: + impulse = float(self.rng.uniform(0.8, 1.4)) + factors[pos] *= impulse + + # Apply multiplicatively around mean to avoid drift + s = result[b, :, 0].cpu().numpy() + mean_val = float(np.mean(s)) + s_new = (s - mean_val) * factors + mean_val + result[b, :, 0] = torch.from_numpy(s_new).to(result.device) + return result + + def _apply_seasonality_amplitude_modulation( + self, series: torch.Tensor, p_apply: float + ) -> torch.Tensor: + if series.numel() == 0: + return series + batch_size, length, _ = series.shape + result = series.clone() + + for b in range(batch_size): + if self.rng.random() >= p_apply: + continue + min_w = max(8, length // 16) + max_w = max(min_w + 1, length // 2) + win = int(self.rng.integers(min_w, max_w + 1)) + start = int(self.rng.integers(0, max(1, length - win))) + end = start + win + seg = result[b, start:end, 0] + if seg.numel() == 0: + continue + seg_mean = torch.mean(seg) 
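+            # Rescale the window's deviations around its mean by a random amplitude factor in [0.5, 1.8]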
+ amp = float(self.rng.uniform(0.5, 1.8)) + result[b, start:end, 0] = (seg - seg_mean) * amp + seg_mean + return result + + def _apply_resample_artifacts( + self, + series: torch.Tensor, + p_apply: float, + ) -> torch.Tensor: + """ + Downsample then upsample with interpolation to introduce artifacts. + """ + if series.numel() == 0: + return series + batch_size, length, _ = series.shape + result = series.clone() + + for b in range(batch_size): + if self.rng.random() >= p_apply: + continue + + s_np = result[b, :, 0].cpu().numpy() + max_factor = max(2, min(8, length // 32)) + if max_factor <= 1: + continue + factor = int(self.rng.integers(2, max_factor + 1)) + offset = int(self.rng.integers(0, factor)) + ds_idx = np.arange(offset, length, factor) + if ds_idx.size < 3: + continue + ds_vals = s_np[ds_idx] + base_idx = np.arange(length) + mode = self.rng.choice( + ["linear", "hold", "linear_smooth"], p=[0.5, 0.2, 0.3] + ) + if mode == "linear": + us = np.interp(base_idx, ds_idx, ds_vals) + elif mode == "hold": + us = np.empty(length, dtype=s_np.dtype) + last = ds_vals[0] + j = 0 + for i in range(length): + while j + 1 < ds_idx.size and i >= ds_idx[j + 1]: + j += 1 + last = ds_vals[j] + us[i] = last + else: + us = np.interp(base_idx, ds_idx, ds_vals) + k = max(3, length // 128) + kernel = np.ones(k) / k + us = np.convolve(us, kernel, mode="same") + result[b, :, 0] = torch.from_numpy(us).to(result.device) + return result + + +class OfflinePerSampleAugmentedGenerator: + def __init__( + self, + base_data_dir: str, + output_dir: str, + length: Optional[int], + chunk_size: int = 2**13, + generator_proportions: Optional[Dict[str, float]] = None, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + global_seed: int = 42, + mixup_position: str = "both", + change_threshold: float = 0.05, + max_tries: int = 3, + enable_quality_filter: bool = False, + rc_batch_size: int = 8, + ): + self.base_data_dir = base_data_dir + self.length = length + self.chunk_size = chunk_size + self.global_seed = global_seed + np.random.seed(global_seed) + torch.manual_seed(global_seed) + self.rng = np.random.default_rng(global_seed) + self.mixup_position = mixup_position + self.change_threshold = float(change_threshold) + self.max_tries = int(max_tries) + self.enable_quality_filter = bool(enable_quality_filter) + self.rc_batch_size = int(rc_batch_size) + + out_dir_name = ( + f"augmented_per_sample_{length}" + if length is not None + else "augmented_per_sample" + ) + self.dataset_manager = TimeSeriesDatasetManager( + str(Path(output_dir) / out_dir_name), batch_size=chunk_size + ) + + self.augmentor = UnivariateOfflineAugmentor( + augmentations=augmentations, + augmentation_probabilities=augmentation_probabilities, + global_seed=global_seed, + ) + + self.generator_proportions = self._setup_proportions(generator_proportions) + self.datasets = self._initialize_datasets() + + # -------------------- Per-sample scaler utilities -------------------- + def _choose_scaler(self) -> Optional[object]: + """Choose a scaler with 50% probability of None; else one of four scalers uniformly.""" + if self.rng.random() < 0.5: + return None + pick = str(self.rng.choice(["robust", "minmax", "median", "mean"])) + if pick == "robust": + return RobustScaler() + if pick == "minmax": + return MinMaxScaler() + if pick == "median": + return MedianScaler() + return MeanScaler() + + def _apply_scaler( + self, values: torch.Tensor, scaler: Optional[object] + ) -> torch.Tensor: + """Apply the 
provided scaler to values of shape [1, length, channels].""" + if scaler is None: + return values + stats = scaler.compute_statistics(values) + return scaler.scale(values, stats) + + # -------------------- Mixup utilities (per-sample) -------------------- + def _mix_sources_static( + self, source_tensor: torch.Tensor, alpha: float + ) -> torch.Tensor: + """Static Dirichlet mix of k sources -> [1, L, C].""" + k = int(source_tensor.shape[0]) + device = source_tensor.device + concentration = torch.full((k,), float(alpha), device=device) + weights = torch.distributions.Dirichlet(concentration).sample() + mixed = (source_tensor * weights.view(k, 1, 1)).sum(dim=0, keepdim=True) + return mixed + + def _apply_mixup_to_series( + self, + base_series: torch.Tensor, + total_length_for_batch: int, + scaler: Optional[object], + ) -> torch.Tensor: + """Mix base with k-1 additional sources; returns [1, L, 1].""" + mixup = self.augmentor.mixup_augmenter + if mixup is None: + return base_series + + # Decide k + current_k = ( + mixup._sample_k() + if not mixup.randomize_k + else int(self.rng.integers(2, mixup.max_k + 1)) + ) + # Ensure at least 2 and include base in the set + current_k = max(2, int(current_k)) + num_sources_needed = current_k - 1 + + chosen_gens = self._choose_generators_for_mixup(current_k) + # If we sampled k gens but need only k-1 external sources, trim + chosen_gens = chosen_gens[:num_sources_needed] + + sources: List[torch.Tensor] = [] + # Base (already possibly scaled) first + sources.append(base_series) + # Additional sources + for gen in chosen_gens: + src_values, _, _, _ = self._get_one_sample_from_generator( + gen, total_length_for_batch + ) + if scaler is not None: + src_values = self._apply_scaler(src_values, scaler) + sources.append(src_values) + + source_tensor = torch.cat(sources, dim=0) + alpha = mixup._sample_alpha() + mixed_series = self._mix_sources_static(source_tensor, alpha=alpha) + return mixed_series + + # -------------------- RandomConv (temp batch) utilities -------------------- + def _apply_random_conv_with_temp_batch( + self, + base_series: torch.Tensor, + total_length_for_batch: int, + scaler: Optional[object], + ) -> torch.Tensor: + """Apply RandomConvAugmenter by creating a small temp batch and taking the transformed base element.""" + if not hasattr(self, "random_conv_augmenter"): + # Lazy init if not present but enabled in config + if self.augmentor.augmentations.get("random_conv_augmentation", False): + p_val = self.augmentor.augmentation_probabilities.get( + "random_conv_augmentation", 0.3 + ) + self.random_conv_augmenter = RandomConvAugmenter(p_transform=p_val) + else: + return base_series + + # Assemble temp batch: base + (rc_batch_size-1) sources + temp_series_list: List[torch.Tensor] = [base_series] + for _ in range(max(0, self.rc_batch_size - 1)): + try: + gen = self._sample_generator_name() + src_values, _, _, _ = self._get_one_sample_from_generator( + gen, total_length_for_batch + ) + if scaler is not None: + src_values = self._apply_scaler(src_values, scaler) + temp_series_list.append(src_values) + except Exception: + break + temp_batch = torch.cat(temp_series_list, dim=0) + + transformed = self.random_conv_augmenter.transform(temp_batch) + return transformed[0:1] + + # -------------------- Selection and quality helpers -------------------- + def _compute_change_score( + self, original: torch.Tensor, augmented: torch.Tensor + ) -> float: + """ + Computes a normalized change score between original and augmented series. 
+ The score is the Mean Absolute Error (MAE) normalized by a robust + measure of the original series' scale (its Interquartile Range). + This makes the score less sensitive to outliers and absolute scale. + """ + original_flat = original.flatten() + + # Use the standard Interquartile Range (IQR) for robust scaling. + q25 = torch.quantile(original_flat, 0.25) + q75 = torch.quantile(original_flat, 0.75) + iqr = (q75 - q25).item() + + # Use a robust epsilon to prevent division by zero for flat series. + series_range = (torch.max(original_flat) - torch.min(original_flat)).item() + scale = max(iqr, 1e-6 * series_range, 1e-8) + + # Compute Mean Absolute Error + mae = torch.mean(torch.abs(augmented - original)).item() + + return float(mae / scale) + + # moved to src/synthetic_generation/augmentations/filter.py + + def _setup_proportions( + self, generator_proportions: Optional[Dict[str, float]] + ) -> Dict[str, float]: + # Default uniform proportions across discovered generators + if generator_proportions is None: + # Discover generator directories + base = Path(self.base_data_dir) + discovered = [p.name for p in base.iterdir() if p.is_dir()] + proportions = {name: 1.0 for name in discovered} + else: + proportions = dict(generator_proportions) + + total = sum(proportions.values()) + if total <= 0: + raise ValueError("Total generator proportions must be positive") + return {k: v / total for k, v in proportions.items()} + + def _initialize_datasets(self) -> Dict[str, CyclicalBatchDataset]: + datasets: Dict[str, CyclicalBatchDataset] = {} + for generator_name, proportion in self.generator_proportions.items(): + # Load batches only if the generator is explicitly listed and has positive proportion + if proportion <= 0: + continue + batches_dir = Path(self.base_data_dir) / generator_name + if not batches_dir.is_dir(): + logging.warning( + f"Skipping '{generator_name}' because directory does not exist: {batches_dir}" + ) + continue + try: + dataset = CyclicalBatchDataset( + batches_dir=str(batches_dir), + generator_type=generator_name, + device=None, + prefetch_next=True, + prefetch_threshold=32, + ) + datasets[generator_name] = dataset + logging.info(f"Loaded dataset for {generator_name}") + except Exception as e: + logging.warning(f"Failed to load dataset for {generator_name}: {e}") + if not datasets: + raise ValueError("No valid datasets loaded from base_data_dir") + return datasets + + def _convert_sample_to_tensor( + self, sample: dict + ) -> Tuple[torch.Tensor, Any, str, int]: + num_channels = sample.get("num_channels", 1) + values_data = sample["values"] + + if num_channels == 1: + if isinstance(values_data[0], list): + values = torch.tensor(values_data[0], dtype=torch.float32) + else: + values = torch.tensor(values_data, dtype=torch.float32) + values = values.unsqueeze(0).unsqueeze(-1) + else: + channel_tensors = [] + for channel_values in values_data: + channel_tensor = torch.tensor(channel_values, dtype=torch.float32) + channel_tensors.append(channel_tensor) + values = torch.stack(channel_tensors, dim=-1).unsqueeze(0) + + freq_str = sample["frequency"] + start_val = sample["start"] + # Keep start as pandas.Timestamp for Arrow writing later + if isinstance(start_val, pd.Timestamp): + start = start_val + else: + start = pd.Timestamp(start_val) + + return values, start, freq_str, num_channels + + def _maybe_resize(self, values: torch.Tensor, target_len: int) -> torch.Tensor: + if values.shape[1] == target_len: + return values + if values.shape[1] > target_len: + max_start_idx = values.shape[1] - 
target_len + start_idx = np.random.randint(0, max_start_idx + 1) + return values[:, start_idx : start_idx + target_len, :] + # Subsample evenly to reach target_len + indices = np.linspace(0, values.shape[1] - 1, target_len, dtype=int) + return values[:, indices, :] + + def _sample_generator_name(self) -> str: + available = [g for g in self.generator_proportions.keys() if g in self.datasets] + probs = np.array( + [self.generator_proportions[g] for g in available], dtype=float + ) + probs = probs / probs.sum() + return str(np.random.choice(available, p=probs)) + + def _get_one_sample( + self, total_length_for_batch: int + ) -> Tuple[torch.Tensor, pd.Timestamp, str, int]: + attempts = 0 + while attempts < 20: + attempts += 1 + gen_name = self._sample_generator_name() + dataset = self.datasets[gen_name] + sample = dataset.get_samples(1)[0] + values, start, freq_str, num_channels = self._convert_sample_to_tensor( + sample + ) + values = self._maybe_resize(values, total_length_for_batch) + if values.shape[2] != 1: + continue + return values, start, freq_str, num_channels + raise RuntimeError( + "Failed to sample a valid univariate series after multiple attempts" + ) + + def _get_one_sample_from_generator( + self, gen_name: str, total_length_for_batch: int + ) -> Tuple[torch.Tensor, pd.Timestamp, str, int]: + attempts = 0 + dataset = self.datasets[gen_name] + while attempts < 20: + attempts += 1 + sample = dataset.get_samples(1)[0] + values, start, freq_str, num_channels = self._convert_sample_to_tensor( + sample + ) + values = self._maybe_resize(values, total_length_for_batch) + if values.shape[2] != 1: + continue + return values, start, freq_str, num_channels + raise RuntimeError( + f"Failed to sample a valid univariate series from generator '{gen_name}' after multiple attempts" + ) + + def _choose_generators_for_mixup(self, k: int) -> List[str]: + available = [g for g in self.generator_proportions.keys() if g in self.datasets] + if not available: + raise RuntimeError("No available generators to sample from for mixup") + k_eff = min(k, len(available)) + # Weighted sampling without replacement by sequential renormalization + chosen: List[str] = [] + remaining = available.copy() + while len(chosen) < k_eff: + weights = np.array( + [self.generator_proportions[g] for g in remaining], dtype=float + ) + if weights.sum() <= 0: + # fallback to uniform + probs = np.ones(len(remaining)) / len(remaining) + else: + probs = weights / weights.sum() + pick = str(np.random.choice(remaining, p=probs)) + chosen.append(pick) + remaining.remove(pick) + return chosen + + def _maybe_apply_mixup_to_single( + self, base_series: torch.Tensor, total_length_for_batch: int + ) -> torch.Tensor: + do_mixup = ( + self.augmentor.augmentations.get("mixup_augmentation", False) + and self.augmentor.rng.random() + < self.augmentor.augmentation_probabilities.get("mixup_augmentation", 0.0) + ) + if not do_mixup: + return base_series + + # Use MixUpAugmenter to avoid duplication + mixup = self.augmentor.mixup_augmenter + if mixup is None: + return base_series + + # Decide number of sources k consistent with MixUpAugmenter behavior + current_k = ( + mixup._sample_k() + if not mixup.randomize_k + else int(self.augmentor.rng.integers(2, mixup.max_k + 1)) + ) + + # Choose distinct generators for sources according to proportions + chosen_gens = self._choose_generators_for_mixup(current_k) + + # Collect one source per chosen generator + sources: List[torch.Tensor] = [] + for gen in chosen_gens: + src_values, _, _, _ = 
self._get_one_sample_from_generator( + gen, total_length_for_batch + ) + sources.append(src_values) + source_tensor = torch.cat(sources, dim=0) + + # Sample alpha via MixUpAugmenter, then mix + alpha = mixup._sample_alpha() + mixed_series = mixup.mix_sources(source_tensor, alpha=alpha) + return mixed_series + + def _tensor_to_values_list( + self, series_tensor: torch.Tensor + ) -> Tuple[List[List[float]], int, int]: + # series_tensor shape: [1, seq_len, num_channels] + seq_len = int(series_tensor.shape[1]) + num_channels = int(series_tensor.shape[2]) + if num_channels == 1: + return [series_tensor.squeeze(0).squeeze(-1).tolist()], seq_len, 1 + channels: List[List[float]] = [] + for ch in range(num_channels): + channels.append(series_tensor[0, :, ch].tolist()) + return channels, seq_len, num_channels + + def run(self, num_batches: int) -> None: + logging.info( + f"Starting offline augmentation into {self.dataset_manager.batches_dir} | chunk_size={self.chunk_size}" + ) + + augmented_buffer: List[Dict[str, Any]] = [] + target_batches = num_batches + start_time = time.time() + + try: + while self.dataset_manager.batch_counter < target_batches: + # Decide target length for this sample + total_length_for_batch = ( + self.length + if self.length is not None + else int(np.random.choice(LENGTH_CHOICES)) + ) + + for _ in range(max(1, self.max_tries)): + # Sample one base series + base_values, base_start, base_freq, _ = self._get_one_sample( + total_length_for_batch + ) + original_base = base_values.clone() + + # Per-sample scaler choice (50% none; else robust/minmax/median/mean) + per_sample_scaler = self._choose_scaler() + base_values = self._apply_scaler(base_values, per_sample_scaler) + + # Early mixup (if enabled and position includes first) + do_mixup_early = ( + self.augmentor.augmentations.get("mixup_augmentation", False) + and self.mixup_position in ["first", "both"] + and self.augmentor.rng.random() + < self.augmentor.augmentation_probabilities.get( + "mixup_augmentation", 0.0 + ) + ) + if do_mixup_early: + base_values = self._apply_mixup_to_series( + base_values, total_length_for_batch, per_sample_scaler + ) + + # Apply per-series augmentations + augmented_single = self.augmentor.apply_per_series_only( + base_values, start=base_start, frequency=base_freq + ) + + # Optional analytic: RandomConvAugmenter via temp batch (before late mixup) + if self.augmentor.augmentations.get( + "random_conv_augmentation", False + ): + if ( + self.rng.random() + < self.augmentor.augmentation_probabilities.get( + "random_conv_augmentation", 0.3 + ) + ): + augmented_single = self._apply_random_conv_with_temp_batch( + augmented_single, + total_length_for_batch, + per_sample_scaler, + ) + + # Late mixup (if enabled and position includes last) + do_mixup_late = ( + self.augmentor.augmentations.get("mixup_augmentation", False) + and self.mixup_position in ["last", "both"] + and self.augmentor.rng.random() + < self.augmentor.augmentation_probabilities.get( + "mixup_augmentation", 0.0 + ) + ) + if do_mixup_late: + augmented_single = self._apply_mixup_to_series( + augmented_single, total_length_for_batch, per_sample_scaler + ) + + # Compute change score and unchanged check + score = self._compute_change_score(original_base, augmented_single) + if score < self.change_threshold: + continue + + # Optional quality filter + if self.enable_quality_filter and is_low_quality(augmented_single): + continue + + # Accept first candidate that passes thresholds + values_list, seq_len, num_channels = self._tensor_to_values_list( + 
augmented_single + ) + record = { + "series_id": self.dataset_manager.series_counter, + "values": values_list, + "length": int(seq_len), + "num_channels": int(num_channels), + "generator_type": "augmented", + "start": pd.Timestamp(base_start), + "frequency": base_freq, + "generation_timestamp": pd.Timestamp.now(), + } + augmented_buffer.append(record) + break + + # Write a full chunk to disk once enough series have been buffered + if len(augmented_buffer) >= self.chunk_size: + write_start = time.time() + self.dataset_manager.append_batch(augmented_buffer) + write_time = time.time() - write_start + elapsed = time.time() - start_time + series_per_sec = ( + self.dataset_manager.series_counter / elapsed + if elapsed > 0 + else 0 + ) + print( + f"✓ Wrote batch {self.dataset_manager.batch_counter - 1}/{target_batches} | Series: {self.dataset_manager.series_counter:,} | Rate: {series_per_sec:.1f}/s | Write: {write_time:.2f}s" + ) + augmented_buffer = [] + + except KeyboardInterrupt: + logging.info( + f"Interrupted. Generated {self.dataset_manager.series_counter} series, {self.dataset_manager.batch_counter} batches." + ) + finally: + # Flush remaining buffer if any + if augmented_buffer: + self.dataset_manager.append_batch(augmented_buffer) + logging.info("Offline augmentation completed.") + + +def setup_logging(verbose: bool = False) -> None: + level = logging.DEBUG if verbose else logging.INFO + logging.basicConfig( + level=level, + format="%(asctime)s - %(levelname)s - %(message)s", + handlers=[logging.StreamHandler(sys.stdout)], + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Offline augmentation script to precompute augmented series", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + + parser.add_argument( + "--base-data-dir", + type=str, + required=True, + help="Base directory with generator subdirectories (inputs)", + ) + parser.add_argument( + "--output-dir", + type=str, + required=True, + help="Base output directory for augmented datasets", + ) + parser.add_argument( + "--length", + type=int, + default=None, + help="Fixed length of augmented series. 
If set, saves under augmented{length}", + ) + parser.add_argument( + "--chunk-size", + type=int, + default=2**13, # 8192 + help="Number of series per written Arrow batch", + ) + parser.add_argument( + "--num-batches", + type=int, + default=1000, + help="Number of Arrow batches to write", + ) + parser.add_argument( + "--mixup-position", + type=str, + default="both", + choices=["first", "last", "both"], + help="Where to apply mixup in the pipeline (first, last, or both)", + ) + parser.add_argument( + "--change-threshold", + type=float, + default=0.05, + help="Minimum normalized change score (vs IQR) required to keep series", + ) + parser.add_argument( + "--max-tries", + type=int, + default=3, + help="Max attempts to produce an acceptable augmented series per output", + ) + parser.add_argument( + "--enable-quality-filter", + action="store_true", + help="Enable low-quality series filter (noise-like removal)", + ) + # Quality filter thresholds moved to filter module defaults + parser.add_argument( + "--rc-batch-size", + type=int, + default=8, + help="Temporary batch size used for RandomConvAugmenter", + ) + parser.add_argument("--verbose", action="store_true", help="Enable verbose logging") + parser.add_argument( + "--global-seed", type=int, default=42, help="Global random seed" + ) + + args = parser.parse_args() + setup_logging(args.verbose) + + generator_proportions = { + "forecast_pfn": 1.0, + "gp": 1.0, + "kernel": 1.0, + "sinewave": 1.0, + "sawtooth": 1.0, + "step": 0.1, + "anomaly": 1.0, + "spike": 1.0, + "cauker_univariate": 2.0, + "ou_process": 1.0, + "audio_financial_volatility": 0.1, + "audio_multi_scale_fractal": 0.1, + "audio_network_topology": 0.5, + "audio_stochastic_rhythm": 1.0, + } + + augmentations = { + "censor_augmentation": True, + "quantization_augmentation": True, + "mixup_augmentation": True, + "time_flip_augmentation": True, + "yflip_augmentation": True, + "differential_augmentation": True, + "regime_change_augmentation": True, + "shock_recovery_augmentation": True, + "calendar_augmentation": True, + "amplitude_modulation_augmentation": True, + "resample_artifacts_augmentation": True, + "scaling_augmentation": True, + "noise_augmentation": True, + "random_conv_augmentation": True, + } + + augmentation_probabilities = { + "censor_or_quantization_augmentation": 0.40, + "mixup_augmentation": 0.50, + "time_flip_augmentation": 0.30, + "yflip_augmentation": 0.30, + "differential_augmentation": 0.40, + "regime_change_augmentation": 0.40, + "shock_recovery_augmentation": 0.40, + "calendar_augmentation": 0.40, + "amplitude_modulation_augmentation": 0.35, + "resample_artifacts_augmentation": 0.40, + "scaling_augmentation": 0.50, + "noise_augmentation": 0.1, + "random_conv_augmentation": 0.30, + } + + try: + generator = OfflinePerSampleAugmentedGenerator( + base_data_dir=args.base_data_dir, + output_dir=args.output_dir, + length=args.length, + chunk_size=args.chunk_size, + generator_proportions=generator_proportions, + augmentations=augmentations, + augmentation_probabilities=augmentation_probabilities, + global_seed=args.global_seed, + mixup_position=args.mixup_position, + change_threshold=args.change_threshold, + max_tries=args.max_tries, + enable_quality_filter=args.enable_quality_filter, + rc_batch_size=args.rc_batch_size, + ) + + generator.run(num_batches=args.num_batches) + except Exception as e: + logging.error(f"Fatal error: {e}") + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/synthetic_generation/augmentations/offline_temp_batch_augmentations.py 
b/src/synthetic_generation/augmentations/offline_temp_batch_augmentations.py new file mode 100644 index 0000000000000000000000000000000000000000..494a2d8195abdf1998eaaf3ef13edf6610304d3f --- /dev/null +++ b/src/synthetic_generation/augmentations/offline_temp_batch_augmentations.py @@ -0,0 +1,739 @@ +import argparse +import logging +import sys +import time +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple + +import numpy as np +import pandas as pd +import torch + +from src.data.augmentations import ( + CensorAugmenter, + DifferentialAugmenter, + MixUpAugmenter, + QuantizationAugmenter, + RandomConvAugmenter, + TimeFlipAugmenter, + YFlipAugmenter, +) +from src.data.constants import LENGTH_CHOICES +from src.data.datasets import CyclicalBatchDataset +from src.data.filter import is_low_quality +from src.data.scalers import MeanScaler, MedianScaler, MinMaxScaler, RobustScaler +from src.synthetic_generation.augmentations.offline_per_sample_iid_augmentations import ( + TimeSeriesDatasetManager, + UnivariateOfflineAugmentor, +) + + +class OfflineTempBatchAugmentedGenerator: + def __init__( + self, + base_data_dir: str, + output_dir: str, + length: Optional[int], + mixed_batch_size: int = 10, + chunk_size: int = 2**13, + generator_proportions: Optional[Dict[str, float]] = None, + augmentations: Optional[Dict[str, bool]] = None, + augmentation_probabilities: Optional[Dict[str, float]] = None, + global_seed: int = 42, + mixup_position: str = "both", + selection_strategy: str = "random", + change_threshold: float = 0.05, + enable_quality_filter: bool = False, + temp_batch_retries: int = 3, + ): + self.base_data_dir = base_data_dir + self.length = length + self.mixed_batch_size = mixed_batch_size + self.chunk_size = chunk_size + self.global_seed = global_seed + np.random.seed(global_seed) + torch.manual_seed(global_seed) + + out_dir_name = ( + f"augmented_temp_batch_{length}" + if length is not None + else "augmented_temp_batch" + ) + self.dataset_manager = TimeSeriesDatasetManager( + str(Path(output_dir) / out_dir_name), batch_size=chunk_size + ) + + # Augmentation config + self.augmentation_probabilities = augmentation_probabilities or {} + self.augmentations = augmentations or {} + self.apply_augmentations = any(self.augmentations.values()) + + # RNG for category choices and sampling + self.rng = np.random.default_rng(global_seed) + + # Mixup placement and selection strategy + self.mixup_position = mixup_position + self.selection_strategy = selection_strategy + self.change_threshold = float(change_threshold) + self.enable_quality_filter = bool(enable_quality_filter) + self.temp_batch_retries = int(temp_batch_retries) + + # Initialize augmenters as in old composer ordering + self.flip_augmenter = None + if self.augmentations.get("time_flip_augmentation", False): + self.flip_augmenter = TimeFlipAugmenter( + p_flip=self.augmentation_probabilities.get( + "time_flip_augmentation", 0.5 + ) + ) + + self.yflip_augmenter = None + if self.augmentations.get("yflip_augmentation", False): + self.yflip_augmenter = YFlipAugmenter( + p_flip=self.augmentation_probabilities.get("yflip_augmentation", 0.5) + ) + + self.censor_augmenter = None + if self.augmentations.get("censor_augmentation", False): + self.censor_augmenter = CensorAugmenter() + + self.quantization_augmenter = None + if self.augmentations.get("quantization_augmentation", False): + self.quantization_augmenter = QuantizationAugmenter( + p_quantize=self.augmentation_probabilities.get( + "censor_or_quantization_augmentation", 
0.5 + ), + level_range=(5, 15), + ) + + self.mixup_augmenter = None + if self.augmentations.get("mixup_augmentation", False): + self.mixup_augmenter = MixUpAugmenter( + p_combine=self.augmentation_probabilities.get("mixup_augmentation", 0.5) + ) + + self.differential_augmentor = None + if self.augmentations.get("differential_augmentation", False): + self.differential_augmentor = DifferentialAugmenter( + p_transform=self.augmentation_probabilities.get( + "differential_augmentation", 0.5 + ) + ) + + self.random_conv_augmenter = None + if self.augmentations.get("random_conv_augmentation", False): + self.random_conv_augmenter = RandomConvAugmenter( + p_transform=self.augmentation_probabilities.get( + "random_conv_augmentation", 0.3 + ) + ) + + self.generator_proportions = self._setup_proportions(generator_proportions) + self.datasets = self._initialize_datasets() + + # Per-series augmentor from offline_augmentations.py (categories only) + self.per_series_augmentor = UnivariateOfflineAugmentor( + augmentations=self.augmentations, + augmentation_probabilities=self.augmentation_probabilities, + global_seed=global_seed, + ) + + def _compute_change_scores( + self, original_batch: torch.Tensor, augmented_batch: torch.Tensor + ) -> np.ndarray: + # Normalized MAE vs IQR (q25-q75) per element + bsz = augmented_batch.shape[0] + scores: List[float] = [] + for i in range(bsz): + base_flat = original_batch[i].reshape(-1) + q25 = torch.quantile(base_flat, 0.25) + q75 = torch.quantile(base_flat, 0.75) + iqr = (q75 - q25).item() + iqr = iqr if iqr > 0 else 1e-6 + mae = torch.mean(torch.abs(augmented_batch[i] - original_batch[i])).item() + scores.append(mae / iqr) + return np.asarray(scores, dtype=float) + + def _setup_proportions( + self, generator_proportions: Optional[Dict[str, float]] + ) -> Dict[str, float]: + # Default uniform across discovered generators + if generator_proportions is None: + base = Path(self.base_data_dir) + discovered = [p.name for p in base.iterdir() if p.is_dir()] + proportions = {name: 1.0 for name in discovered} + else: + proportions = dict(generator_proportions) + + total = sum(proportions.values()) + if total <= 0: + raise ValueError("Total generator proportions must be positive") + return {k: v / total for k, v in proportions.items()} + + def _initialize_datasets(self) -> Dict[str, CyclicalBatchDataset]: + datasets: Dict[str, CyclicalBatchDataset] = {} + for generator_name, proportion in self.generator_proportions.items(): + if proportion <= 0: + continue + batches_dir = Path(self.base_data_dir) / generator_name + if not batches_dir.is_dir(): + logging.warning( + f"Skipping '{generator_name}' because directory does not exist: {batches_dir}" + ) + continue + try: + dataset = CyclicalBatchDataset( + batches_dir=str(batches_dir), + generator_type=generator_name, + device=None, + prefetch_next=True, + prefetch_threshold=32, + ) + datasets[generator_name] = dataset + logging.info(f"Loaded dataset for {generator_name}") + except Exception as e: + logging.warning(f"Failed to load dataset for {generator_name}: {e}") + if not datasets: + raise ValueError("No valid datasets loaded from base_data_dir") + return datasets + + def _sample_generator_name(self) -> str: + available = [g for g in self.generator_proportions.keys() if g in self.datasets] + probs = np.array( + [self.generator_proportions[g] for g in available], dtype=float + ) + probs = probs / probs.sum() + return str(self.rng.choice(available, p=probs)) + + def _series_key(self, gen_name: str, sample: dict, values: torch.Tensor) 
-> str: + series_id = sample.get("series_id", None) + if series_id is not None: + return f"{gen_name}:{series_id}" + # Fallback: hash by values and metadata + try: + arr = values.detach().cpu().numpy() + h = hash( + ( + gen_name, + sample.get("start", None), + sample.get("frequency", None), + arr.shape, + float(arr.mean()), + float(arr.std()), + ) + ) + return f"{gen_name}:hash:{h}" + except Exception: + return f"{gen_name}:rand:{self.rng.integers(0, 1 << 31)}" + + def _convert_sample_to_tensor( + self, sample: dict + ) -> Tuple[torch.Tensor, pd.Timestamp, str, int]: + num_channels = sample.get("num_channels", 1) + values_data = sample["values"] + + if num_channels == 1: + if isinstance(values_data[0], list): + values = torch.tensor(values_data[0], dtype=torch.float32) + else: + values = torch.tensor(values_data, dtype=torch.float32) + values = values.unsqueeze(0).unsqueeze(-1) + else: + channel_tensors = [] + for channel_values in values_data: + channel_tensor = torch.tensor(channel_values, dtype=torch.float32) + channel_tensors.append(channel_tensor) + values = torch.stack(channel_tensors, dim=-1).unsqueeze(0) + + freq_str = sample["frequency"] + start_val = sample["start"] + start = ( + start_val + if isinstance(start_val, pd.Timestamp) + else pd.Timestamp(start_val) + ) + return values, start, freq_str, num_channels + + def _shorten_like_batch_composer( + self, values: torch.Tensor, target_len: int + ) -> Optional[torch.Tensor]: + # Only shorten if longer; if shorter than target_len, reject (to keep batch aligned) + seq_len = int(values.shape[1]) + if seq_len == target_len: + return values + if seq_len < target_len: + return None + # Randomly choose cut or subsample with equal probability + strategy = str(self.rng.choice(["cut", "subsample"])) + if strategy == "cut": + max_start_idx = seq_len - target_len + start_idx = int(self.rng.integers(0, max_start_idx + 1)) + return values[:, start_idx : start_idx + target_len, :] + # Subsample evenly spaced indices + indices = np.linspace(0, seq_len - 1, target_len, dtype=int) + return values[:, indices, :] + + def _maybe_apply_scaler(self, values: torch.Tensor) -> torch.Tensor: + scaler_choice = str( + self.rng.choice(["robust", "minmax", "median", "mean", "none"]) + ) + scaler = None + if scaler_choice == "robust": + scaler = RobustScaler() + elif scaler_choice == "minmax": + scaler = MinMaxScaler() + elif scaler_choice == "median": + scaler = MedianScaler() + elif scaler_choice == "mean": + scaler = MeanScaler() + if scaler is not None: + values = scaler.scale(values, scaler.compute_statistics(values)) + return values + + def _apply_augmentations( + self, + batch_values: torch.Tensor, + starts: List[pd.Timestamp], + freqs: List[str], + ) -> torch.Tensor: + if not self.apply_augmentations: + return batch_values + + # 1) Early mixup (batch-level) + if ( + self.mixup_position in ["first", "both"] + and self.augmentations.get("mixup_augmentation", False) + and self.mixup_augmenter is not None + ): + batch_values = self.mixup_augmenter.transform(batch_values) + + # 2) Per-series categories (apply to ALL series with starts/freqs) + batch_size = int(batch_values.shape[0]) + augmented_list = [] + for i in range(batch_size): + s = batch_values[i : i + 1] + start_i = starts[i] if i < len(starts) else None + freq_i = freqs[i] if i < len(freqs) else None + s_aug = self.per_series_augmentor.apply_per_series_only( + s, start=start_i, frequency=freq_i + ) + augmented_list.append(s_aug) + batch_values = torch.cat(augmented_list, dim=0) + + # 3) Noise 
augmentation (batch-level) + if self.augmentations.get("noise_augmentation", False): + if self.rng.random() < self.augmentation_probabilities.get( + "noise_augmentation", 0.5 + ): + noise_std = 0.01 * torch.std(batch_values) + if torch.isfinite(noise_std) and (noise_std > 0): + noise = torch.normal(0, noise_std, size=batch_values.shape) + batch_values = batch_values + noise + + # 4) Scaling augmentation (batch-level) + if self.augmentations.get("scaling_augmentation", False): + if self.rng.random() < self.augmentation_probabilities.get( + "scaling_augmentation", 0.5 + ): + scale_factor = float(self.rng.uniform(0.95, 1.05)) + batch_values = batch_values * scale_factor + + # 5) RandomConvAugmenter (batch-level) + if ( + self.augmentations.get("random_conv_augmentation", False) + and self.random_conv_augmenter is not None + ): + if self.rng.random() < self.augmentation_probabilities.get( + "random_conv_augmentation", 0.3 + ): + batch_values = self.random_conv_augmenter.transform(batch_values) + + # 6) Late mixup (batch-level) + if ( + self.mixup_position in ["last", "both"] + and self.augmentations.get("mixup_augmentation", False) + and self.mixup_augmenter is not None + ): + batch_values = self.mixup_augmenter.transform(batch_values) + + return batch_values + + def _get_one_source_sample( + self, total_length_for_batch: int, used_source_keys: set + ) -> Optional[Tuple[torch.Tensor, pd.Timestamp, str, str]]: + # Returns (values, start, freq, source_key) or None if cannot fetch + attempts = 0 + while attempts < 50: + attempts += 1 + gen_name = self._sample_generator_name() + dataset = self.datasets[gen_name] + sample = dataset.get_samples(1)[0] + values, start, freq_str, num_channels = self._convert_sample_to_tensor( + sample + ) + if num_channels != 1: + continue + # Reject NaNs + if torch.isnan(values).any(): + continue + # Shorten to target_len; reject if too short + shortened = self._shorten_like_batch_composer( + values, total_length_for_batch + ) + if shortened is None: + continue + values = shortened + # Random scaler + values = self._maybe_apply_scaler(values) + # Uniqueness check + key = self._series_key(gen_name, sample, values) + if key in used_source_keys: + continue + # Reserve key immediately to avoid re-use in same temp batch + used_source_keys.add(key) + return values, start, freq_str, key + return None + + def _tensor_to_values_list( + self, series_tensor: torch.Tensor + ) -> Tuple[List[List[float]], int, int]: + seq_len = int(series_tensor.shape[1]) + num_channels = int(series_tensor.shape[2]) + if num_channels == 1: + return [series_tensor.squeeze(0).squeeze(-1).tolist()], seq_len, 1 + channels: List[List[float]] = [] + for ch in range(num_channels): + channels.append(series_tensor[0, :, ch].tolist()) + return channels, seq_len, num_channels + + def run(self, num_batches: int) -> None: + logging.info( + f"Starting offline IID augmentation into {self.dataset_manager.batches_dir} | chunk_size={self.chunk_size} | mixed_batch_size={self.mixed_batch_size}" + ) + + augmented_buffer: List[Dict[str, Any]] = [] + target_batches = num_batches + start_time = time.time() + + try: + while self.dataset_manager.batch_counter < target_batches: + # Decide target length for this temp batch + total_length_for_batch = ( + self.length + if self.length is not None + else int(self.rng.choice(LENGTH_CHOICES)) + ) + + selected_record: Optional[Dict[str, Any]] = None + for _retry in range(max(1, self.temp_batch_retries + 1)): + # Collect a temporary mixed batch without reusing sources + 
temp_values_list: List[torch.Tensor] = [] + temp_starts: List[pd.Timestamp] = [] + temp_freqs: List[str] = [] + temp_used_keys: set = set() + + attempts = 0 + while ( + len(temp_values_list) < self.mixed_batch_size + and attempts < self.mixed_batch_size * 200 + ): + attempts += 1 + fetched = self._get_one_source_sample( + total_length_for_batch, temp_used_keys + ) + if fetched is None: + continue + values, start, freq, _ = fetched + temp_values_list.append(values) + temp_starts.append(start) + temp_freqs.append(freq) + + if len(temp_values_list) == 0: + # If we could not fetch anything, rebuild next retry + continue + + temp_batch = torch.cat(temp_values_list, dim=0) + original_temp_batch = temp_batch.clone() + + # Apply augmentations sequentially + augmented_temp_batch = self._apply_augmentations( + temp_batch, temp_starts, temp_freqs + ) + + # Compute change scores + scores = self._compute_change_scores( + original_temp_batch, augmented_temp_batch + ) + + # Build eligible indices by threshold + eligible = np.where(scores >= self.change_threshold)[0].tolist() + + # Apply quality filter if enabled + if self.enable_quality_filter: + eligible_q: List[int] = [] + for idx in eligible: + cand = augmented_temp_batch[idx : idx + 1] + if not is_low_quality(cand): + eligible_q.append(idx) + eligible = eligible_q + + sel_idx: Optional[int] = None + if self.selection_strategy == "max_change": + if eligible: + sel_idx = int(max(eligible, key=lambda i: scores[i])) + else: + # Fallback to best by score (respect quality if possible) + if self.enable_quality_filter: + qual_idxs = [ + i + for i in range(augmented_temp_batch.shape[0]) + if not is_low_quality( + augmented_temp_batch[i : i + 1] + ) + ] + if qual_idxs: + sel_idx = int( + max(qual_idxs, key=lambda i: scores[i]) + ) + if sel_idx is None: + sel_idx = int(np.argmax(scores)) + else: + # random selection among eligible, else fallback to best + if eligible: + sel_idx = int( + self.rng.choice(np.asarray(eligible, dtype=int)) + ) + else: + if self.enable_quality_filter: + qual_idxs = [ + i + for i in range(augmented_temp_batch.shape[0]) + if not is_low_quality( + augmented_temp_batch[i : i + 1] + ) + ] + if qual_idxs: + sel_idx = int( + max(qual_idxs, key=lambda i: scores[i]) + ) + if sel_idx is None: + sel_idx = int(np.argmax(scores)) + + # If still none (shouldn't happen), rebuild + if sel_idx is None: + continue + + selected_series = augmented_temp_batch[sel_idx : sel_idx + 1] + values_list, seq_len, num_channels = self._tensor_to_values_list( + selected_series + ) + selected_record = { + "series_id": self.dataset_manager.series_counter, + "values": values_list, + "length": int(seq_len), + "num_channels": int(num_channels), + "generator_type": "augmented", + "start": pd.Timestamp(temp_starts[sel_idx]), + "frequency": temp_freqs[sel_idx], + "generation_timestamp": pd.Timestamp.now(), + } + break + + if selected_record is None: + # Could not assemble a valid candidate after retries; skip iteration + continue + + augmented_buffer.append(selected_record) + + if len(augmented_buffer) >= self.chunk_size: + write_start = time.time() + self.dataset_manager.append_batch(augmented_buffer) + write_time = time.time() - write_start + elapsed = time.time() - start_time + series_per_sec = ( + self.dataset_manager.series_counter / elapsed + if elapsed > 0 + else 0 + ) + print( + f"✓ Wrote batch {self.dataset_manager.batch_counter - 1}/{target_batches} | Series: {self.dataset_manager.series_counter:,} | Rate: {series_per_sec:.1f}/s | Write: {write_time:.2f}s" + ) + 
augmented_buffer = [] + + except KeyboardInterrupt: + logging.info( + f"Interrupted. Generated {self.dataset_manager.series_counter} series, {self.dataset_manager.batch_counter} batches." + ) + finally: + if augmented_buffer: + self.dataset_manager.append_batch(augmented_buffer) + logging.info("Offline IID augmentation completed.") + + +def setup_logging(verbose: bool = False) -> None: + level = logging.DEBUG if verbose else logging.INFO + logging.basicConfig( + level=level, + format="%(asctime)s - %(levelname)s - %(message)s", + handlers=[logging.StreamHandler(sys.stdout)], + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Offline IID augmentation script using temp mixed batches", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + + parser.add_argument( + "--base-data-dir", + type=str, + required=True, + help="Base directory with generator subdirectories (inputs)", + ) + parser.add_argument( + "--output-dir", + type=str, + required=True, + help="Base output directory for augmented datasets", + ) + parser.add_argument( + "--length", + type=int, + default=None, + help="Fixed length of augmented series. If set, saves under augmented{length}", + ) + parser.add_argument( + "--mixed-batch-size", + type=int, + default=64, + help="Temporary mixed batch size before selecting a single element", + ) + parser.add_argument( + "--chunk-size", + type=int, + default=2**13, + help="Number of series per written Arrow batch", + ) + parser.add_argument( + "--num-batches", + type=int, + default=1000, + help="Number of Arrow batches to write", + ) + parser.add_argument( + "--mixup-position", + type=str, + default="both", + choices=["first", "last", "both"], + help="Where to apply mixup in the pipeline (first, last, or both)", + ) + parser.add_argument( + "--selection-strategy", + type=str, + default="random", + choices=["random", "max_change"], + help="How to select the final series from the temp batch", + ) + parser.add_argument( + "--change-threshold", + type=float, + default=0.05, + help="Minimum normalized change score (vs IQR) required for selection", + ) + parser.add_argument( + "--enable-quality-filter", + action="store_true", + help="Enable low-quality filter using autocorr/SNR/complexity", + ) + parser.add_argument( + "--temp-batch-retries", + type=int, + default=3, + help="Number of times to rebuild temp batch if selection fails thresholds", + ) + parser.add_argument("--verbose", action="store_true", help="Enable verbose logging") + parser.add_argument( + "--global-seed", type=int, default=42, help="Global random seed" + ) + + args = parser.parse_args() + setup_logging(args.verbose) + + generator_proportions = { + "forecast_pfn": 1.0, + "gp": 1.0, + "kernel": 1.0, + "sinewave": 1.0, + "sawtooth": 1.0, + "step": 0.1, + "anomaly": 1.0, + "spike": 1.0, + "cauker_univariate": 2.0, + "ou_process": 1.0, + "audio_financial_volatility": 0.1, + "audio_multi_scale_fractal": 0.1, + "audio_network_topology": 0.5, + "audio_stochastic_rhythm": 1.0, + } + + # Defaults reflecting configs/train.yaml from the prompt + augmentations = { + "censor_augmentation": True, + "quantization_augmentation": False, + "mixup_augmentation": True, + "time_flip_augmentation": True, + "yflip_augmentation": True, + "differential_augmentation": True, + "regime_change_augmentation": True, + "shock_recovery_augmentation": True, + "calendar_augmentation": False, + "amplitude_modulation_augmentation": True, + "resample_artifacts_augmentation": True, + "scaling_augmentation": True, + "noise_augmentation": 
True, + "random_conv_augmentation": True, + } + + augmentation_probabilities = { + "censor_or_quantization_augmentation": 0.40, + "mixup_augmentation": 0.50, + "time_flip_augmentation": 0.30, + "yflip_augmentation": 0.30, + "differential_augmentation": 0.40, + "regime_change_augmentation": 0.40, + "shock_recovery_augmentation": 0.40, + "calendar_augmentation": 0.40, + "amplitude_modulation_augmentation": 0.35, + "resample_artifacts_augmentation": 0.40, + "scaling_augmentation": 0.50, + "noise_augmentation": 0.10, + "random_conv_augmentation": 0.30, + } + + try: + generator = OfflineTempBatchAugmentedGenerator( + base_data_dir=args.base_data_dir, + output_dir=args.output_dir, + length=args.length, + mixed_batch_size=args.mixed_batch_size, + chunk_size=args.chunk_size, + generator_proportions=generator_proportions, + augmentations=augmentations, + augmentation_probabilities=augmentation_probabilities, + global_seed=args.global_seed, + mixup_position=args.mixup_position, + selection_strategy=args.selection_strategy, + change_threshold=args.change_threshold, + enable_quality_filter=args.enable_quality_filter, + temp_batch_retries=args.temp_batch_retries, + ) + + generator.run(num_batches=args.num_batches) + except Exception as e: + logging.error(f"Fatal error: {e}") + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/synthetic_generation/cauker/cauker_generator.py b/src/synthetic_generation/cauker/cauker_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..d82d2e667d1435215ed8f0b6945a789a447f4022 --- /dev/null +++ b/src/synthetic_generation/cauker/cauker_generator.py @@ -0,0 +1,269 @@ +import functools +import random +from typing import Dict, List, Optional, Tuple, Union + +import cupy as cp +import networkx as nx +import numpy as np +from sklearn.gaussian_process.kernels import ( + RBF, + ConstantKernel, + DotProduct, + ExpSineSquared, + RationalQuadratic, + WhiteKernel, +) + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.generator_params import CauKerGeneratorParams + + +class CauKerGenerator(AbstractTimeSeriesGenerator): + """Structural‑Causal‑Model GP-based time series generator. + + This class is a refactor of the original script-level implementation, exposing + the same logic as instance methods and generating one multivariate series per call. + """ + + def __init__(self, params: CauKerGeneratorParams): + self.params = params + + # ------------------------------------------------------------------------- + # 1. 
Kernel Bank Construction (parameterised by `time_length`) + # ------------------------------------------------------------------------- + def build_kernel_bank(self, time_length: int) -> List: + return [ + # Hourly / sub‑hourly cycles + ExpSineSquared(periodicity=24 / time_length), + ExpSineSquared(periodicity=48 / time_length), + ExpSineSquared(periodicity=96 / time_length), + # Hourly components embedded in weekly structure + ExpSineSquared(periodicity=24 * 7 / time_length), + ExpSineSquared(periodicity=48 * 7 / time_length), + ExpSineSquared(periodicity=96 * 7 / time_length), + # Daily / sub‑daily + ExpSineSquared(periodicity=7 / time_length), + ExpSineSquared(periodicity=14 / time_length), + ExpSineSquared(periodicity=30 / time_length), + ExpSineSquared(periodicity=60 / time_length), + ExpSineSquared(periodicity=365 / time_length), + ExpSineSquared(periodicity=365 * 2 / time_length), + # Weekly / monthly / quarterly variations + ExpSineSquared(periodicity=4 / time_length), + ExpSineSquared(periodicity=26 / time_length), + ExpSineSquared(periodicity=52 / time_length), + ExpSineSquared(periodicity=4 / time_length), + ExpSineSquared(periodicity=6 / time_length), + ExpSineSquared(periodicity=12 / time_length), + ExpSineSquared(periodicity=4 / time_length), + ExpSineSquared(periodicity=(4 * 10) / time_length), + ExpSineSquared(periodicity=10 / time_length), + # Stationary + noise kernels + DotProduct(sigma_0=0.0), + DotProduct(sigma_0=1.0), + DotProduct(sigma_0=10.0), + RBF(length_scale=0.1), + RBF(length_scale=1.0), + RBF(length_scale=10.0), + RationalQuadratic(alpha=0.1), + RationalQuadratic(alpha=1.0), + RationalQuadratic(alpha=10.0), + WhiteKernel(noise_level=0.1), + WhiteKernel(noise_level=1.0), + ConstantKernel(), + ] + + # ------------------------------------------------------------------------- + # 2. Binary map utility for kernel algebra + # ------------------------------------------------------------------------- + def random_binary_map(self, a, b): + binary_ops = [lambda x, y: x + y, lambda x, y: x * y] + return np.random.choice(binary_ops)(a, b) + + # ------------------------------------------------------------------------- + # 3. Mean‑function library + # ------------------------------------------------------------------------- + def zero_mean(self, x: np.ndarray) -> np.ndarray: + return np.zeros_like(x) + + def linear_mean(self, x: np.ndarray) -> np.ndarray: + a = np.random.uniform(-1.0, 1.0) + b = np.random.uniform(-1.0, 1.0) + return a * x + b + + def exponential_mean(self, x: np.ndarray) -> np.ndarray: + a = np.random.uniform(0.5, 1.5) + b = np.random.uniform(0.5, 1.5) + return a * np.exp(b * x) + + def anomaly_mean(self, x: np.ndarray) -> np.ndarray: + m = np.zeros_like(x) + num_anomalies = np.random.randint(1, 6) + for _ in range(num_anomalies): + idx = np.random.randint(0, len(x)) + m[idx] += np.random.uniform(-5.0, 5.0) + return m + + def random_mean_combination(self, x: np.ndarray) -> np.ndarray: + mean_functions = [ + self.zero_mean, + self.linear_mean, + self.exponential_mean, + self.anomaly_mean, + ] + m1, m2 = np.random.choice(mean_functions, 2, replace=True) + combine_ops = [lambda u, v: u + v, lambda u, v: u * v] + return np.random.choice(combine_ops)(m1(x), m2(x)) + + # ------------------------------------------------------------------------- + # 4. 
GPU‑accelerated sampling from the GP prior + # ------------------------------------------------------------------------- + def sample_from_gp_prior_efficient_gpu( + self, + *, + kernel, + X: np.ndarray, + random_seed: Optional[int] = None, + method: str = "eigh", + mean_vec: Optional[np.ndarray] = None, + ) -> np.ndarray: + if X.ndim == 1: + X = X[:, None] + + cov_cpu = kernel(X) + n = X.shape[0] + + mean_vec = np.zeros(n, dtype=np.float64) if mean_vec is None else mean_vec + + cov_gpu = cp.asarray(cov_cpu) + mean_gpu = cp.asarray(mean_vec) + + if random_seed is not None: + cp.random.seed(random_seed) + + ts_gpu = cp.random.multivariate_normal( + mean=mean_gpu, cov=cov_gpu, method=method + ) + return cp.asnumpy(ts_gpu) + + # ------------------------------------------------------------------------- + # 5. Structural‑Causal‑Model time‑series generator (parameterised) + # ------------------------------------------------------------------------- + def generate_random_dag(self, num_nodes: int, max_parents: int = 3) -> nx.DiGraph: + G = nx.DiGraph() + nodes = list(range(num_nodes)) + random.shuffle(nodes) + G.add_nodes_from(nodes) + for i in range(num_nodes): + possible_parents = nodes[:i] + num_par = np.random.randint(0, min(len(possible_parents), max_parents) + 1) + for p in random.sample(possible_parents, num_par): + G.add_edge(p, nodes[i]) + return G + + def random_activation(self, x: np.ndarray, func_type: str = "linear") -> np.ndarray: + if func_type == "linear": + a = np.random.uniform(0.5, 2.0) + b = np.random.uniform(-1.0, 1.0) + return a * x + b + if func_type == "relu": + return np.maximum(0.0, x) + if func_type == "sigmoid": + return 1.0 / (1.0 + np.exp(-x)) + if func_type == "sin": + return np.sin(x) + if func_type == "mod": + c = np.random.uniform(1.0, 5.0) + return np.mod(x, c) + # default: leaky‑ReLU + alpha = np.random.uniform(0.01, 0.3) + return np.where(x > 0, x, alpha * x) + + def random_edge_mapping(self, parents_data: List[np.ndarray]) -> np.ndarray: + combined = np.stack(parents_data, axis=1) + W = np.random.randn(len(parents_data)) + b = np.random.randn() + non_linear_input = combined @ W + b + chosen_func = np.random.choice( + ["linear", "relu", "sigmoid", "sin", "mod", "leakyrelu"] + ) + return self.random_activation(non_linear_input, chosen_func) + + # ------------------------------------------------------------------------- + # 6. 
End‑to‑end SCM sampler + # ------------------------------------------------------------------------- + def generate_scm_time_series( + self, + *, + time_length: int, + num_features: int, + max_parents: int, + seed: int, + num_nodes: int, + ) -> Dict[int, np.ndarray]: + np.random.seed(seed) + random.seed(seed) + + dag = self.generate_random_dag(num_nodes, max_parents=max_parents) + kernel_bank = self.build_kernel_bank(time_length) + + root_nodes = [n for n in dag.nodes if dag.in_degree(n) == 0] + node_data: Dict[int, np.ndarray] = {} + + X = np.linspace(0.0, 1.0, time_length) + + # Sample roots directly from the GP prior + for r in root_nodes: + selected_kernels = np.random.choice( + kernel_bank, np.random.randint(1, 8), replace=True + ) + kernel = functools.reduce(self.random_binary_map, selected_kernels) + mean_vec = self.random_mean_combination(X) + node_data[r] = self.sample_from_gp_prior_efficient_gpu( + kernel=kernel, X=X, mean_vec=mean_vec, random_seed=seed + ) + + # Propagate through DAG + for node in nx.topological_sort(dag): + if node in root_nodes: + continue + parents = list(dag.predecessors(node)) + parents_ts = [node_data[p] for p in parents] + node_data[node] = self.random_edge_mapping(parents_ts) + + return node_data + + # ------------------------------------------------------------------------- + # Public API: generate one multivariate series (length, num_channels) + # ------------------------------------------------------------------------- + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """Generate one multivariate series with shape (length, num_channels).""" + seed = self.params.global_seed if random_seed is None else random_seed + + # Resolve num_channels which can be int or (min, max) + desired_channels: Union[int, Tuple[int, int]] = self.params.num_channels + if isinstance(desired_channels, tuple): + low, high = desired_channels + if low > high: + low, high = high, low + num_channels = int(np.random.default_rng(seed).integers(low, high + 1)) + else: + num_channels = int(desired_channels) + + if num_channels > self.params.num_nodes: + raise ValueError( + f"num_channels ({num_channels}) cannot exceed num_nodes ({self.params.num_nodes})." + ) + + node_data = self.generate_scm_time_series( + time_length=self.params.length, + num_features=num_channels, + max_parents=self.params.max_parents, + seed=seed, + num_nodes=self.params.num_nodes, + ) + + chosen_nodes = random.sample(list(node_data.keys()), num_channels) + channels = [node_data[n].astype(np.float32) for n in chosen_nodes] + values = np.stack(channels, axis=1) # (length, num_channels) + return values diff --git a/src/synthetic_generation/cauker/cauker_generator_wrapper.py b/src/synthetic_generation/cauker/cauker_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..f900f309555da411e71661f88ebf2b15b51685b4 --- /dev/null +++ b/src/synthetic_generation/cauker/cauker_generator_wrapper.py @@ -0,0 +1,74 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.cauker.cauker_generator import CauKerGenerator +from src.synthetic_generation.generator_params import CauKerGeneratorParams + + +class CauKerGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for CauKerGenerator that handles batch generation and formatting. 
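+    ``num_channels`` may be given as an int, an inclusive ``(min, max)`` tuple,
+    or a list of candidate values; it is resolved to a single value once per
+    batch in ``_sample_parameters``, and each series in the batch is generated
+    with seed ``base_seed + i`` so that runs are reproducible.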
+ """ + + def __init__(self, params: CauKerGeneratorParams): + super().__init__(params) + self.params: CauKerGeneratorParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + params = super()._sample_parameters(batch_size) + # Resolve num_channels if range is given: sample once per batch for consistency + desired_channels = self.params.num_channels + if isinstance(desired_channels, tuple) and len(desired_channels) == 2: + low, high = desired_channels + if low > high: + low, high = high, low + num_channels = int(self.rng.integers(low, high + 1)) + elif isinstance(desired_channels, list): + num_channels = int(self.rng.choice(desired_channels)) + else: + num_channels = int(desired_channels) + + params.update( + { + "length": self.params.length, + "num_channels": num_channels, + "max_parents": self.params.max_parents, + "num_nodes": self.params.num_nodes, + } + ) + return params + + def generate_batch( + self, batch_size: int, seed: Optional[int] = None + ) -> TimeSeriesContainer: + # Establish a base seed to ensure different series use different seeds + base_seed = seed if seed is not None else self.params.global_seed + self._set_random_seeds(base_seed) + + sampled = self._sample_parameters(batch_size) + + batch_params = CauKerGeneratorParams( + global_seed=self.params.global_seed, + length=sampled["length"], + frequency=None, + start=None, + num_channels=sampled["num_channels"], + max_parents=sampled["max_parents"], + num_nodes=sampled["num_nodes"], + ) + generator = CauKerGenerator(batch_params) + + values = [] + for i in range(batch_size): + series_seed = base_seed + i + series = generator.generate_time_series(series_seed) + values.append(series) + + return TimeSeriesContainer( + values=np.array(values, dtype=np.float32), + start=sampled["start"], + frequency=sampled["frequency"], + ) diff --git a/src/synthetic_generation/continuous_generation.py b/src/synthetic_generation/continuous_generation.py new file mode 100644 index 0000000000000000000000000000000000000000..62910e73714d18a61cadc4262ce3ed13e3dfab1b --- /dev/null +++ b/src/synthetic_generation/continuous_generation.py @@ -0,0 +1,599 @@ +import argparse +import logging +import os +import random +import signal +import sys +import tempfile +import time +from pathlib import Path +from typing import Any, Dict, List, Optional + +import numpy as np +import pandas as pd +import pyarrow as pa +import pyarrow.feather as feather + +from src.synthetic_generation.anomalies.anomaly_generator_wrapper import ( + AnomalyGeneratorWrapper, +) +from src.synthetic_generation.audio_generators.financial_volatility_wrapper import ( + FinancialVolatilityAudioWrapper, +) +from src.synthetic_generation.audio_generators.multi_scale_fractal_wrapper import ( + MultiScaleFractalAudioWrapper, +) +from src.synthetic_generation.audio_generators.network_topology_wrapper import ( + NetworkTopologyAudioWrapper, +) +from src.synthetic_generation.audio_generators.stochastic_rhythm_wrapper import ( + StochasticRhythmAudioWrapper, +) +from src.synthetic_generation.cauker.cauker_generator_wrapper import ( + CauKerGeneratorWrapper, +) +from src.synthetic_generation.forecast_pfn_prior.forecast_pfn_generator_wrapper import ( + ForecastPFNGeneratorWrapper, +) +from src.synthetic_generation.generator_params import ( + AnomalyGeneratorParams, + CauKerGeneratorParams, + FinancialVolatilityAudioParams, + ForecastPFNGeneratorParams, + GPGeneratorParams, + KernelGeneratorParams, + MultiScaleFractalAudioParams, + NetworkTopologyAudioParams, + 
OrnsteinUhlenbeckProcessGeneratorParams, + SawToothGeneratorParams, + SineWaveGeneratorParams, + SpikesGeneratorParams, + StepGeneratorParams, + StochasticRhythmAudioParams, +) +from src.synthetic_generation.gp_prior.gp_generator_wrapper import GPGeneratorWrapper +from src.synthetic_generation.kernel_synth.kernel_generator_wrapper import ( + KernelGeneratorWrapper, +) +from src.synthetic_generation.ornstein_uhlenbeck_process.ou_generator_wrapper import ( + OrnsteinUhlenbeckProcessGeneratorWrapper, +) +from src.synthetic_generation.sawtooth.sawtooth_generator_wrapper import ( + SawToothGeneratorWrapper, +) +from src.synthetic_generation.sine_waves.sine_wave_generator_wrapper import ( + SineWaveGeneratorWrapper, +) +from src.synthetic_generation.spikes.spikes_generator_wrapper import ( + SpikesGeneratorWrapper, +) +from src.synthetic_generation.steps.step_generator_wrapper import StepGeneratorWrapper + + +class TimeSeriesDatasetManager: + """Manages writing time series data to disk in batches, safe for parallel runs.""" + + def __init__(self, output_path: str, batch_size: int = 2**16): + self.output_path = Path(output_path) + self.output_path.mkdir(parents=True, exist_ok=True) + self.batches_dir = self.output_path + self.batch_size = batch_size + self.series_counter = 0 + + self.schema = pa.schema( + [ + ("series_id", pa.int64()), + ("values", pa.list_(pa.list_(pa.float64()))), + ("length", pa.int32()), + ("num_channels", pa.int32()), + ("generator_type", pa.string()), + ("start", pa.timestamp("ns")), + ("frequency", pa.string()), + ("generation_timestamp", pa.timestamp("ns")), + ] + ) + self._initialize_state() + + def _initialize_state(self) -> None: + """Initializes state by scanning existing files to count total series.""" + existing_batches = sorted(self.batches_dir.glob("batch_*.arrow")) + total_series = 0 + if not existing_batches: + logging.info("No existing batches found. 
Starting from scratch.") + else: + for batch_file in existing_batches: + try: + batch_table = feather.read_table(batch_file) + total_series += len(batch_table) + except Exception as e: + logging.warning(f"Error reading {batch_file}: {e}, skipping.") + self.series_counter = total_series + logging.info(f"Found {self.series_counter} existing series in dataset.") + + def get_current_series_count(self) -> int: + """Returns the total number of series found on disk at initialization.""" + return self.series_counter + + def append_batch(self, batch_data: List[Dict[str, Any]]) -> None: + """Appends a batch to a new file using an atomic rename for parallel safety.""" + if not batch_data: + return + + try: + arrays = [] + for field in self.schema: + field_name = field.name + if field_name in ["start", "generation_timestamp"]: + timestamps = [d[field_name] for d in batch_data] + arrays.append( + pa.array([t.value for t in timestamps], type=pa.timestamp("ns")) + ) + else: + arrays.append(pa.array([d[field_name] for d in batch_data])) + new_table = pa.Table.from_arrays(arrays, schema=self.schema) + except Exception as e: + logging.error(f"Error creating Arrow table: {e}") + raise + + tmp_path = None + try: + with tempfile.NamedTemporaryFile( + delete=False, dir=self.batches_dir, suffix=".arrow.tmp" + ) as tmp: + tmp_path = tmp.name + feather.write_feather(new_table, tmp_path) + + max_retries = 20 + for _ in range(max_retries): + existing = self.batches_dir.glob("batch_*.arrow") + batch_nums = [ + int(p.stem.split("_")[1]) + for p in existing + if p.stem.split("_")[1].isdigit() + ] + next_num = max(batch_nums) + 1 if batch_nums else 0 + target_path = self.batches_dir / f"batch_{next_num:08d}.arrow" + try: + os.rename(tmp_path, target_path) + self.series_counter += len(batch_data) + logging.info( + f"Saved {target_path.name} with {len(batch_data)} series." + ) + return + except FileExistsError: + logging.warning( + f"Race condition on {target_path.name}. Retrying..." 
+ ) + time.sleep(random.uniform(0.1, 1.0)) + + raise IOError("Failed to write batch due to file conflicts.") + finally: + if tmp_path and os.path.exists(tmp_path): + os.remove(tmp_path) + + +class GeneratorWrapper: + def __init__( + self, + generator_type: str, + length: int = 2048, + global_seed: int = 42, + num_channels: Optional[int] = None, + ): + self.generator_type = generator_type + self.length = length + self.is_multivariate = generator_type.lower() in [ + "cauker_multivariate", + ] + self.explode_multivariate_to_univariate = ( + generator_type.lower() == "cauker_univariate" + ) + self._explode_channels = 0 + + # Create appropriate parameter object and wrapper + if generator_type.lower() == "gp": + params = GPGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = GPGeneratorWrapper(params) + elif generator_type.lower() == "kernel": + params = KernelGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = KernelGeneratorWrapper(params) + elif generator_type.lower() == "forecast_pfn": + params = ForecastPFNGeneratorParams( + global_seed=global_seed, + length=length, + max_absolute_spread=500.0, + max_absolute_value=500.0, + ) + self.wrapper = ForecastPFNGeneratorWrapper(params) + elif generator_type.lower() == "sinewave": + params = SineWaveGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = SineWaveGeneratorWrapper(params) + elif generator_type.lower() == "sawtooth": + params = SawToothGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = SawToothGeneratorWrapper(params) + elif generator_type.lower() == "cauker_univariate": + params = CauKerGeneratorParams( + global_seed=global_seed, + length=length, + num_channels=6, + ) + self.wrapper = CauKerGeneratorWrapper(params) + self._explode_channels = 6 + elif generator_type.lower() == "cauker_multivariate": + effective_channels = ( + int(num_channels) + if num_channels is not None + else CauKerGeneratorParams().num_channels # type: ignore[arg-type] + ) + params = CauKerGeneratorParams( + global_seed=global_seed, + length=length, + num_channels=effective_channels, + num_nodes=effective_channels, + ) + self.wrapper = CauKerGeneratorWrapper(params) + elif generator_type.lower() == "step": + params = StepGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = StepGeneratorWrapper(params) + elif generator_type.lower() == "spike": + params = SpikesGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = SpikesGeneratorWrapper(params) + elif generator_type.lower() == "anomaly": + params = AnomalyGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = AnomalyGeneratorWrapper(params) + elif generator_type.lower() == "ou_process": + params = OrnsteinUhlenbeckProcessGeneratorParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = OrnsteinUhlenbeckProcessGeneratorWrapper(params) + elif generator_type.lower() == "audio_financial_volatility": + params = FinancialVolatilityAudioParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = FinancialVolatilityAudioWrapper(params) + elif generator_type.lower() == "audio_multi_scale_fractal": + params = MultiScaleFractalAudioParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = MultiScaleFractalAudioWrapper(params) + elif generator_type.lower() == "audio_stochastic_rhythm": + params = StochasticRhythmAudioParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = 
StochasticRhythmAudioWrapper(params) + elif generator_type.lower() == "audio_network_topology": + params = NetworkTopologyAudioParams( + global_seed=global_seed, + length=length, + ) + self.wrapper = NetworkTopologyAudioWrapper(params) + else: + raise ValueError(f"Unsupported generator type: {generator_type}") + + def generate_batch(self, batch_size: int, start_seed: int) -> List[Dict[str, Any]]: + """Generate a batch of time series using the wrapper's batch generation.""" + try: + if self.explode_multivariate_to_univariate and self._explode_channels > 0: + base_batch_size = int(np.ceil(batch_size / self._explode_channels)) + container = self.wrapper.generate_batch( + batch_size=base_batch_size, seed=start_seed + ) + else: + container = self.wrapper.generate_batch( + batch_size=batch_size, seed=start_seed + ) + + batch_data = [] + container_batch_size = container.values.shape[0] + for i in range(container_batch_size): + series_id_base = start_seed + i + + if self.explode_multivariate_to_univariate: + series_data = container.values[i] + if series_data.ndim != 2: + raise ValueError( + "Expected multivariate data for CauKer univariate mode" + ) + num_channels = series_data.shape[1] + for channel in range(num_channels): + channel_values = self._ensure_proper_format( + series_data[:, channel] + ) + values_list = [channel_values.tolist()] + batch_data.append( + { + "series_id": series_id_base * 1_000 + channel, + "values": values_list, + "length": len(channel_values), + "num_channels": 1, + "generator_type": self.generator_type, + "start": pd.Timestamp(container.start[i]), + "frequency": container.frequency[i].value, + "generation_timestamp": pd.Timestamp.now(), + } + ) + continue + elif self.is_multivariate: + series_data = container.values[i] + num_channels = series_data.shape[1] + values_list = [ + self._ensure_proper_format(series_data[:, c]).tolist() + for c in range(num_channels) + ] + seq_length = len(values_list[0]) + else: + values = self._ensure_proper_format(container.values[i, :]) + values_list = [values.tolist()] + num_channels = 1 + seq_length = len(values) + + batch_data.append( + { + "series_id": series_id_base, + "values": values_list, + "length": seq_length, + "num_channels": num_channels, + "generator_type": self.generator_type, + "start": pd.Timestamp(container.start[i]), + "frequency": container.frequency[i].value, + "generation_timestamp": pd.Timestamp.now(), + } + ) + + if self.explode_multivariate_to_univariate: + batch_data = batch_data[:batch_size] + + return batch_data + + except Exception as e: + logging.error(f"Error generating batch: {e}") + return [] + + def _ensure_proper_format(self, values: Any) -> np.ndarray: + values = np.asarray(values).flatten() + if len(values) != self.length: + logging.warning( + f"Generated series length {len(values)} != expected {self.length}. Padding/truncating." 
+ ) + if len(values) > self.length: + values = values[: self.length] + else: + values = np.pad(values, (0, self.length - len(values)), mode="constant") + return values.astype(np.float64) + + +class ContinuousGenerator: + def __init__( + self, + generator_wrapper: GeneratorWrapper, + dataset_manager: TimeSeriesDatasetManager, + batch_size: int = 2**16, + run_id: int = 0, + ): + self.generator_wrapper = generator_wrapper + self.dataset_manager = dataset_manager + self.batch_size = batch_size + self.run_id = run_id + self.series_in_run = 0 + self.partial_batch_data: List[Dict[str, Any]] = [] + self.shutting_down = False + logging.info(f"Generator initialized for run_id: {self.run_id}") + + def _setup_signal_handlers(self) -> None: + """Sets up signal handlers for graceful shutdown.""" + self.original_sigint = signal.getsignal(signal.SIGINT) + self.original_sigterm = signal.getsignal(signal.SIGTERM) + + def graceful_shutdown(signum, frame): + if self.shutting_down: + return + self.shutting_down = True + logging.warning( + f"\nSignal {signal.Signals(signum).name} received. Shutting down." + ) + if self.partial_batch_data: + logging.info( + f"Saving incomplete batch of {len(self.partial_batch_data)} series..." + ) + try: + self.dataset_manager.append_batch(self.partial_batch_data) + except Exception as e: + logging.error(f"Could not save partial batch on exit: {e}") + sys.exit(0) + + signal.signal(signal.SIGINT, graceful_shutdown) + signal.signal(signal.SIGTERM, graceful_shutdown) + + def run_continuous(self, num_batches_to_generate: int) -> None: + """Runs the generation loop, creating chunks and saving batches.""" + self._setup_signal_handlers() + logging.info(f"Job starting. Goal: {num_batches_to_generate} new batches.") + start_time = time.time() + batches_completed = 0 + + while batches_completed < num_batches_to_generate: + if self.shutting_down: + logging.info("Shutdown signal caught, stopping generation.") + break + + chunk_size = min(64, self.batch_size - len(self.partial_batch_data)) + + # Create a seed that fits in uint32 range by combining run_id and series count + # Use modulo to ensure it stays within valid range + series_id_start = (self.run_id + self.series_in_run) % (2**32) + + new_chunk = self.generator_wrapper.generate_batch( + batch_size=chunk_size, start_seed=series_id_start + ) + + if not new_chunk: + logging.error("Generator failed to produce data. Stopping job.") + break + + self.partial_batch_data.extend(new_chunk) + self.series_in_run += len(new_chunk) + + if len(self.partial_batch_data) >= self.batch_size: + batch_to_write = self.partial_batch_data[: self.batch_size] + self.partial_batch_data = self.partial_batch_data[self.batch_size :] + self.dataset_manager.append_batch(batch_to_write) + batches_completed += 1 + + elapsed = time.time() - start_time + series_per_sec = ( + (batches_completed * self.batch_size) / elapsed + if elapsed > 0 + else 0 + ) + print( + f"✓ Completed batch {batches_completed}/{num_batches_to_generate} in job | " + f"Total Series in DS: {self.dataset_manager.series_counter:,} | " + f"Rate: {series_per_sec:.1f}/s" + ) + + if not self.shutting_down and self.partial_batch_data: + logging.info( + f"Job finished. Saving final partial batch of {len(self.partial_batch_data)}." 
+ ) + self.dataset_manager.append_batch(self.partial_batch_data) + + +def setup_logging(verbose: bool = False) -> None: + level = logging.DEBUG if verbose else logging.INFO + logging.basicConfig( + level=level, + format="%(asctime)s - %(levelname)s - %(message)s", + handlers=[logging.StreamHandler(sys.stdout)], + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Continuous time series generation script", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "--generator", + type=str, + required=True, + choices=[ + "forecast_pfn", + "gp", + "kernel", + "cauker_univariate", + "cauker_multivariate", + "sinewave", + "sawtooth", + "step", + "spike", + "anomaly", + "ou_process", + "audio_financial_volatility", + "audio_multi_scale_fractal", + "audio_stochastic_rhythm", + "audio_network_topology", + ], + help="Type of generator to use", + ) + parser.add_argument( + "--output-dir", + type=str, + required=True, + help="Output directory for datasets", + ) + parser.add_argument( + "--length", type=int, default=2048, help="Length of each time series" + ) + parser.add_argument( + "--batch-size", + type=int, + default=16384, + help="Number of series per batch file", + ) + parser.add_argument( + "--num-batches", + type=int, + default=100, + help="Number of batches to generate in this job run", + ) + parser.add_argument( + "--num-channels", + type=int, + help="Number of channels for multivariate generators (cauker_multivariate)", + ) + parser.add_argument("--verbose", action="store_true", help="Enable verbose logging") + args = parser.parse_args() + + setup_logging(args.verbose) + + # Use a high-precision timestamp for a unique run ID and a compatible seed + run_id = time.time_ns() + global_seed = run_id % (2**32) + logging.info(f"Using unique run ID: {run_id} (Seed: {global_seed})") + + gen_name = args.generator.lower() + if gen_name in ["cauker_multivariate"]: + if args.num_channels is None or args.num_channels < 2: + logging.error( + "--num-channels (>=2) is required for multivariate generators" + ) + sys.exit(2) + dataset_dir_name = ( + f"cauker_{args.num_channels}_variates" + ) + else: + dataset_dir_name = args.generator + + output_path = Path(args.output_dir) / dataset_dir_name + + try: + generator_wrapper = GeneratorWrapper( + generator_type=args.generator, + length=args.length, + global_seed=global_seed, + num_channels=args.num_channels, + ) + dataset_manager = TimeSeriesDatasetManager( + str(output_path), batch_size=args.batch_size + ) + continuous_gen = ContinuousGenerator( + generator_wrapper=generator_wrapper, + dataset_manager=dataset_manager, + batch_size=args.batch_size, + run_id=run_id, + ) + continuous_gen.run_continuous(num_batches_to_generate=args.num_batches) + logging.info("Generation job completed successfully!") + + except Exception as e: + logging.error(f"Fatal error: {e}", exc_info=True) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/synthetic_generation/forecast_pfn_prior/forecast_pfn_generator.py b/src/synthetic_generation/forecast_pfn_prior/forecast_pfn_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..2452d13b94e9e68edcfe4021b876d0f79d4a1e09 --- /dev/null +++ b/src/synthetic_generation/forecast_pfn_prior/forecast_pfn_generator.py @@ -0,0 +1,726 @@ +import logging +from typing import Dict, Optional + +import numpy as np +import pandas as pd +from pandas.tseries.frequencies import to_offset + +from src.data.frequency import FREQUENCY_MAPPING, Frequency +from 
src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.forecast_pfn_prior.series_config import ( + ComponentNoise, + ComponentScale, + SeriesConfig, +) +from src.synthetic_generation.forecast_pfn_prior.utils import ( + get_random_walk_series, + get_transition_coefficients, + sample_scale, + shift_axis, + weibull_noise, +) +from src.synthetic_generation.generator_params import ForecastPFNGeneratorParams +from src.synthetic_generation.utils import generate_spikes + + +class ForecastPFNGenerator(AbstractTimeSeriesGenerator): + """ + Time series generator based on the ForecastPFN prior. + + This generator creates synthetic time series with configurable trends, seasonality, + and noise patterns. It includes built-in filtering to avoid extreme values and + supports various univariate augmentations. + """ + + def __init__( + self, + params: ForecastPFNGeneratorParams, + length: int = 1024, + random_seed: Optional[int] = None, + max_absolute_spread: float = 300.0, + max_absolute_value: float = 300.0, + max_retries: int = 100, + ) -> None: + """ + Initialize the ForecastPFNGenerator. + + Parameters + ---------- + params : ForecastPFNGeneratorParams + Configuration parameters for the generator + length : int, default=1024 + Length of the generated time series + random_seed : Optional[int], default=None + Random seed for reproducibility + max_absolute_spread : float, default=300.0 + Maximum allowed spread (max - min) for generated series + max_absolute_value : float, default=300.0 + Maximum allowed absolute value for any point in the series + max_retries : int, default=100 + Maximum number of retry attempts if generation fails or produces extreme values + """ + self.params = params + self.length = length + self.rng = np.random.default_rng(random_seed) + self.frequency = params.frequency + + # Filtering parameters + self.max_absolute_spread = max_absolute_spread + self.max_absolute_value = max_absolute_value + self.max_retries = max_retries + + def _calculate_scaled_exp_base(self, timescale: float) -> float: + """ + Calculate an exponential base that is scaled according to the series length + to prevent extreme values at the end of long sequences. 
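+        The sampled base is clamped so that the exponential multiplier implied
+        over roughly ``1.1 * length * timescale`` days stays between a 0.1x decay
+        and a 10x growth, and the result is finally capped to ``[0.0001, 1.01]``.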
+ + Parameters + ---------- + timescale : float + The timescale factor for the frequency + + Returns + ------- + float + Scaled exponential base that keeps final values within reasonable bounds + """ + if not self.params.trend_exp: + return 1.0 + + # Estimate maximum days in the series based on length and frequency + # For most frequencies, each step represents timescale days + max_days = self.length * timescale + + # Sample a raw exponential base with the original logic + raw_exp_base = self.rng.normal(1, 0.005 / timescale) + + # Define reasonable bounds for the final exponential multiplier + # Allow growth/decay up to 10x in either direction + max_growth_factor = 10.0 + min_decay_factor = 0.1 + + # Calculate what the maximum absolute exponent could be + # considering the offset range of (-0.1, 0.5) + # Worst case is when |1 - offset| * max_days is maximized + max_abs_exponent = 1.1 * max_days # Conservative estimate + + if raw_exp_base > 1.0: + # For growth, ensure base^max_abs_exponent <= max_growth_factor + max_allowed_base = max_growth_factor ** (1.0 / max_abs_exponent) + scaled_base = min(raw_exp_base, max_allowed_base) + elif raw_exp_base < 1.0: + # For decay, ensure base^max_abs_exponent >= min_decay_factor + min_allowed_base = min_decay_factor ** (1.0 / max_abs_exponent) + scaled_base = max(raw_exp_base, min_allowed_base) + else: + scaled_base = raw_exp_base + + # Apply the original bounds as a final safety check + return max(0.0001, min(1.01, scaled_base)) + + def _is_series_acceptable(self, values: np.ndarray) -> bool: + """ + Check if a generated time series meets the spread and value criteria. + + Parameters + ---------- + values : np.ndarray + Time series values to check + + Returns + ------- + bool + True if series is acceptable, False if it should be regenerated + """ + if len(values) == 0: + return False + + # Check for non-finite values + if not np.all(np.isfinite(values)): + return False + + min_val = np.min(values) + max_val = np.max(values) + spread = max_val - min_val + + # Check absolute spread threshold + if spread > self.max_absolute_spread: + return False + + # Check absolute value thresholds + if ( + abs(min_val) > self.max_absolute_value + or abs(max_val) > self.max_absolute_value + ): + return False + + return True + + def _generate_damping( + self, input_size: int, p: list = [0.4, 0.5, 0.1] + ) -> np.ndarray: + """Generate damping effect for a univariate time series.""" + spacing = self.rng.choice(["equal", "regular", "random"], p=p) + t = np.arange(0, input_size, 1).astype(float) + + if spacing == "random": + num_steps = self.rng.integers(1, 3) + damping_intervals = np.sort( + self.rng.choice(t[: -int(input_size * 0.1)], num_steps, replace=False) + ) + damping_factors = self.rng.uniform(0.1, 2, num_steps + 1) + elif spacing == "equal": + num_steps = self.rng.integers(3, 7) + damping_intervals = np.linspace(0, input_size, num_steps + 2)[1:-1] + damping_factors = np.array( + [ + self.rng.uniform(0.4, 0.8) + if (i % 2) == 0 + else self.rng.uniform(1, 2) + for i in range(num_steps + 1) + ] + ) + else: + custom_lengths = self.rng.integers(1, input_size // 2, 2) + damping_intervals = [] + current_time = 0 + while current_time < input_size: + for length in custom_lengths: + current_time += length + if current_time <= input_size: + damping_intervals.append(current_time) + else: + break + damping_intervals = np.array(damping_intervals) + num_steps = len(damping_intervals) + damping_factors = np.array( + [ + self.rng.uniform(0.4, 0.8) + if (i % 2) == 0 + else 
self.rng.uniform(1, 2) + for i in range(num_steps + 1) + ] + ) + + damping = np.piecewise( + t, + [t < damping_intervals[0]] + + [ + (t >= damping_intervals[i]) & (t < damping_intervals[i + 1]) + for i in range(num_steps - 1) + ] + + [t >= damping_intervals[-1]], + damping_factors.tolist(), + ) + return damping + + def _apply_time_warping( + self, values: np.ndarray, warp_strength: float = 0.1 + ) -> np.ndarray: + """Apply time warping augmentation to univariate series.""" + length = len(values) + # Create smooth random warping function + n_knots = max(3, length // 20) # Adaptive number of knots + knot_positions = np.linspace(0, length - 1, n_knots) + warp_offsets = self.rng.normal(0, warp_strength * length, n_knots) + warp_offsets[0] = warp_offsets[-1] = 0 # Keep endpoints fixed + + # Interpolate to get smooth warping + original_indices = np.arange(length) + warped_indices = np.interp( + original_indices, knot_positions, knot_positions + warp_offsets + ) + warped_indices = np.clip(warped_indices, 0, length - 1) + + # Interpolate values at warped positions + return np.interp(warped_indices, original_indices, values) + + def _apply_magnitude_scaling( + self, values: np.ndarray, scale_range: tuple = (0.8, 1.2) + ) -> np.ndarray: + """Apply random magnitude scaling to different segments of the series.""" + length = len(values) + num_segments = self.rng.integers(1, 4) + segment_boundaries = np.sort( + self.rng.choice(length, num_segments - 1, replace=False) + ) + segment_boundaries = np.concatenate([[0], segment_boundaries, [length]]) + + scaled_values = values.copy() + for i in range(len(segment_boundaries) - 1): + start, end = segment_boundaries[i], segment_boundaries[i + 1] + scale_factor = self.rng.uniform(scale_range[0], scale_range[1]) + scaled_values[start:end] *= scale_factor + + return scaled_values + + def _apply_univariate_augmentations(self, values: np.ndarray) -> np.ndarray: + """Apply univariate-specific augmentations to a single time series.""" + augmented_values = np.asarray(values).copy() + + # Apply time warping with some probability + if ( + hasattr(self.params, "time_warp_prob") + and self.rng.random() < self.params.time_warp_prob + ): + warp_strength = getattr(self.params, "time_warp_strength", 0.05) + augmented_values = self._apply_time_warping(augmented_values, warp_strength) + + # Apply magnitude scaling with some probability + if ( + hasattr(self.params, "magnitude_scale_prob") + and self.rng.random() < self.params.magnitude_scale_prob + ): + scale_range = getattr(self.params, "magnitude_scale_range", (0.9, 1.1)) + augmented_values = self._apply_magnitude_scaling( + augmented_values, scale_range + ) + + # Apply damping augmentation + if ( + hasattr(self.params, "damping_prob") + and self.rng.random() < self.params.damping_prob + ): + damping = self._generate_damping(len(augmented_values)) + augmented_values = augmented_values * damping + + # Apply spike augmentation + if ( + hasattr(self.params, "spike_prob") + and self.rng.random() < self.params.spike_prob + ): + spikes = generate_spikes(len(augmented_values)) + spikes = spikes.numpy() # Convert torch tensor to numpy array + if spikes.max() < 0: + augmented_values = augmented_values * spikes + else: + augmented_values = augmented_values + spikes + 1 + + # Replace with pure spike signal (rare event) + if ( + hasattr(self.params, "pure_spike_prob") + and self.rng.random() < self.params.pure_spike_prob + ): + spikes = generate_spikes(len(augmented_values)) + augmented_values = spikes.numpy() # Convert torch tensor to 
numpy array + + return augmented_values + + def generate_time_series( + self, + start: np.datetime64, + random_seed: Optional[int] = None, + apply_augmentations: bool = True, + frequency: Optional[Frequency] = None, + ) -> np.ndarray: + """ + Generate a time series with built-in filtering and retry logic. + + If the generated series has extreme spreads, it will retry with different + random seeds up to max_retries times. + + Parameters + ---------- + start : np.datetime64 + Start date for the time series + random_seed : Optional[int], default=None + Random seed for reproducibility + apply_augmentations : bool, default=True + Whether to apply univariate augmentations + frequency : Optional[Frequency], default=None + Frequency for the time series. If None, uses self.frequency + + Returns + ------- + np.ndarray + Generated time series values of shape (length,) + + Raises + ------ + RuntimeError + If all generation attempts fail + """ + original_seed = random_seed + values = None # Initialize to avoid UnboundLocalError + + for attempt in range(self.max_retries + 1): + # Use different seed for each retry + current_seed = original_seed + if attempt > 0: + # Generate a new seed based on the original seed and attempt number + if original_seed is not None: + current_seed = ( + original_seed + attempt * 123 + ) # Large offset to ensure different sequences + else: + current_seed = attempt * 987 + + if current_seed is not None: + self.rng = np.random.default_rng(current_seed) + + try: + # Choose frequency per attempt: first attempt uses provided frequency (if any), + # subsequent attempts randomly sample a supported frequency to avoid bad start/frequency combos + supported_frequencies = [ + Frequency.S, + Frequency.T1, + Frequency.T5, + Frequency.T10, + Frequency.T15, + Frequency.H, + Frequency.D, + Frequency.W, + Frequency.M, + Frequency.Q, + Frequency.A, + ] + if attempt == 0: + chosen_frequency = ( + frequency if frequency is not None else self.frequency + ) + else: + # Sample a different frequency when possible + candidate_freqs = ( + [ + f + for f in supported_frequencies + if f != (frequency or self.frequency) + ] + if (frequency or self.frequency) in supported_frequencies + else supported_frequencies + ) + chosen_frequency = self.rng.choice(candidate_freqs) + logging.debug( + f"Attempt {attempt + 1}: Using frequency {chosen_frequency} (seed={current_seed})" + ) + + values = self._generate_single_series( + start, apply_augmentations, chosen_frequency + ) + + # Check if the generated series is acceptable + values = np.asarray(values) + if self._is_series_acceptable(values): + if attempt > 0: + logging.debug( + f"Generated acceptable series on attempt {attempt + 1}" + ) + return values + else: + min_val = np.min(values) if len(values) > 0 else 0 + max_val = np.max(values) if len(values) > 0 else 0 + spread = max_val - min_val + logging.debug( + f"Attempt {attempt + 1}: Series rejected (spread={spread:.1f}, " + f"min={min_val:.1f}, max={max_val:.1f})" + ) + continue + + except Exception as e: + logging.debug( + f"Attempt {attempt + 1}: Generation failed with error: {e}" + ) + # Keep the last successfully generated values (if any) for fallback + continue + + # If all retries failed, raise an error as documented + if values is None: + raise RuntimeError( + f"Failed to generate acceptable series after {self.max_retries + 1} attempts. " + f"All generation attempts failed completely. Please try different parameters or increase max_retries." 
+ ) + else: + # If we have values but they don't meet criteria, log warning and return them + logging.warning( + f"Failed to generate acceptable series after {self.max_retries + 1} attempts. " + f"Returning last attempt (may have extreme values)." + ) + return values + + def _generate_single_series( + self, + start: np.datetime64, + apply_augmentations: bool = True, + frequency: Optional[Frequency] = None, + ) -> np.ndarray: + """ + Generate a single time series attempt (extracted from original generate_time_series). + """ + # Use provided frequency or fall back to self.frequency + effective_frequency = frequency if frequency is not None else self.frequency + + freq_key, subfreq, timescale = FREQUENCY_MAPPING.get( + effective_frequency, ("D", "", 1) + ) + freq = f"{subfreq}{freq_key}" if subfreq else freq_key + + # Seasonal component weights based on frequency + a, m, w, h, minute = 0.0, 0.0, 0.0, 0.0, 0.0 + if effective_frequency == Frequency.S: + minute = self.rng.uniform(0.0, 1.0) + h = self.rng.uniform(0.0, 0.2) + elif effective_frequency in [ + Frequency.T1, + Frequency.T5, + Frequency.T10, + Frequency.T15, + ]: + minute = self.rng.uniform(0.0, 1.0) + h = self.rng.uniform(0.0, 0.2) + elif effective_frequency == Frequency.H: + minute = self.rng.uniform(0.0, 0.2) + h = self.rng.uniform(0.0, 1.0) + elif effective_frequency == Frequency.D: + w = self.rng.uniform(0.0, 1.0) + m = self.rng.uniform(0.0, 0.2) + elif effective_frequency == Frequency.W: + m = self.rng.uniform(0.0, 0.3) + a = self.rng.uniform(0.0, 0.3) + elif effective_frequency == Frequency.M: + w = self.rng.uniform(0.0, 0.1) + a = self.rng.uniform(0.0, 0.5) + elif effective_frequency == Frequency.Q: + a = self.rng.uniform(0.0, 1.0) + elif effective_frequency == Frequency.A: + w = self.rng.uniform(0.0, 0.2) + a = self.rng.uniform(0.0, 1.0) + else: + raise NotImplementedError(f"Frequency {effective_frequency} not supported") + + scale_config = ComponentScale( + base=1.0, + linear=self.rng.normal(0, 0.01), + exp=self._calculate_scaled_exp_base(timescale) + if self.params.trend_exp + else 1.0, + a=a, + m=m, + w=w, + minute=minute, + h=h, + ) + + offset_config = ComponentScale( + base=0, + linear=self.rng.uniform(-0.1, 0.5), + exp=self.rng.uniform(-0.1, 0.5), + a=self.rng.uniform(0.0, 1.0), + m=self.rng.uniform(0.0, 1.0), + w=self.rng.uniform(0.0, 1.0), + ) + + noise_config = ComponentNoise( + k=self.rng.uniform(1, 5), + median=1, + scale=sample_scale( + low_ratio=self.params.scale_noise[0], + moderate_ratio=self.params.scale_noise[1], + rng=self.rng, + ), + ) + + cfg = SeriesConfig(scale_config, offset_config, noise_config) + options = { + "trend_exp": self.params.trend_exp, + "scale_noise": self.params.scale_noise, + "harmonic_scale_ratio": self.params.harmonic_scale_ratio, + "harmonic_rate": self.params.harmonic_rate, + "period_factor": self.params.period_factor, + "seasonal_only": self.params.seasonal_only, + "trend_additional": self.params.trend_additional, + } + + # Generate first series + series1 = self._make_series( + cfg, to_offset(freq), start, options, self.params.random_walk + ) + + # Generate second series for transition if enabled + transition = self.rng.random() < self.params.transition_ratio + if transition: + cfg2 = SeriesConfig( + ComponentScale( + base=1.0, + linear=self.rng.normal(0, 0.01), + exp=self._calculate_scaled_exp_base(timescale) + if self.params.trend_exp + else 1.0, + a=a, + m=m, + w=w, + minute=minute, + h=h, + ), + ComponentScale( + base=0, + linear=self.rng.uniform(-0.1, 0.5), + 
exp=self.rng.uniform(-0.1, 0.5), + a=self.rng.uniform(0.0, 1.0), + m=self.rng.uniform(0.0, 1.0), + w=self.rng.uniform(0.0, 1.0), + ), + ComponentNoise( + k=self.rng.uniform(1, 5), + median=1, + scale=sample_scale( + low_ratio=self.params.scale_noise[0], + moderate_ratio=self.params.scale_noise[1], + rng=self.rng, + ), + ), + ) + series2 = self._make_series( + cfg2, to_offset(freq), start, options, self.params.random_walk + ) + coeff = get_transition_coefficients(self.length) + values = coeff * series1["values"] + (1 - coeff) * series2["values"] + else: + values = series1["values"] + + # Apply univariate augmentations if requested + if apply_augmentations: + # Ensure values is a numpy array before augmentation + values = np.asarray(values) + values = self._apply_univariate_augmentations(values) + + return values + + def _make_series( + self, + series: SeriesConfig, + freq: pd.DateOffset, + start: np.datetime64, + options: dict, + random_walk: bool, + ) -> Dict: + start = freq.rollback(start) + dates = pd.date_range(start=start, periods=self.length, freq=freq) + scaled_noise_term = 0 + values_seasonal = {} + + if random_walk: + values = get_random_walk_series(len(dates), rng=self.rng) + elif options["seasonal_only"]: + values_seasonal = self._make_series_seasonal(series, dates, options) + values = values_seasonal["seasonal"] + else: + values_trend = self._make_series_trend(series, dates) + values_seasonal = self._make_series_seasonal(series, dates, options) + values = ( + values_trend + values_seasonal["seasonal"] + if options["trend_additional"] + else values_trend * values_seasonal["seasonal"] + ) + + weibull_noise_term = weibull_noise( + k=series.noise_config.k, + median=series.noise_config.median, + length=len(values), + rng=self.rng, + ) + noise_expected_val = series.noise_config.median + scaled_noise_term = series.noise_config.scale * ( + weibull_noise_term - noise_expected_val + ) + values = values * (1 + scaled_noise_term) + + return { + "values": values, + "noise": 1 + scaled_noise_term, + "dates": dates, + "seasonal": values_seasonal.get("seasonal", np.ones_like(values)), + } + + def _make_series_trend( + self, series: SeriesConfig, dates: pd.DatetimeIndex + ) -> np.ndarray: + values = np.full_like(dates, series.scale.base, dtype=np.float32) + days = (dates - dates[0]).days + if series.scale.linear is not None: + values += shift_axis(days, series.offset.linear) * series.scale.linear + if series.scale.exp is not None: + values *= np.power(series.scale.exp, shift_axis(days, series.offset.exp)) + + return values + + def _make_series_seasonal( + self, series: SeriesConfig, dates: pd.DatetimeIndex, options: dict + ) -> Dict: + seasonal = 1 + harmonic_scale = self.rng.random() < options["harmonic_scale_ratio"] + harmonic_rate = options["harmonic_rate"] + period_factor = options["period_factor"] + seasonal_components = {} + if series.scale.minute is not None and series.scale.minute != 0: + seasonal_components["minute"] = ( + 1 + + series.scale.minute + * self._get_freq_component( + dates.minute, + int(np.ceil(10 * harmonic_rate)), + 60 * period_factor, + harmonic_scale, + ) + ) + seasonal *= seasonal_components["minute"] + if series.scale.h is not None and series.scale.h != 0: + seasonal_components["h"] = 1 + series.scale.h * self._get_freq_component( + dates.hour, + int(np.ceil(10 * harmonic_rate)), + 24 * period_factor, + harmonic_scale, + ) + seasonal *= seasonal_components["h"] + if series.scale.a is not None and series.scale.a != 0: + seasonal_components["a"] = 1 + series.scale.a * 
self._get_freq_component( + dates.month, + int(np.ceil(6 * harmonic_rate)), + 12 * period_factor, + harmonic_scale, + ) + seasonal *= seasonal_components["a"] + if series.scale.m is not None and series.scale.m != 0: + seasonal_components["m"] = 1 + series.scale.m * self._get_freq_component( + dates.day, + int(np.ceil(10 * harmonic_rate)), + 30.5 * period_factor, + harmonic_scale, + ) + seasonal *= seasonal_components["m"] + if series.scale.w is not None and series.scale.w != 0: + seasonal_components["w"] = 1 + series.scale.w * self._get_freq_component( + dates.dayofweek, + int(np.ceil(4 * harmonic_rate)), + 7 * period_factor, + harmonic_scale, + ) + seasonal *= seasonal_components["w"] + seasonal_components["seasonal"] = seasonal + return seasonal_components + + def _get_freq_component( + self, + dates_feature: pd.Index, + n_harmonics: int, + n_total: float, + harmonic_scale: bool = True, + ) -> np.ndarray: + harmonics = list(range(1, n_harmonics + 1)) + sin_coef = np.zeros(n_harmonics) + cos_coef = np.zeros(n_harmonics) + for idx, harmonic in enumerate(harmonics): + h = 1 if not harmonic_scale else harmonic + sin_coef[idx] = self.rng.normal(scale=1 / h) + cos_coef[idx] = self.rng.normal(scale=1 / h) + coef_sq_sum = np.sqrt(np.sum(np.square(sin_coef)) + np.sum(np.square(cos_coef))) + sin_coef /= coef_sq_sum + cos_coef /= coef_sq_sum + return_val = 0 + for idx, harmonic in enumerate(harmonics): + return_val += sin_coef[idx] * np.sin( + 2 * np.pi * harmonic * dates_feature / n_total + ) + return_val += cos_coef[idx] * np.cos( + 2 * np.pi * harmonic * dates_feature / n_total + ) + return return_val diff --git a/src/synthetic_generation/forecast_pfn_prior/forecast_pfn_generator_wrapper.py b/src/synthetic_generation/forecast_pfn_prior/forecast_pfn_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..b26b725b51c07e26705f43c9eb92d12e6f8ae52d --- /dev/null +++ b/src/synthetic_generation/forecast_pfn_prior/forecast_pfn_generator_wrapper.py @@ -0,0 +1,176 @@ +import logging +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.forecast_pfn_prior.forecast_pfn_generator import ( + ForecastPFNGenerator, +) +from src.synthetic_generation.generator_params import ForecastPFNGeneratorParams + + +class ForecastPFNGeneratorWrapper(GeneratorWrapper): + def __init__(self, params: ForecastPFNGeneratorParams): + super().__init__(params) + self.params: ForecastPFNGeneratorParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + """ + Sample parameters for generating a batch of time series. 
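+        Frequencies and start dates are drawn by the base-class sampler
+        (``super()._sample_parameters``); the ForecastPFN-specific fields are
+        copied unchanged from ``self.params``.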
+ + Parameters + ---------- + batch_size : int + Number of time series to generate parameters for + + Returns + ------- + Dict[str, Any] + Dictionary containing sampled parameters including: + - frequency: List of frequencies (one per batch item) + - start: List of start dates (one per batch item) + - length: Series length + - All ForecastPFN-specific parameters + """ + params = super()._sample_parameters(batch_size) + + params.update( + { + "length": self.params.length, + "trend_exp": self.params.trend_exp, + "scale_noise": self.params.scale_noise, + "harmonic_scale_ratio": self.params.harmonic_scale_ratio, + "harmonic_rate": self.params.harmonic_rate, + "period_factor": self.params.period_factor, + "seasonal_only": self.params.seasonal_only, + "trend_additional": self.params.trend_additional, + "transition_ratio": self.params.transition_ratio, + "random_walk": self.params.random_walk, + # Univariate augmentation parameters + "time_warp_prob": self.params.time_warp_prob, + "time_warp_strength": self.params.time_warp_strength, + "magnitude_scale_prob": self.params.magnitude_scale_prob, + "magnitude_scale_range": self.params.magnitude_scale_range, + "damping_prob": self.params.damping_prob, + "spike_prob": self.params.spike_prob, + "pure_spike_prob": self.params.pure_spike_prob, + } + ) + return params + + def _apply_augmentations( + self, batch_values: np.ndarray, mixup_prob: float, mixup_series: int + ) -> np.ndarray: + """ + Apply multivariate augmentations to the batch. + + Parameters + ---------- + batch_values : np.ndarray + Batch of time series values with shape (batch_size, length) + mixup_prob : float + Probability of applying mixup augmentation + mixup_series : int + Maximum number of series to mix in mixup + + Returns + ------- + np.ndarray + Augmented batch values with same shape as input + """ + batch_size = batch_values.shape[0] + + # Apply mixup augmentation if enabled + if self.rng.random() < mixup_prob: + mixup_series = self.rng.integers(2, mixup_series + 1) + mixup_indices = self.rng.choice(batch_size, mixup_series, replace=False) + original_vals = batch_values[mixup_indices, :].copy() + for i, idx in enumerate(mixup_indices): + mixup_weights = self.rng.random(mixup_series) + mixup_weights /= np.sum(mixup_weights) + batch_values[idx, :] = np.sum( + original_vals * mixup_weights[:, np.newaxis], axis=0 + ) + + return batch_values + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + """ + Generate a batch of time series. + + Parameters + ---------- + batch_size : int + Number of time series to generate + seed : Optional[int], default=None + Random seed for reproducibility + params : Optional[Dict[str, Any]], default=None + Generation parameters. 
If None, will be sampled automatically + + Returns + ------- + TimeSeriesContainer + Container with generated time series values, start dates, and frequencies + """ + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = ForecastPFNGenerator( + params=ForecastPFNGeneratorParams(**params), + length=params["length"], + random_seed=seed, + max_retries=100, + ) + + batch_values = [] + + for i in range(batch_size): + batch_seed = None if seed is None else seed + i + # Extract individual parameters for this batch item + frequency_i = ( + params["frequency"][i] + if isinstance(params["frequency"], list) + else params["frequency"] + ) + start_i = ( + params["start"][i] + if isinstance(params["start"], list) + else params["start"] + ) + + try: + values = generator.generate_time_series( + start=start_i, + random_seed=batch_seed, + apply_augmentations=True, + frequency=frequency_i, + ) + batch_values.append(values) + except RuntimeError as e: + # Log the failure and generate a fallback series + logging.warning(f"Failed to generate series {i} in batch: {e}") + + # Convert to numpy array before applying augmentations + batch_values = np.array(batch_values) + + # Apply batch augmentations + batch_values = self._apply_augmentations( + batch_values=batch_values, + mixup_prob=self.params.mixup_prob, + mixup_series=self.params.mixup_series, + ) + + return TimeSeriesContainer( + values=batch_values, + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/forecast_pfn_prior/series_config.py b/src/synthetic_generation/forecast_pfn_prior/series_config.py new file mode 100644 index 0000000000000000000000000000000000000000..4a84907ed84fd5ac844b6ba68a2dd8c2f60aae03 --- /dev/null +++ b/src/synthetic_generation/forecast_pfn_prior/series_config.py @@ -0,0 +1,34 @@ +from dataclasses import dataclass + +import numpy as np + + +@dataclass +class ComponentScale: + base: float + linear: float = None + exp: float = None + a: np.ndarray = None + q: np.ndarray = None + m: np.ndarray = None + w: np.ndarray = None + h: np.ndarray = None + minute: np.ndarray = None + + +@dataclass +class ComponentNoise: + # shape parameter for the weibull distribution + k: float + median: float + scale: float + + +@dataclass +class SeriesConfig: + scale: ComponentScale + offset: ComponentScale + noise_config: ComponentNoise + + def __str__(self): + return f"L{1000 * self.scale.linear:+02.0f}E{10000 * (self.scale.exp - 1):+02.0f}A{100 * self.scale.a:02.0f}M{100 * self.scale.m:02.0f}W{100 * self.scale.w:02.0f}" diff --git a/src/synthetic_generation/forecast_pfn_prior/utils.py b/src/synthetic_generation/forecast_pfn_prior/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2f95401c19dc6265d22151c2b24900ac44e4600d --- /dev/null +++ b/src/synthetic_generation/forecast_pfn_prior/utils.py @@ -0,0 +1,76 @@ +import numpy as np + + +def weibull_noise(k=2, length=1, median=1, rng=None): + """ + Function to generate weibull noise with a fixed median + """ + if rng is None: + rng = np.random.default_rng() + # Lambda is chosen such that the median is a given value + lamda = median / (np.log(2) ** (1 / k)) + return lamda * rng.weibull(k, length) + + +def shift_axis(days, shift): + if shift is None: + return days + return days - shift * days[-1] + + +def get_random_walk_series(length, movements=[-1, 1], rng=None): + """ + Function to generate a random walk series with a specified length + """ + if rng is None: 
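+        # no RNG supplied: fall back to a fresh, unseeded default generator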
+ rng = np.random.default_rng() + random_walk = list() + random_walk.append(rng.choice(movements)) + for i in range(1, length): + movement = rng.choice(movements) + value = random_walk[i - 1] + movement + random_walk.append(value) + + return np.array(random_walk) + + +def sample_scale(low_ratio=0.6, moderate_ratio=0.3, rng=None): + """ + Function to sample scale such that it follows 60-30-10 distribution + i.e. 60% of the times it is very low, 30% of the times it is moderate and + the rest 10% of the times it is high + """ + if rng is None: + rng = np.random.default_rng() + rand = rng.random() + # very low noise + if rand <= low_ratio: + return rng.uniform(0, 0.1) + # moderate noise + elif rand <= (low_ratio + moderate_ratio): + return rng.uniform(0.2, 0.5) + # high noise + else: + return rng.uniform(0.7, 0.9) + + +def get_transition_coefficients(context_length): + """ + Transition series refers to the linear combination of 2 series + S1 and S2 such that the series S represents S1 for a period and S2 + for the remaining period. We model S as S = (1 - f) * S1 + f * S2 + Here f = 1 / (1 + e^{-k (x-m)}) where m = (a + b) / 2 and k is chosen + such that f(a) = 0.1 (and hence f(b) = 0.9). a and b refer to + 0.2 * CONTEXT_LENGTH and 0.8 * CONTEXT_LENGTH + """ + # a and b are chosen with 0.2 and 0.8 parameters + a, b = 0.2 * context_length, 0.8 * context_length + + # fixed to this value + f_a = 0.1 + + m = (a + b) / 2 + k = 1 / (a - m) * np.log(f_a / (1 - f_a)) + + coeff = 1 / (1 + np.exp(-k * (np.arange(1, context_length + 1) - m))) + return coeff diff --git a/src/synthetic_generation/generator_params.py b/src/synthetic_generation/generator_params.py new file mode 100644 index 0000000000000000000000000000000000000000..130aaa6942408961b44b953781107bd0e553d3da --- /dev/null +++ b/src/synthetic_generation/generator_params.py @@ -0,0 +1,748 @@ +from dataclasses import dataclass, field +from enum import Enum +from typing import Dict, List, Optional, Tuple, Union + +import numpy as np + +from src.data.frequency import Frequency + + +@dataclass +class GeneratorParams: + """Base class for generator parameters.""" + + global_seed: int = 42 + length: int = 2048 + frequency: Optional[List[Frequency]] = None + start: Optional[List[np.datetime64]] = None + + def update(self, **kwargs): + """Update parameters from keyword arguments.""" + for k, v in kwargs.items(): + if hasattr(self, k): + setattr(self, k, v) + + +@dataclass +class ForecastPFNGeneratorParams(GeneratorParams): + """Parameters for the ForecastPFNGenerator.""" + + trend_exp: bool = True + scale_noise: Tuple[float, float] = (0.6, 0.3) + harmonic_scale_ratio: float = 0.5 + harmonic_rate: float = 1.0 + period_factor: float = 1.0 + seasonal_only: bool = False + trend_additional: bool = True + transition_ratio: float = ( + 1.0 # Probability of applying transition between two series + ) + random_walk: bool = False + + # Multivariate augmentation parameters (applied in wrapper) + mixup_prob: float = 0.1 # Probability of applying mixup augmentation + mixup_series: int = 4 # Maximum number of series to mix in mixup + damp_and_spike: bool = False # Whether to apply damping and spike augmentations + damping_noise_ratio: float = 0.05 # Ratio of batch to apply damping + spike_noise_ratio: float = 0.05 # Ratio of batch to apply spike noise + spike_signal_ratio: float = 0.05 # Probability of applying spike signal replacement + spike_batch_ratio: float = 0.05 # Ratio of batch for spike signal replacement + + # Univariate augmentation parameters (applied in 
generator) + time_warp_prob: float = 0.1 # Probability of applying time warping + time_warp_strength: float = 0.05 # Strength of time warping effect + magnitude_scale_prob: float = 0.2 # Probability of applying magnitude scaling + magnitude_scale_range: Tuple[float, float] = ( + 0.9, + 1.1, + ) # Range for magnitude scaling + damping_prob: float = 0.1 # Probability of applying damping augmentation + spike_prob: float = 0.15 # Probability of applying spike augmentation + pure_spike_prob: float = 0.02 # Probability of replacing with pure spike signal + + # Built-in filtering parameters + max_absolute_spread: float = ( + 300.0 # Maximum allowed spread (max - min) for generated series + ) + max_absolute_value: float = 300.0 + max_retries: int = 10 + + +@dataclass +class GPGeneratorParams(GeneratorParams): + """ + Parameters for the Gaussian Process (GP) Prior synthetic data generator. + """ + + max_kernels: int = 6 + likelihood_noise_level: float = 0.1 + noise_level: str = "low" # Options: ["random", "high", "moderate", "low"] + use_original_gp: bool = False + gaussians_periodic: bool = True + peak_spike_ratio: float = 0.1 + subfreq_ratio: float = 0.2 + periods_per_freq: float = 0.5 + gaussian_sampling_ratio: float = 0.2 + max_period_ratio: float = 0.5 + kernel_periods: Tuple[int, ...] = (4, 5, 7, 21, 24, 30, 60, 120) + kernel_bank: Dict[str, float] = field( + default_factory=lambda: { + "matern_kernel": 1.5, + "linear_kernel": 1.0, + "periodic_kernel": 5.0, + "polynomial_kernel": 0.0, + "spectral_mixture_kernel": 0.0, + } + ) + + +@dataclass +class KernelGeneratorParams(GeneratorParams): + """Parameters for the KernelSynthGenerator.""" + + max_kernels: int = 5 + + +@dataclass +class SineWaveGeneratorParams(GeneratorParams): + """Parameters for the SineWaveGenerator - focused on diverse sinusoidal patterns.""" + + # Core sinusoidal parameters + num_components_range: Tuple[int, int] = (1, 3) + period_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (10.0, 200.0) + amplitude_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.5, 3.0) + phase_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.0, 2.0 * np.pi) + + # Trend parameters + trend_slope_range: Tuple[float, float] = (-0.01, 0.01) + base_level_range: Tuple[float, float] = (0.0, 2.0) + + # Noise parameters + noise_probability: float = ( + 0.7 # Probability of adding noise (70% of series have noise) + ) + noise_level_range: Tuple[float, float] = ( + 0.05, + 0.2, + ) # Small noise as fraction of amplitude (when noise is applied) + + # Time-varying parameters (subtle modulation) + enable_amplitude_modulation: bool = True + amplitude_modulation_strength: float = 0.1 # Max 10% amplitude variation + enable_frequency_modulation: bool = True + frequency_modulation_strength: float = 0.05 # Max 5% frequency variation + + +@dataclass +class SawToothGeneratorParams(GeneratorParams): + """Parameters for the SawToothGenerator.""" + + periods: Tuple[int, int] = (2, 7) # Number of sawtooth periods in the series + amplitude_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.5, 3.0) + phase_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.0, 1.0) # Phase shift as fraction of period + trend_slope_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (-0.001, 0.001) # Slightly stronger linear trend slope for more 
straight lines + seasonality_amplitude_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.0, 0.02) # Minimal seasonal component amplitude + add_trend: bool = True # Whether to add linear trend + add_seasonality: bool = True # Whether to add seasonal component + + +class StepPatternType(Enum): + """Types of step patterns that can be generated.""" + + STABLE = "stable" # Flat line with minimal variation + GRADUAL_INCREASE = "gradual_increase" # Gradual upward steps + GRADUAL_DECREASE = "gradual_decrease" # Gradual downward steps + SPIKE_UP = "spike_up" # Sharp increase then gradual decrease + SPIKE_DOWN = "spike_down" # Sharp decrease then gradual increase + OSCILLATING = "oscillating" # Up and down pattern + RANDOM_WALK = "random_walk" # Random steps (current behavior) + + +@dataclass +class SubseriesConfig: + """Configuration for a single subseries pattern.""" + + pattern_type: StepPatternType + length_range: Tuple[int, int] # Min and max length for this subseries + num_changepoints_range: Tuple[int, int] # Number of changepoints in this subseries + step_size_range: Tuple[float, float] # Step size range for this pattern + level_drift_range: Tuple[float, float] = (0.0, 0.0) # Overall level drift + step_size_decay: float = 1.0 # Decay factor for step sizes over time + weight: float = 1.0 # Probability weight for selecting this pattern + + +@dataclass +class StepGeneratorParams(GeneratorParams): + """Parameters for the StepGenerator with subseries support.""" + + # Subseries configuration + subseries_configs: List[SubseriesConfig] = field( + default_factory=lambda: [ + # Stable beginning (20-30% of series) + SubseriesConfig( + pattern_type=StepPatternType.STABLE, + length_range=(200, 600), + num_changepoints_range=(0, 3), + step_size_range=(-1.0, 1.0), + weight=0.8, + ), + # Gradual increase pattern (15-25% of series) + SubseriesConfig( + pattern_type=StepPatternType.GRADUAL_INCREASE, + length_range=(300, 700), + num_changepoints_range=(5, 15), + step_size_range=(1.0, 5.0), + level_drift_range=(0.0, 0.1), + weight=0.6, + ), + # Gradual decrease pattern (15-25% of series) + SubseriesConfig( + pattern_type=StepPatternType.GRADUAL_DECREASE, + length_range=(300, 700), + num_changepoints_range=(5, 15), + step_size_range=(-5.0, -1.0), + level_drift_range=(-0.1, 0.0), + weight=0.6, + ), + # Spike up pattern (10-20% of series) + SubseriesConfig( + pattern_type=StepPatternType.SPIKE_UP, + length_range=(200, 500), + num_changepoints_range=(3, 8), + step_size_range=(3.0, 10.0), + step_size_decay=0.7, + weight=0.4, + ), + # Spike down pattern (10-20% of series) + SubseriesConfig( + pattern_type=StepPatternType.SPIKE_DOWN, + length_range=(200, 500), + num_changepoints_range=(3, 8), + step_size_range=(-10.0, -3.0), + step_size_decay=0.7, + weight=0.4, + ), + # Oscillating pattern (10-15% of series) + SubseriesConfig( + pattern_type=StepPatternType.OSCILLATING, + length_range=(400, 800), + num_changepoints_range=(8, 20), + step_size_range=(-4.0, 4.0), + weight=0.3, + ), + # Random walk pattern (fallback) + SubseriesConfig( + pattern_type=StepPatternType.RANDOM_WALK, + length_range=(100, 400), + num_changepoints_range=(5, 20), + step_size_range=(-3.0, 3.0), + weight=0.2, + ), + ] + ) + + # Minimum number of subseries to combine + min_subseries: int = 10 + max_subseries: int = 100 + + # Transition smoothing between subseries + enable_smooth_transitions: bool = False + transition_length: int = 5 + + # Base level and global parameters + base_level_range: Tuple[float, 
float] = (5.0, 15.0) + noise_level_range: Tuple[float, float] = (0.001, 0.01) + + # Seasonal component parameters + add_seasonality: bool = True + daily_seasonality_amplitude_range: Tuple[float, float] = (0.0, 0.8) + weekly_seasonality_amplitude_range: Tuple[float, float] = (0.0, 0.7) + + # Trend parameters + add_trend: bool = False + trend_slope_range: Tuple[float, float] = (-0.005, 0.005) + + # Scaling parameters + scale_range: Tuple[float, float] = (0.1, 10.0) + + # Anomaly injection parameters + inject_anomalies: bool = False + anomaly_probability: float = 0.02 + anomaly_magnitude_range: Tuple[float, float] = (2.0, 5.0) + + # Level continuity between subseries + maintain_level_continuity: bool = True + max_level_jump_between_subseries: float = 5.0 + + +class AnomalyType(Enum): + """Types of anomalies that can be generated.""" + + SPIKE_UP = "spike_up" + SPIKE_DOWN = "spike_down" + + +class MagnitudePattern(Enum): + """Spike magnitude patterns.""" + + CONSTANT = "constant" # All spikes have similar magnitude + INCREASING = "increasing" # Magnitude increases over time + DECREASING = "decreasing" # Magnitude decreases over time + CYCLICAL = "cyclical" # Magnitude follows a cyclical pattern + RANDOM_BOUNDED = "random_bounded" # Random within bounds but with some correlation + + +@dataclass +class AnomalyGeneratorParams(GeneratorParams): + """Parameters for anomaly time series generation.""" + + # Base signal parameters + base_level_range: Tuple[float, float] = (-100.0, 100.0) + + # Spike direction (50% up-only, 50% down-only series) + spike_direction_probability: float = ( + 0.5 # Probability of up-only vs down-only series + ) + + # Periodicity parameters (uniform singles are always generated; variance/jitter ignored for base schedule) + base_period_range: Tuple[int, int] = (100, 300) # Base period between spike events + period_variance: float = 0.0 # Not used for base schedule anymore + + # Series-level behavior probabilities + cluster_series_probability: float = ( + 0.25 # 25% of series add clusters near base spikes + ) + random_series_probability: float = 0.25 # 25% of series add random single spikes + + # Cluster augmentation parameters (relative to base uniform spikes) + # Fraction of base spike events that will receive nearby extra spikes + cluster_event_fraction: float = 0.3 + # Number of additional spikes to add per selected event (upper bound exclusive like np.random.randint) + cluster_additional_spikes_range: Tuple[int, int] = (1, 4) # yields 1..3 + # Offset window (in time steps) around the base spike for additional spikes (inclusive of negatives) + cluster_offset_range: Tuple[int, int] = (-10, 11) # yields [-10..10] + + # Random single spikes augmentation across the series (not tied to base events) + # Number of random spikes as a fraction of the number of base spikes + random_spike_fraction_of_base: float = 0.3 + + # Spike magnitude parameters + magnitude_pattern: MagnitudePattern = MagnitudePattern.RANDOM_BOUNDED + base_magnitude_range: Tuple[float, float] = (10.0, 50.0) + magnitude_correlation: float = ( + 0.7 # Correlation between consecutive spike magnitudes (0-1) + ) + magnitude_trend_strength: float = 0.02 # Strength of increasing/decreasing trend + cyclical_period_ratio: float = 0.3 # Ratio of cyclical period to series length + + # Noise parameters + magnitude_noise: float = ( + 0.1 # Random noise added to magnitude (as fraction of base magnitude) + ) + timing_jitter: float = 0.0 # Not used for base schedule anymore + + def __post_init__(self): + """Validate parameters 
after initialization.""" + if not (0 <= self.spike_direction_probability <= 1): + raise ValueError("spike_direction_probability must be between 0 and 1") + if not (0 <= self.period_variance <= 0.5): + raise ValueError("period_variance must be between 0 and 0.5") + if not (0 <= self.magnitude_correlation <= 1): + raise ValueError("magnitude_correlation must be between 0 and 1") + if self.base_period_range[0] >= self.base_period_range[1]: + raise ValueError("base_period_range must have min < max") + # Validate series-type probabilities + if not (0.0 <= self.cluster_series_probability <= 1.0): + raise ValueError("cluster_series_probability must be between 0 and 1") + if not (0.0 <= self.random_series_probability <= 1.0): + raise ValueError("random_series_probability must be between 0 and 1") + if self.cluster_series_probability + self.random_series_probability > 1.0: + raise ValueError( + "Sum of cluster_series_probability and random_series_probability must be <= 1" + ) + # Validate cluster augmentation + if not (0.0 <= self.cluster_event_fraction <= 1.0): + raise ValueError("cluster_event_fraction must be between 0 and 1") + if ( + self.cluster_additional_spikes_range[0] + >= self.cluster_additional_spikes_range[1] + ): + raise ValueError("cluster_additional_spikes_range must have min < max") + if self.cluster_offset_range[0] >= self.cluster_offset_range[1]: + raise ValueError("cluster_offset_range must have min < max") + # Validate random augmentation + if not (0.0 <= self.random_spike_fraction_of_base <= 1.0): + raise ValueError("random_spike_fraction_of_base must be between 0 and 1") + + +class SpikeShape(Enum): + """Enumeration of spike shapes.""" + + V_SHAPE = "v" + INVERTED_V = "inverted_v" + CHOPPED_V = "chopped_v" + CHOPPED_INVERTED_V = "chopped_inverted_v" + + +@dataclass +class SpikesGeneratorParams(GeneratorParams): + """Parameters for spike time series generation.""" + + # Separate spike counts for different modes + spike_count_burst: Tuple[int, int] = (2, 4) + spike_count_uniform: Tuple[int, int] = (4, 7) + + # Spike amplitude parameters (absolute values, sign determined per series) + spike_amplitude: Union[float, Tuple[float, float]] = (50.0, 300.0) + + # Spike angle range in degrees (controls steepness) - sampled once per series + spike_angle_range: Tuple[float, float] = (70.0, 85.0) + + # Probability of burst mode vs spread mode (5% burst, 95% spread) + burst_mode_probability: float = 0.05 + + # Plateau duration for chopped spikes (in time steps) + plateau_duration: Tuple[int, int] = (30, 50) + + # Baseline value (should be close to zero) + baseline: Union[float, Tuple[float, float]] = (-200, 200) + + # Burst clustering parameters - fraction of series length for burst width + burst_width_fraction: Tuple[float, float] = (0.1, 0.25) + + # Spread mode edge margin ratio: edges are set to this fraction of the + # inter-spike spacing. Smaller values yield smaller left/right margins and + # larger spacing between spikes. Example: 0.2 => edge margins are 20% of + # the spacing between spikes. 
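+    # Illustrative arithmetic (numbers are made up, not defaults): with 5 spikes
+    # spread over a 1000-step series the spacing is roughly 1000 / 5 = 200 steps,
+    # so a ratio of 0.2 leaves edge margins of about 0.2 * 200 = 40 steps.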
+ edge_margin_ratio: float = 0.2 + + # Probability of spikes being above baseline (vs below baseline) per series + spikes_above_baseline_probability: float = 0.5 + + # Probability of each series type + series_type_probabilities: Dict[str, float] = field( + default_factory=lambda: { + "v_only": 0.4, + "chopped_only": 0.3, + "mixed": 0.3, + } + ) + + # Minimum spike width in time steps (to ensure visible spikes) + min_spike_width: int = 30 + + # Maximum spike width in time steps (to prevent overly wide spikes) + max_spike_width: int = 100 + + # Minimum margin between spikes (only used in burst mode) + min_spike_margin: int = 10 + + # Noise parameters - applied to entire signal + noise_std: float = 2 + noise_probability: float = 0.5 + brown_noise_alpha: float = 2.0 # Power law exponent (2.0 = brown noise) + noise_cutoff_freq: float = 0.1 # Relative to Nyquist frequency + + +@dataclass +class CauKerGeneratorParams(GeneratorParams): + """Parameters for the CauKer (SCM-GP) generator.""" + + # Number of channels (features) to sample per series. If a tuple(range) + # or list is provided, the wrapper will pick a single value for the whole batch. + num_channels: Union[int, Tuple[int, int], List[int]] = 6 + + # Maximum number of parents per node in the DAG + max_parents: int = 3 + + # Total number of nodes in the underlying DAG + num_nodes: int = 6 + + +class TrendType(Enum): + """Types of trends that can be applied to the OU process.""" + + NONE = "none" # No trend, classic OU behavior + LINEAR = "linear" # Linear drift in mu over time + EXPONENTIAL = "exponential" # Exponential growth/decay in mu + LOGISTIC = "logistic" # S-curve growth pattern + SINUSOIDAL = "sinusoidal" # Cyclical trend + PIECEWISE_LINEAR = "piecewise_linear" # Multiple linear segments + POLYNOMIAL = "polynomial" # Polynomial trend (quadratic/cubic) + + +@dataclass +class TrendConfig: + """Configuration for time-varying trends in OU process parameters.""" + + trend_type: TrendType = TrendType.NONE + + # Linear trend parameters + linear_slope_range: Tuple[float, float] = (-0.01, 0.01) + + # Exponential trend parameters + exp_rate_range: Tuple[float, float] = (-0.005, 0.005) + exp_asymptote_range: Tuple[float, float] = (-5.0, 5.0) + + # Logistic trend parameters + logistic_growth_rate_range: Tuple[float, float] = (0.01, 0.1) + logistic_capacity_range: Tuple[float, float] = (5.0, 20.0) + logistic_midpoint_ratio_range: Tuple[float, float] = ( + 0.3, + 0.7, + ) # As fraction of series length + + # Sinusoidal trend parameters + sin_amplitude_range: Tuple[float, float] = (1.0, 5.0) + sin_period_ratio_range: Tuple[float, float] = ( + 0.1, + 0.5, + ) # As fraction of series length + sin_phase_range: Tuple[float, float] = (0.0, 2.0 * np.pi) + + # Piecewise linear parameters + num_segments_range: Tuple[int, int] = (2, 5) + segment_slope_range: Tuple[float, float] = (-0.02, 0.02) + + # Polynomial trend parameters + poly_degree_range: Tuple[int, int] = (2, 3) + poly_coeff_range: Tuple[float, float] = ( + -1e-6, + 1e-6, + ) # Small coefficients for stability + + # Structural change parameters + enable_structural_changes: bool = True + num_structural_changes_range: Tuple[int, int] = (0, 3) + structural_change_magnitude_range: Tuple[float, float] = (1.0, 5.0) + min_segment_length: int = 200 # Minimum length between structural changes + + +@dataclass +class OrnsteinUhlenbeckProcessGeneratorParams(GeneratorParams): + """Parameters for the Regime-Switching Ornstein-Uhlenbeck generator. 
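+    Two regimes (0 and 1) carry independent mean-reversion, mean, and volatility
+    ranges; the p00/p11 ranges control how persistent each regime is.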
+ + The generator samples concrete values per series using these ranges. + Enhanced with time-varying parameter support for realistic non-stationary behavior. + """ + + # Integration step size used inside the generator + dt: float = 0.01 + + # Regime 0 parameter distributions + regime0_theta_range: Tuple[float, float] = (1.0, 5.0) + regime0_mu_mean_std: Tuple[float, float] = (-2.0, 1.0) + regime0_sigma_lognormal_params: Tuple[float, float] = (float(np.log(0.3)), 0.3) + + # Regime 0 volatility process parameters + regime0_vol_reversion_range: Tuple[float, float] = (2.0, 5.0) # kappa_v + regime0_vol_mean_range: Tuple[float, float] = (0.2, 0.4) # theta_v + regime0_vol_vol_range: Tuple[float, float] = (0.1, 0.3) # xi_v + + # Regime 1 parameter distributions + regime1_theta_range: Tuple[float, float] = (0.05, 0.5) + regime1_mu_mean_std: Tuple[float, float] = (2.0, 1.0) + regime1_sigma_lognormal_params: Tuple[float, float] = (float(np.log(1.5)), 0.5) + + # Regime 1 volatility process parameters + regime1_vol_reversion_range: Tuple[float, float] = (0.5, 2.0) # kappa_v + regime1_vol_mean_range: Tuple[float, float] = (0.8, 1.2) # theta_v + regime1_vol_vol_range: Tuple[float, float] = (0.3, 0.5) # xi_v + + # Initial value distributions + x0_mean_std: Tuple[float, float] = (0.0, 2.0) + + # Transition matrix diagonal probabilities (allow more frequent regime changes) + p00_range: Tuple[float, float] = (0.85, 0.999) # Allow more frequent transitions + p11_range: Tuple[float, float] = (0.85, 0.999) + + # Time-varying parameter support + trend_config: TrendConfig = field(default_factory=TrendConfig) + + # Probability of applying trends to different parameters + mu_trend_probability: float = 0.7 # High probability for realistic non-stationarity + theta_trend_probability: float = 0.2 # Occasional changes in mean reversion speed + sigma_trend_probability: float = 0.3 # Occasional changes in volatility + + # Global scaling and level parameters for real-world applicability + global_level_range: Tuple[float, float] = ( + -100.0, + 100.0, + ) # Base level around which process evolves + global_scale_range: Tuple[float, float] = ( + 0.1, + 50.0, + ) # Scale factor for entire series + + # Noise injection for additional realism + measurement_noise_std_range: Tuple[float, float] = ( + 0.0, + 0.1, + ) # Additive measurement noise + + # Long-term memory parameters (for more realistic autocorrelation) + enable_long_memory: bool = False + hurst_exponent_range: Tuple[float, float] = ( + 0.3, + 0.8, + ) # Fractional Brownian motion component + + # Seasonality parameters + enable_seasonality: bool = True + num_seasonal_components_range: Tuple[int, int] = ( + 1, + 3, + ) # Number of seasonal components + seasonal_periods: Tuple[float, ...] 
= ( + 7.0, # Weekly + 30.0, # Monthly + 90.0, # Quarterly + 365.25, # Yearly + 182.625, # Semi-annual + ) # Available seasonal periods (in time units) + seasonal_amplitude_range: Tuple[float, float] = ( + 0.5, + 3.0, + ) # Amplitude of seasonal components + seasonal_phase_range: Tuple[float, float] = (0.0, 2.0 * np.pi) # Phase shift range + seasonal_period_jitter: float = 0.05 # Jitter applied to periods for realism (±5%) + + # Probability of applying seasonality to different parameters + mu_seasonality_probability: float = 0.6 # Probability of seasonal mean + sigma_seasonality_probability: float = 0.3 # Probability of seasonal volatility + + # Seasonal component decay/growth over time + enable_seasonal_evolution: bool = True + seasonal_amplitude_trend_range: Tuple[float, float] = ( + -0.001, + 0.001, + ) # Trend in seasonal amplitude + + def __post_init__(self): + if self.dt <= 0: + raise ValueError("dt must be positive for OU process simulation") + + if not (0.0 <= self.mu_trend_probability <= 1.0): + raise ValueError("mu_trend_probability must be between 0 and 1") + if not (0.0 <= self.theta_trend_probability <= 1.0): + raise ValueError("theta_trend_probability must be between 0 and 1") + if not (0.0 <= self.sigma_trend_probability <= 1.0): + raise ValueError("sigma_trend_probability must be between 0 and 1") + + if self.global_level_range[0] >= self.global_level_range[1]: + raise ValueError("global_level_range must have min < max") + if self.global_scale_range[0] <= 0: + raise ValueError("global_scale_range values must be positive") + + +# ===================== +# Audio generator params +# ===================== + + +@dataclass +class AudioGeneratorParams(GeneratorParams): + """Common parameters for audio-based time series generators (pyo-backed).""" + + # Offline pyo rendering configuration + server_duration: float = 2.0 # seconds + sample_rate: int = 44100 # Hz + + # Output post-processing + normalize_output: bool = True # Normalize to unit max abs before returning + + +@dataclass +class FinancialVolatilityAudioParams(AudioGeneratorParams): + """Parameters for the FinancialVolatility audio generator.""" + + # Trend LFO controlling slow drift + trend_lfo_freq_range: Tuple[float, float] = (0.1, 0.5) + trend_lfo_mul_range: Tuple[float, float] = (0.2, 0.5) + + # Volatility clustering + volatility_carrier_freq_range: Tuple[float, float] = (1.0, 5.0) + follower_freq_range: Tuple[float, float] = (1.0, 4.0) + volatility_range: Tuple[float, float] = (0.1, 0.8) + + # Market jumps/shocks + jump_metro_time_range: Tuple[float, float] = (0.3, 1.0) + jump_env_start_range: Tuple[float, float] = (0.5, 1.0) + jump_env_decay_time_range: Tuple[float, float] = (0.05, 0.2) + jump_freq_range: Tuple[float, float] = (20.0, 80.0) + jump_direction_up_probability: float = 0.5 + + +@dataclass +class MultiScaleFractalAudioParams(AudioGeneratorParams): + """Parameters for the Multi-Scale Fractal audio generator.""" + + base_noise_mul_range: Tuple[float, float] = (0.3, 0.8) + num_scales_range: Tuple[int, int] = (3, 6) + scale_freq_base_range: Tuple[float, float] = (20.0, 2000.0) + q_factor_range: Tuple[float, float] = (0.5, 3.0) + per_scale_attenuation_range: Tuple[float, float] = ( + 0.5, + 0.8, + ) # multiplier per scale index + + +@dataclass +class StochasticRhythmAudioParams(AudioGeneratorParams): + """Parameters for the Stochastic Rhythm audio generator.""" + + base_tempo_hz_range: Tuple[float, float] = (2.0, 8.0) + num_layers_range: Tuple[int, int] = (3, 5) + subdivisions: Tuple[int, ...] 
= (1, 2, 3, 4, 6, 8) + attack_range: Tuple[float, float] = (0.001, 0.01) + decay_range: Tuple[float, float] = (0.05, 0.3) + tone_freq_range: Tuple[float, float] = (50.0, 800.0) + tone_mul_range: Tuple[float, float] = (0.2, 0.5) + + +@dataclass +class NetworkTopologyAudioParams(AudioGeneratorParams): + """Parameters for the Network Topology audio generator.""" + + # Base traffic flow + traffic_lfo_freq_range: Tuple[float, float] = (0.2, 1.0) + traffic_lfo_mul_range: Tuple[float, float] = (0.2, 0.5) + + # Packet bursts + burst_rate_hz_range: Tuple[float, float] = (3.0, 12.0) + burst_duration_range: Tuple[float, float] = (0.02, 0.1) + burst_mul_range: Tuple[float, float] = (0.2, 0.6) + + # Periodic congestion + congestion_period_range: Tuple[float, float] = (1.0, 3.0) # seconds between events + congestion_depth_range: Tuple[float, float] = (-0.6, -0.2) + congestion_release_time_range: Tuple[float, float] = (0.3, 0.8) + + # Protocol overhead + overhead_lfo_freq_range: Tuple[float, float] = (20.0, 50.0) + overhead_mul_range: Tuple[float, float] = (0.05, 0.15) + + # DDoS-like spikes / attacks + attack_period_range: Tuple[float, float] = (2.0, 5.0) + attack_env_points: Tuple[ + Tuple[float, float], Tuple[float, float], Tuple[float, float] + ] = ( + (0.0, 1.2), + (0.1, 0.8), + (0.8, 0.0), + ) + attack_mul_range: Tuple[float, float] = (0.4, 0.8) diff --git a/src/synthetic_generation/gp_prior/constants.py b/src/synthetic_generation/gp_prior/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..d27c2560d427513b48f302575f108e51b1673a87 --- /dev/null +++ b/src/synthetic_generation/gp_prior/constants.py @@ -0,0 +1,18 @@ +KERNEL_BANK = { + 0: ("matern_kernel", 3), + 1: ("linear_kernel", 2), + 2: ("rbf_kernel", 2), + 3: ("periodic_kernel", 5), + 4: ("polynomial_kernel", 1), + 5: ("rational_quadratic_kernel", 1), + 6: ("spectral_mixture_kernel", 2), +} + + +KERNEL_PERIODS_BY_FREQ = { + "min": [5, 15, 30, 60, 120, 240, 360], + "H": [3, 6, 12, 24, 48, 72, 168], + "D": [7, 14, 28, 30, 90, 180, 365], + "W": [2, 4, 8, 12, 24, 52], + "MS": [3, 4, 6, 12, 24, 36, 60], +} diff --git a/src/synthetic_generation/gp_prior/gp_generator.py b/src/synthetic_generation/gp_prior/gp_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..eb25f94eaa06ddfaefb96c55b5b78da95a692d6e --- /dev/null +++ b/src/synthetic_generation/gp_prior/gp_generator.py @@ -0,0 +1,200 @@ +import functools +from typing import Dict, Optional + +import gpytorch +import numpy as np +import torch + +from src.data.frequency import FREQUENCY_MAPPING +from src.synthetic_generation.generator_params import GPGeneratorParams +from src.synthetic_generation.gp_prior.constants import ( + KERNEL_BANK, + KERNEL_PERIODS_BY_FREQ, +) +from src.synthetic_generation.gp_prior.utils import ( + create_kernel, + extract_periodicities, + random_binary_map, +) +from src.synthetic_generation.utils import generate_peak_spikes + + +class GPModel(gpytorch.models.ExactGP): + def __init__(self, train_x, train_y, likelihood, mean_module, kernel): + super().__init__(train_x, train_y, likelihood) + self.mean_module = mean_module + self.covar_module = kernel + + def forward(self, x): + mean_x = self.mean_module(x) + covar_x = self.covar_module(x) + return gpytorch.distributions.MultivariateNormal(mean_x, covar_x) + + +class GPGenerator: + def __init__( + self, + params: GPGeneratorParams, + length: int = 1024, + random_seed: Optional[int] = None, + ): + self.params = params + self.length = length + self.rng = 
np.random.default_rng(random_seed) + self.frequency = params.frequency + self.max_kernels = params.max_kernels + self.likelihood_noise_level = params.likelihood_noise_level + self.noise_level = params.noise_level + self.use_original_gp = params.use_original_gp + self.gaussians_periodic = params.gaussians_periodic + self.peak_spike_ratio = params.peak_spike_ratio + self.subfreq_ratio = params.subfreq_ratio + self.periods_per_freq = params.periods_per_freq + self.gaussian_sampling_ratio = params.gaussian_sampling_ratio + self.kernel_periods = params.kernel_periods + self.max_period_ratio = params.max_period_ratio + self.kernel_bank = params.kernel_bank + + def generate_time_series( + self, + random_seed: Optional[int] = None, + ) -> Dict[str, np.ndarray]: + with torch.inference_mode(): + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + torch.manual_seed(random_seed) + + # Determine kernel_bank and gaussians_periodic + if self.use_original_gp: + kernel_bank = KERNEL_BANK + gaussians_periodic = False + else: + # Convert kernel_bank from {str: float} format to {int: (str, float)} format + kernel_bank = { + i: (kernel_name, weight) + for i, (kernel_name, weight) in enumerate(self.kernel_bank.items()) + } + gaussians_periodic = self.gaussians_periodic + + # Map frequency to freq and subfreq + freq, subfreq, timescale = FREQUENCY_MAPPING.get( + self.frequency, ("D", "", 0) + ) + + # Decide if using exact frequencies + exact_freqs = self.rng.random() < self.periods_per_freq + if exact_freqs and freq in KERNEL_PERIODS_BY_FREQ: + kernel_periods = KERNEL_PERIODS_BY_FREQ[freq] + if subfreq: + subfreq_int = int(subfreq) + kernel_periods = [ + p // subfreq_int for p in kernel_periods if p >= subfreq_int + ] + else: + kernel_periods = self.kernel_periods + + # Sample number of kernels + num_kernels = self.rng.integers(1, self.max_kernels + 1) + # Always expect kernel_bank as dict {int: (str, float)} + kernel_weights = np.array([v[1] for v in kernel_bank.values()]) + kernel_ids = self.rng.choice( + list(kernel_bank.keys()), + size=num_kernels, + p=kernel_weights / kernel_weights.sum(), + ) + kernel_names = [kernel_bank[i][0] for i in kernel_ids] + + # Create composite kernel + composite_kernel = functools.reduce( + lambda a, b: random_binary_map(a, b, rng=self.rng), + [ + create_kernel( + k, + self.length, + int(self.max_period_ratio * self.length), + gaussians_periodic, + kernel_periods, + rng=self.rng, + ) + for k in kernel_names + ], + ) + + # Set up GP model + train_x = torch.linspace(0, 1, self.length) + trend = self.rng.choice([True, False]) + mean_module = ( + gpytorch.means.LinearMean(input_size=1) + if trend + else gpytorch.means.ConstantMean() + ) + likelihood = gpytorch.likelihoods.GaussianLikelihood( + noise_covar=torch.diag( + torch.full_like(train_x, self.likelihood_noise_level**2) + ) + ) + model = GPModel(train_x, None, likelihood, mean_module, composite_kernel) + + # Determine noise level + noise = {"high": 1e-1, "moderate": 1e-2, "low": 1e-3}.get( + self.noise_level, + self.rng.choice([1e-1, 1e-2, 1e-3], p=[0.1, 0.2, 0.7]), + ) + + # Sample from GP prior with robust error handling + model.eval() + max_retries = 3 + for attempt in range(max_retries): + try: + with ( + torch.no_grad(), + gpytorch.settings.fast_pred_var(), + gpytorch.settings.cholesky_jitter( + max(noise * (10**attempt), 1e-4) + ), # Increase jitter on retries, with a minimum floor + gpytorch.settings.max_cholesky_size( + self.length + ), # Limit decomposition size + ): + y_sample = 
model(train_x).sample().numpy() + # y_sample shape: (self.length,) (should be 1D) + break + except (RuntimeError, IndexError) as e: + if attempt == max_retries - 1: + # If all attempts fail, generate a simple fallback + print(f"GP sampling failed after {max_retries} attempts: {e}") + print("Generating fallback sample with simpler kernel") + # Create a simple RBF kernel as fallback + simple_kernel = gpytorch.kernels.RBFKernel() + simple_model = GPModel( + train_x, None, likelihood, mean_module, simple_kernel + ) + simple_model.eval() + with torch.no_grad(): + y_sample = simple_model(train_x).sample().numpy() + break + else: + print( + f"GP sampling attempt {attempt + 1} failed: {e}. Retrying with higher jitter..." + ) + + # Optionally add peak spikes + if self.rng.random() < self.peak_spike_ratio: + periodicities = extract_periodicities(composite_kernel, self.length) + if len(periodicities) > 0: + p = int(np.round(max(periodicities))) + spikes_type = self.rng.choice(["regular", "patchy"], p=[0.3, 0.7]) + spikes = generate_peak_spikes( + self.length, p, spikes_type=spikes_type + ) + # y_sample is 1D, so use y_sample[:p].argmax() + spikes_shift = ( + p - y_sample[:p].argmax() if p > 0 and p <= len(y_sample) else 0 + ) + spikes = np.roll(spikes, -spikes_shift) + if spikes.max() < 0: + y_sample = y_sample + spikes + 1 + else: + y_sample = y_sample * spikes + + return y_sample diff --git a/src/synthetic_generation/gp_prior/gp_generator_wrapper.py b/src/synthetic_generation/gp_prior/gp_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..189d7e986a03f042df0d85e9e6e975457abdc00e --- /dev/null +++ b/src/synthetic_generation/gp_prior/gp_generator_wrapper.py @@ -0,0 +1,67 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import GPGeneratorParams +from src.synthetic_generation.gp_prior.gp_generator import GPGenerator + + +class GPGeneratorWrapper(GeneratorWrapper): + def __init__(self, params: GPGeneratorParams): + super().__init__(params) + self.params: GPGeneratorParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + params = super()._sample_parameters(batch_size) + + params.update( + { + "length": self.params.length, + "max_kernels": self.params.max_kernels, + "likelihood_noise_level": self.params.likelihood_noise_level, + "noise_level": self.params.noise_level, + "use_original_gp": self.params.use_original_gp, + "gaussians_periodic": self.params.gaussians_periodic, + "peak_spike_ratio": self.params.peak_spike_ratio, + "subfreq_ratio": self.params.subfreq_ratio, + "periods_per_freq": self.params.periods_per_freq, + "gaussian_sampling_ratio": self.params.gaussian_sampling_ratio, + "kernel_periods": self.params.kernel_periods, + "max_period_ratio": self.params.max_period_ratio, + "kernel_bank": self.params.kernel_bank, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = GPGenerator( + params=self.params, + length=params["length"], + random_seed=seed, + ) + + batch_values = [] + + for i in range(batch_size): + batch_seed = None if seed is None else seed + i + values = 
generator.generate_time_series(random_seed=batch_seed) + + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/gp_prior/utils.py b/src/synthetic_generation/gp_prior/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e42c2eed6733ef52ddf368962a4693c00b510bcb --- /dev/null +++ b/src/synthetic_generation/gp_prior/utils.py @@ -0,0 +1,164 @@ +import gpytorch +import numpy as np +from gpytorch.kernels import AdditiveKernel, PeriodicKernel, ProductKernel, ScaleKernel + + +def custom_gaussian_sample( + max_period_length, + kernel_periods=None, + gaussian_sample=True, + allow_extension=True, + rng=None, +): + if rng is None: + rng = np.random.default_rng() + means = ( + np.array(kernel_periods) + if kernel_periods is not None + else np.array([3, 5, 7, 14, 20, 21, 24, 30, 60, 90, 120]) + ) + + if allow_extension: + if max_period_length > 200: + st = ( + max_period_length // 2 + if max(means) < max_period_length // 2 + else max(means) + 100 + ) + means = np.append(means, np.arange(st, max_period_length, 100)) + else: + if max(means) < max_period_length / 2: + means = np.append( + means, np.array([max_period_length // 2, max_period_length]) + ) + elif max(means) < max_period_length: + means = np.append(means, max_period_length) + + means = means[means <= max_period_length] + selected_mean = rng.choice(means) + + if gaussian_sample: + # Define corresponding standard deviations using np.sqrt(means) * 2 + std_devs = np.sqrt(means) ** 1.2 # / (means *0.008) + selected_std = std_devs[np.where(means == selected_mean)][0] + sample = rng.normal(selected_mean, selected_std) + else: + sample = selected_mean + + if sample < 1: + sample = np.ceil(np.abs(sample)) + + return int(sample) + + +def create_kernel( + kernel: str, + seq_len: int, + max_period_length: int = 365, + max_degree: int = 5, + gaussians_periodic: bool = False, + kernel_periods=None, + kernel_counter=None, + freq=None, + exact_freqs=False, + gaussian_sample=True, + subfreq="", + rng=None, +): + if rng is None: + rng = np.random.default_rng() + scale_kernel = rng.choice([True, False]) + lengthscale = rng.uniform(0.1, 5.0) + if kernel == "linear_kernel": + sigma_prior = gpytorch.priors.GammaPrior(rng.uniform(1, 6), rng.uniform(0.1, 1)) + kernel = gpytorch.kernels.LinearKernel(variance_prior=sigma_prior) + elif kernel == "rbf_kernel": + kernel = gpytorch.kernels.RBFKernel() + kernel.lengthscale = lengthscale + elif kernel == "periodic_kernel": + if gaussians_periodic: + if exact_freqs and freq != "Y" and kernel_counter is not None: + period_length = custom_gaussian_sample( + max_period_length, + kernel_periods=kernel_periods[:-3] + if (kernel_counter["periodic_kernel"] <= 2) and (subfreq == "") + else kernel_periods, + gaussian_sample=gaussian_sample, + allow_extension=(kernel_counter["periodic_kernel"] > 2), + rng=rng, + ) + kernel_counter["periodic_kernel"] -= 1 + else: + period_length = custom_gaussian_sample( + max_period_length, kernel_periods, gaussian_sample=True, rng=rng + ) + else: + period_length = rng.integers(1, max_period_length) + kernel = gpytorch.kernels.PeriodicKernel() + kernel.period_length = period_length / seq_len + kernel.lengthscale = lengthscale + elif kernel == "polynomial_kernel": + offset_prior = gpytorch.priors.GammaPrior( + rng.uniform(1, 4), rng.uniform(0.1, 1) + ) + degree = rng.integers(1, max_degree) + kernel = gpytorch.kernels.PolynomialKernel( + 
offset_prior=offset_prior, power=degree + ) + elif kernel == "matern_kernel": + nu = rng.choice([0.5, 1.5, 2.5]) # Roughness parameter + kernel = gpytorch.kernels.MaternKernel(nu=nu) + kernel.lengthscale = lengthscale + elif kernel == "rational_quadratic_kernel": + alpha = rng.uniform(0.1, 10.0) # Scale mixture parameter + kernel = gpytorch.kernels.RQKernel(alpha=alpha) + kernel.lengthscale = lengthscale + elif kernel == "spectral_mixture_kernel": + num_mixtures = rng.integers(2, 6) # Number of spectral mixture components + kernel = gpytorch.kernels.SpectralMixtureKernel(num_mixtures=num_mixtures) + else: + raise ValueError(f"Unknown kernel: {kernel}") + + if scale_kernel: + kernel = gpytorch.kernels.ScaleKernel(kernel) + return kernel + + +def extract_periodicities(kernel, seq_len): + periodicities = [] + + # Base case: if the kernel is a PeriodicKernel, extract its period_length + if isinstance(kernel, PeriodicKernel): + periodicities.append(kernel.period_length.item() * seq_len) + + # If the kernel is a composite kernel (Additive, Product, Scale), recursively extract periodicities + elif isinstance(kernel, (AdditiveKernel, ProductKernel)): + for sub_kernel in kernel.kernels: + periodicities.extend(extract_periodicities(sub_kernel, seq_len)) + + elif isinstance(kernel, ScaleKernel): + periodicities.extend(extract_periodicities(kernel.base_kernel, seq_len)) + + return periodicities + + +def random_binary_map(a: gpytorch.kernels.Kernel, b: gpytorch.kernels.Kernel, rng=None): + """ + Applies a random binary operator (+ or *) with equal probability + on kernels ``a`` and ``b``. + + Parameters + ---------- + a + A GP kernel. + b + A GP kernel. + + Returns + ------- + The composite kernel `a + b` or `a * b`. + """ + if rng is None: + rng = np.random.default_rng() + binary_maps = [lambda x, y: x + y, lambda x, y: x * y] + return rng.choice(binary_maps)(a, b) diff --git a/src/synthetic_generation/kernel_synth/kernel_generator_wrapper.py b/src/synthetic_generation/kernel_synth/kernel_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..5a95850396ebabf21328e8cd7d1bd459f1db986f --- /dev/null +++ b/src/synthetic_generation/kernel_synth/kernel_generator_wrapper.py @@ -0,0 +1,84 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import KernelGeneratorParams +from src.synthetic_generation.kernel_synth.kernel_synth import KernelSynthGenerator + + +class KernelGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for KernelSynthGenerator to generate batches of multivariate time series data + by stacking multiple univariate series. Accepts a KernelGeneratorParams dataclass for configuration. + """ + + def __init__(self, params: KernelGeneratorParams): + super().__init__(params) + self.params: KernelGeneratorParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + """ + Sample parameter values for batch generation with KernelSynthGenerator. + + Returns + ------- + Dict[str, Any] + Dictionary containing sampled parameter values. 
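+            The dictionary carries the base keys from the parent wrapper
+            (e.g. start and frequency) plus length and max_kernels.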
+ """ + params = super()._sample_parameters(batch_size) + + params.update( + { + "length": self.params.length, + "max_kernels": self.params.max_kernels, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + """ + Generate a batch of synthetic multivariate time series using KernelSynthGenerator. + + Parameters + ---------- + batch_size : int + Number of time series to generate. + seed : int, optional + Random seed for this batch (default: None). + params : Dict[str, Any], optional + Pre-sampled parameters to use. If None, parameters will be sampled. + + Returns + ------- + BatchTimeSeriesContainer + A container with the generated time series data. + """ + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = KernelSynthGenerator( + length=params["length"], + max_kernels=params["max_kernels"], + random_seed=seed, + ) + + batch_values = [] + for i in range(batch_size): + batch_seed = None if seed is None else seed + i + values = generator.generate_time_series(random_seed=batch_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/kernel_synth/kernel_synth.py b/src/synthetic_generation/kernel_synth/kernel_synth.py new file mode 100644 index 0000000000000000000000000000000000000000..94a4a95f72237a485878247eeb50f5221df84589 --- /dev/null +++ b/src/synthetic_generation/kernel_synth/kernel_synth.py @@ -0,0 +1,131 @@ +import functools +from typing import Optional + +import numpy as np +from sklearn.gaussian_process import GaussianProcessRegressor +from sklearn.gaussian_process.kernels import ( + RBF, + ConstantKernel, + DotProduct, + ExpSineSquared, + Kernel, + RationalQuadratic, + WhiteKernel, +) + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator + + +class KernelSynthGenerator(AbstractTimeSeriesGenerator): + """ + Generate independent synthetic univariate time series using kernel synthesis. + + Each series is sampled from a Gaussian process prior with a random composite kernel. + """ + + def __init__( + self, + length: int = 1024, + max_kernels: int = 5, + random_seed: Optional[int] = None, + ): + """ + Parameters + ---------- + length : int, optional + Number of time steps per series (default: 1024). + max_kernels : int, optional + Maximum number of base kernels to combine (default: 5). + random_seed : int, optional + Seed for the random number generator. 
+ """ + self.length = length + self.max_kernels = max_kernels + self.rng = np.random.default_rng(random_seed) + self.kernel_bank = [ + ExpSineSquared(periodicity=24 / length), # H + ExpSineSquared(periodicity=48 / length), # 0.5H + ExpSineSquared(periodicity=96 / length), # 0.25H + ExpSineSquared(periodicity=24 * 7 / length), # H-week + ExpSineSquared(periodicity=48 * 7 / length), # 0.5H-week + ExpSineSquared(periodicity=96 * 7 / length), # 0.25H-week + ExpSineSquared(periodicity=7 / length), # day + ExpSineSquared(periodicity=14 / length), # 0.5-day + ExpSineSquared(periodicity=30 / length), # day + ExpSineSquared(periodicity=60 / length), # 0.5-day + ExpSineSquared(periodicity=365 / length), # year + ExpSineSquared(periodicity=365 * 2 / length), # 0.5-year + ExpSineSquared(periodicity=4 / length), # week + ExpSineSquared(periodicity=26 / length), # week + ExpSineSquared(periodicity=52 / length), # week + ExpSineSquared(periodicity=4 / length), # month + ExpSineSquared(periodicity=6 / length), # month + ExpSineSquared(periodicity=12 / length), # month + ExpSineSquared(periodicity=4 / length), # quarter + ExpSineSquared(periodicity=4 * 10 / length), # quarter + ExpSineSquared(periodicity=10 / length), # year + DotProduct(sigma_0=0.0), + DotProduct(sigma_0=1.0), + DotProduct(sigma_0=10.0), + RBF(length_scale=0.1), + RBF(length_scale=1.0), + RBF(length_scale=10.0), + RationalQuadratic(alpha=0.1), + RationalQuadratic(alpha=1.0), + RationalQuadratic(alpha=10.0), + WhiteKernel(noise_level=0.1), + WhiteKernel(noise_level=1.0), + ConstantKernel(), + ] + + def _random_binary_map(self, a: Kernel, b: Kernel) -> Kernel: + """ + Randomly combine two kernels with + or *. + """ + ops = [lambda x, y: x + y, lambda x, y: x * y] + return self.rng.choice(ops)(a, b) + + def _sample_from_gp_prior( + self, + kernel: Kernel, + X: np.ndarray, + random_seed: Optional[int] = None, + ) -> np.ndarray: + """ + Draw a sample from GP prior using GaussianProcessRegressor. + """ + if X.ndim == 1: + X = X[:, None] + gpr = GaussianProcessRegressor(kernel=kernel) + ts = gpr.sample_y(X, n_samples=1, random_state=random_seed) + + return ts.squeeze() + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """ + Generate a single independent univariate time series. + + Parameters + ---------- + random_seed : int, optional + Random seed for reproducible generation. 
+ + Returns + ------- + np.ndarray + Shape: [seq_len] + """ + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + X = np.linspace(0, 1, self.length) + num_kernels = self.rng.integers(1, self.max_kernels + 1) + selected = self.rng.choice(self.kernel_bank, num_kernels, replace=True) + composite = functools.reduce(self._random_binary_map, selected) + try: + values = self._sample_from_gp_prior(composite, X, random_seed=random_seed) + except np.linalg.LinAlgError: + new_seed = (random_seed + 1) if random_seed is not None else None + return self.generate_time_series(new_seed) + + return values diff --git a/src/synthetic_generation/ornstein_uhlenbeck_process/ou_generator.py b/src/synthetic_generation/ornstein_uhlenbeck_process/ou_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..446f2c7703a1766fedfde7143212cc8b04bdf0c0 --- /dev/null +++ b/src/synthetic_generation/ornstein_uhlenbeck_process/ou_generator.py @@ -0,0 +1,540 @@ +from dataclasses import replace +from typing import Callable, Dict, Optional + +import numpy as np + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.generator_params import ( + OrnsteinUhlenbeckProcessGeneratorParams, + TrendConfig, + TrendType, +) + + +class OrnsteinUhlenbeckProcessGenerator(AbstractTimeSeriesGenerator): + """ + Regime-Switching Ornstein-Uhlenbeck (OU) process generator with time-varying parameters. + + Enhanced to support: + - Time-varying mu (trends, drifts, structural changes) + - Time-varying theta and sigma parameters + - Multiple trend types (linear, exponential, logistic, etc.) + - Structural breaks and regime-dependent trends + - Global scaling and level adjustments for real-world applicability + """ + + def __init__(self, params: OrnsteinUhlenbeckProcessGeneratorParams): + self.params = params + self.rng = np.random.default_rng(params.global_seed) + + # == Regime switching == + def _generate_regime_sequence( + self, transition_matrix: np.ndarray, num_steps: int + ) -> np.ndarray: + regimes = np.zeros(num_steps, dtype=int) + regimes[0] = int(self.rng.integers(0, transition_matrix.shape[0])) + for i in range(1, num_steps): + current = regimes[i - 1] + regimes[i] = self.rng.choice( + transition_matrix.shape[0], p=transition_matrix[current, :] + ) + return regimes + + # == Time-varying parameter generation == + def _create_trend_function( + self, trend_config: TrendConfig, t_values: np.ndarray + ) -> Callable[[float], float]: + """Create a trend function based on the specified trend type.""" + trend_type = trend_config.trend_type + + if trend_type == TrendType.NONE: + return lambda t: 0.0 + + elif trend_type == TrendType.LINEAR: + slope = self.rng.uniform(*trend_config.linear_slope_range) + return lambda t: slope * t + + elif trend_type == TrendType.EXPONENTIAL: + rate = self.rng.uniform(*trend_config.exp_rate_range) + asymptote = self.rng.uniform(*trend_config.exp_asymptote_range) + return lambda t: asymptote * (1.0 - np.exp(-rate * t)) + + elif trend_type == TrendType.LOGISTIC: + growth_rate = self.rng.uniform(*trend_config.logistic_growth_rate_range) + capacity = self.rng.uniform(*trend_config.logistic_capacity_range) + midpoint_ratio = self.rng.uniform( + *trend_config.logistic_midpoint_ratio_range + ) + midpoint = midpoint_ratio * t_values[-1] + return lambda t: capacity / (1.0 + np.exp(-growth_rate * (t - midpoint))) + + elif trend_type == TrendType.SINUSOIDAL: + amplitude = 
self.rng.uniform(*trend_config.sin_amplitude_range) + period_ratio = self.rng.uniform(*trend_config.sin_period_ratio_range) + period = period_ratio * t_values[-1] + phase = self.rng.uniform(*trend_config.sin_phase_range) + return lambda t: amplitude * np.sin(2.0 * np.pi * t / period + phase) + + elif trend_type == TrendType.PIECEWISE_LINEAR: + return self._create_piecewise_linear_trend(trend_config, t_values) + + elif trend_type == TrendType.POLYNOMIAL: + degree = self.rng.integers(*trend_config.poly_degree_range) + coeffs = self.rng.uniform(*trend_config.poly_coeff_range, size=degree + 1) + return lambda t: sum(coeff * (t**i) for i, coeff in enumerate(coeffs)) + + else: + raise ValueError(f"Unknown trend type: {trend_type}") + + def _create_piecewise_linear_trend( + self, trend_config: TrendConfig, t_values: np.ndarray + ) -> Callable[[float], float]: + """Create a piecewise linear trend function.""" + num_segments = self.rng.integers(*trend_config.num_segments_range) + total_time = t_values[-1] + + # Create breakpoints + breakpoints = np.sort(self.rng.uniform(0, total_time, num_segments - 1)) + breakpoints = np.concatenate([[0], breakpoints, [total_time]]) + + # Create slopes for each segment + slopes = self.rng.uniform(*trend_config.segment_slope_range, size=num_segments) + + # Compute y-values at breakpoints to ensure continuity + y_values = [0.0] # Start at 0 + for i in range(num_segments): + segment_length = breakpoints[i + 1] - breakpoints[i] + y_values.append(y_values[-1] + slopes[i] * segment_length) + + def piecewise_trend(t: float) -> float: + # Find which segment t belongs to + segment_idx = np.searchsorted(breakpoints[1:], t) + segment_idx = min(segment_idx, num_segments - 1) + + # Linear interpolation within the segment + t_start = breakpoints[segment_idx] + y_start = y_values[segment_idx] + slope = slopes[segment_idx] + + return y_start + slope * (t - t_start) + + return piecewise_trend + + def _add_structural_changes( + self, base_function: Callable[[float], float], t_values: np.ndarray + ) -> Callable[[float], float]: + """Add structural changes to a base trend function.""" + if not self.params.trend_config.enable_structural_changes: + return base_function + + config = self.params.trend_config + num_changes = self.rng.integers(*config.num_structural_changes_range) + + if num_changes == 0: + return base_function + + # Generate change points ensuring minimum segment length + total_time = t_values[-1] + min_segment = config.min_segment_length * self.params.dt + + if num_changes * min_segment >= total_time: + # Too many changes requested, reduce number + num_changes = max(1, int(total_time / min_segment) - 1) + + change_times = np.sort( + self.rng.uniform(min_segment, total_time - min_segment, num_changes) + ) + change_magnitudes = self.rng.uniform( + *config.structural_change_magnitude_range, size=num_changes + ) + + def structural_trend(t: float) -> float: + base_value = base_function(t) + structural_adjustment = 0.0 + + for change_time, magnitude in zip(change_times, change_magnitudes): + if t >= change_time: + # Smooth step function for structural change + transition_width = ( + min_segment * 0.1 + ) # 10% of minimum segment for smooth transition + if transition_width > 0: + smooth_step = 1.0 / ( + 1.0 + np.exp(-10.0 * (t - change_time) / transition_width) + ) + else: + smooth_step = 1.0 if t >= change_time else 0.0 + structural_adjustment += magnitude * smooth_step + + return base_value + structural_adjustment + + return structural_trend + + def _sample_trend_type(self) 
-> TrendType: + """Sample a trend type based on realistic probabilities.""" + trend_weights = { + TrendType.NONE: 0.3, + TrendType.LINEAR: 0.15, + TrendType.EXPONENTIAL: 0.15, + TrendType.LOGISTIC: 0.1, + TrendType.SINUSOIDAL: 0.15, + TrendType.PIECEWISE_LINEAR: 0.1, + TrendType.POLYNOMIAL: 0.05, + } + + trend_types = list(trend_weights.keys()) + weights = list(trend_weights.values()) + + return self.rng.choice(trend_types, p=weights) + + # == Seasonality functions == + def _sample_seasonal_components(self) -> list: + """Sample seasonal components inspired by ou.py.""" + if not self.params.enable_seasonality: + return [] + + num_components = self.rng.integers(*self.params.num_seasonal_components_range) + components = [] + + # Sample from available periods + selected_periods = self.rng.choice( + self.params.seasonal_periods, + size=min(num_components, len(self.params.seasonal_periods)), + replace=False, + ) + + for period in selected_periods: + # Add jitter to period for realism + jittered_period = period * ( + 1.0 + + self.rng.uniform( + -self.params.seasonal_period_jitter, + self.params.seasonal_period_jitter, + ) + ) + + # Sample amplitude trend if evolution is enabled + amplitude_trend = 0.0 + if self.params.enable_seasonal_evolution: + amplitude_trend = self.rng.uniform( + *self.params.seasonal_amplitude_trend_range + ) + + component = { + "period": float(jittered_period), + "amplitude": self.rng.uniform(*self.params.seasonal_amplitude_range), + "phase": self.rng.uniform(*self.params.seasonal_phase_range), + "amplitude_trend": amplitude_trend, # For evolving seasonality + } + components.append(component) + + return components + + def _create_seasonal_function(self, components: list) -> Callable[[float], float]: + """Create a seasonal function from components.""" + if not components: + return lambda t: 0.0 + + def seasonal_func(t: float) -> float: + seasonal_value = 0.0 + for comp in components: + # Base amplitude with optional time-varying evolution + amplitude = comp["amplitude"] + if comp.get("amplitude_trend", 0.0) != 0.0: + amplitude += comp["amplitude_trend"] * t + + seasonal_value += amplitude * np.sin( + 2.0 * np.pi * t / comp["period"] + comp["phase"] + ) + return seasonal_value + + return seasonal_func + + def _sample_seasonal_functions( + self, regime_params: Dict + ) -> Dict[str, Dict[str, Callable]]: + """Create seasonal functions for each regime based on sampled components.""" + seasonal_functions = {"regime_0": {}, "regime_1": {}} + + for regime_key in ["regime_0", "regime_1"]: + regime_data = regime_params[regime_key] + + # Create seasonal function for mu if components exist + if "mu_seasonality" in regime_data: + seasonal_functions[regime_key]["mu"] = self._create_seasonal_function( + regime_data["mu_seasonality"] + ) + + # Create seasonal function for sigma if components exist + if "sigma_seasonality" in regime_data: + seasonal_functions[regime_key]["sigma"] = ( + self._create_seasonal_function(regime_data["sigma_seasonality"]) + ) + + return seasonal_functions + + # == Parameter handling == + class _ParameterManager: + def __init__( + self, + params: Dict, + num_steps: int, + trend_functions: Optional[Dict[str, Callable]] = None, + seasonal_functions: Optional[Dict[str, Callable]] = None, + ): + self.num_steps = num_steps + self.params: Dict = {} + self.trend_functions = trend_functions or {} + self.seasonal_functions = seasonal_functions or {} + + for key, value in params.items(): + # Skip seasonal component lists - handle them separately + if 
key.endswith("_seasonality"): + self.params[key] = value + elif ( + isinstance(value, (tuple, list)) + and len(value) == 2 + and not callable(value) + ): + self.params[key] = np.linspace(value[0], value[1], num_steps) + else: + self.params[key] = value + + def get(self, key: str, idx: int, t_value: float): + value = self.params.get(key) + if value is None: + return None + + # Get base parameter value + base_value = value + if isinstance(value, np.ndarray): + base_value = value[idx] + elif callable(value): + base_value = value(t_value) + + # Apply trend if available + if key in self.trend_functions: + trend_adjustment = self.trend_functions[key](t_value) + base_value += trend_adjustment + + # Apply seasonality if available + if key in self.seasonal_functions: + seasonal_adjustment = self.seasonal_functions[key](t_value) + base_value += seasonal_adjustment + + return base_value + + def _get_params_from_managers(self, idx: int, t_value: float): + if self._regime_sequence is not None: + current_regime = int(self._regime_sequence[idx]) + return self._param_managers[current_regime] + return self._param_manager + + # == Single-step OU update == + def _step_ou( + self, x_value: float, t_value: float, idx: int, dt: float, dW_value: float + ) -> float: + manager = self._get_params_from_managers(idx, t_value) + theta = float(manager.get("theta", idx, t_value)) + mu = float(manager.get("mu", idx, t_value)) + sigma = float(manager.get("sigma", idx, t_value)) + return float(x_value + theta * (mu - x_value) * dt + sigma * dW_value) + + # == Sampling primitives == + def _sample_regime_parameters(self) -> Dict[str, Dict[str, float]]: + """Sample base regime parameters (before applying trends and seasonality).""" + p = self.params + regime0 = { + "theta": self.rng.uniform( + p.regime0_theta_range[0], p.regime0_theta_range[1] + ), + "mu": self.rng.normal(p.regime0_mu_mean_std[0], p.regime0_mu_mean_std[1]), + "sigma": float( + self.rng.lognormal( + p.regime0_sigma_lognormal_params[0], + p.regime0_sigma_lognormal_params[1], + ) + ), + "x0": self.rng.normal(p.x0_mean_std[0], p.x0_mean_std[1]), + } + regime1 = { + "theta": self.rng.uniform( + p.regime1_theta_range[0], p.regime1_theta_range[1] + ), + "mu": self.rng.normal(p.regime1_mu_mean_std[0], p.regime1_mu_mean_std[1]), + "sigma": float( + self.rng.lognormal( + p.regime1_sigma_lognormal_params[0], + p.regime1_sigma_lognormal_params[1], + ) + ), + "x0": self.rng.normal(p.x0_mean_std[0], p.x0_mean_std[1]), + } + + # Add seasonal components if enabled + if self.params.enable_seasonality: + # Sample seasonal components for mu (mean) in each regime + if self.rng.random() < self.params.mu_seasonality_probability: + regime0["mu_seasonality"] = self._sample_seasonal_components() + regime1["mu_seasonality"] = self._sample_seasonal_components() + + # Sample seasonal components for sigma (volatility) in each regime + if self.rng.random() < self.params.sigma_seasonality_probability: + regime0["sigma_seasonality"] = self._sample_seasonal_components() + regime1["sigma_seasonality"] = self._sample_seasonal_components() + + return {"regime_0": regime0, "regime_1": regime1} + + def _sample_trend_functions( + self, t_values: np.ndarray + ) -> Dict[str, Dict[str, Callable]]: + """Sample trend functions for each parameter and regime.""" + trend_functions = {"regime_0": {}, "regime_1": {}} + + # Sample trend types for each parameter + for param in ["mu", "theta", "sigma"]: + prob_key = f"{param}_trend_probability" + if hasattr(self.params, prob_key): + trend_prob = 
getattr(self.params, prob_key) + + for regime in ["regime_0", "regime_1"]: + if self.rng.random() < trend_prob: + # Sample trend type and create trend config using the global config as base + trend_type = self._sample_trend_type() + print(f"Sampling trend type: {trend_type}") + trend_config = replace( + self.params.trend_config, trend_type=trend_type + ) + + # Create trend function + base_trend = self._create_trend_function(trend_config, t_values) + + # Add structural changes if enabled + final_trend = self._add_structural_changes(base_trend, t_values) + + trend_functions[regime][param] = final_trend + + return trend_functions + + def _generate_fractional_brownian_motion( + self, num_steps: int, hurst: float, dt: float + ) -> np.ndarray: + """Generate fractional Brownian motion for long-term memory effects.""" + if not (0 < hurst < 1): + raise ValueError("Hurst exponent must be between 0 and 1") + + # Simple approximation using cumulative sum of correlated Gaussian noise + # For more accurate fBm, consider using more sophisticated methods + noise = self.rng.normal(0, 1, num_steps) + + # Apply fractional integration (simplified) + if hurst != 0.5: + # Create correlation structure + correlations = np.zeros(num_steps) + for k in range(num_steps): + if k == 0: + correlations[k] = 1.0 + else: + correlations[k] = 0.5 * ( + (k + 1) ** (2 * hurst) + - 2 * k ** (2 * hurst) + + (k - 1) ** (2 * hurst) + ) + + # Apply convolution (simplified approach) + correlated_noise = np.convolve( + noise, correlations[: min(100, num_steps)], mode="same" + ) + return correlated_noise * np.sqrt(dt) + + return noise * np.sqrt(dt) + + def _sample_transition_matrix(self) -> np.ndarray: + p00 = float( + self.rng.uniform(self.params.p00_range[0], self.params.p00_range[1]) + ) + p11 = float( + self.rng.uniform(self.params.p11_range[0], self.params.p11_range[1]) + ) + transition_matrix = np.array([[p00, 1.0 - p00], [1.0 - p11, p11]], dtype=float) + return transition_matrix + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """Generate a time series with enhanced realism through time-varying parameters.""" + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + num_steps = int(self.params.length) + dt = float(self.params.dt) + t_values = np.linspace(0.0, dt * (num_steps - 1), num_steps) + + # Sample base regime parameters + sampled_regime_params = self._sample_regime_parameters() + + # Sample trend functions for time-varying behavior + trend_functions = self._sample_trend_functions(t_values) + + # Sample seasonal functions for each regime + seasonal_functions = self._sample_seasonal_functions(sampled_regime_params) + + # Generate regime switching + transition_matrix = self._sample_transition_matrix() + self._regime_sequence = self._generate_regime_sequence( + transition_matrix, num_steps + ) + + # Create parameter managers with trend and seasonal support + self._param_managers = [ + OrnsteinUhlenbeckProcessGenerator._ParameterManager( + sampled_regime_params["regime_0"], + num_steps, + trend_functions["regime_0"], + seasonal_functions["regime_0"], + ), + OrnsteinUhlenbeckProcessGenerator._ParameterManager( + sampled_regime_params["regime_1"], + num_steps, + trend_functions["regime_1"], + seasonal_functions["regime_1"], + ), + ] + self._param_manager = None + + # Generate driving noise (with optional long-term memory) + if self.params.enable_long_memory: + hurst = self.rng.uniform(*self.params.hurst_exponent_range) + dW = 
self._generate_fractional_brownian_motion(num_steps - 1, hurst, dt) + else: + dW = self.rng.normal(0.0, np.sqrt(dt), size=num_steps - 1) + + # Initialize path + initial_regime = int(self._regime_sequence[0]) + x0_value = float(self._param_managers[initial_regime].get("x0", 0, 0.0)) + path = np.zeros(num_steps, dtype=float) + path[0] = x0_value + + # Generate the OU process path + for idx in range(num_steps - 1): + path[idx + 1] = self._step_ou(path[idx], t_values[idx], idx, dt, dW[idx]) + + # Apply global transformations for real-world applicability + path = self._apply_global_transformations(path) + + # Add measurement noise if specified + if self.params.measurement_noise_std_range[1] > 0: + noise_std = self.rng.uniform(*self.params.measurement_noise_std_range) + measurement_noise = self.rng.normal(0, noise_std, size=num_steps) + path += measurement_noise + + return path + + def _apply_global_transformations(self, path: np.ndarray) -> np.ndarray: + """Apply global level and scale transformations to make series more realistic.""" + # Sample global parameters + global_level = self.rng.uniform(*self.params.global_level_range) + global_scale = self.rng.uniform(*self.params.global_scale_range) + + # Apply transformations + transformed_path = global_level + global_scale * path + + return transformed_path diff --git a/src/synthetic_generation/ornstein_uhlenbeck_process/ou_generator_wrapper.py b/src/synthetic_generation/ornstein_uhlenbeck_process/ou_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..4993ff3154d7db61b8b3d6219b8eb4ffff611597 --- /dev/null +++ b/src/synthetic_generation/ornstein_uhlenbeck_process/ou_generator_wrapper.py @@ -0,0 +1,40 @@ +from typing import Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import ( + OrnsteinUhlenbeckProcessGeneratorParams, +) +from src.synthetic_generation.ornstein_uhlenbeck_process.ou_generator import ( + OrnsteinUhlenbeckProcessGenerator, +) + + +class OrnsteinUhlenbeckProcessGeneratorWrapper(GeneratorWrapper): + """Wrapper for the regime-switching OU generator.""" + + def __init__(self, params: OrnsteinUhlenbeckProcessGeneratorParams): + super().__init__(params) + self.generator = OrnsteinUhlenbeckProcessGenerator(params) + + def generate_batch( + self, batch_size: int, seed: Optional[int] = None + ) -> TimeSeriesContainer: + if seed is not None: + self._set_random_seeds(seed) + + sampled_params = self._sample_parameters(batch_size) + + values = [] + for i in range(batch_size): + series_seed = (seed + i) if seed is not None else None + series = self.generator.generate_time_series(series_seed) + values.append(series) + + return TimeSeriesContainer( + values=np.array(values), + start=sampled_params["start"], + frequency=sampled_params["frequency"], + ) diff --git a/src/synthetic_generation/sawtooth/__init__.py b/src/synthetic_generation/sawtooth/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9f58dc49af91fe77cef6376db7b307b4b170b277 --- /dev/null +++ b/src/synthetic_generation/sawtooth/__init__.py @@ -0,0 +1,6 @@ +"""Sawtooth wave synthetic time series generation module.""" + +from .sawtooth_generator import SawToothGenerator +from .sawtooth_generator_wrapper import SawToothGeneratorWrapper + +__all__ = ["SawToothGenerator", "SawToothGeneratorWrapper"] diff --git a/src/synthetic_generation/sawtooth/sawtooth_generator.py 
b/src/synthetic_generation/sawtooth/sawtooth_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..612fc380888c6f4d1ac98ca70563792e825f8b1c --- /dev/null +++ b/src/synthetic_generation/sawtooth/sawtooth_generator.py @@ -0,0 +1,199 @@ +from typing import Dict, Optional, Tuple, Union + +import numpy as np + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator + + +class SawToothGenerator(AbstractTimeSeriesGenerator): + """ + Generate synthetic univariate time series using sawtooth waves with configurable parameters. + + Each series is a sawtooth wave with random amplitude, frequency, phase. The sawtooth direction + is randomly flipped 50% of the time to create both upward-ramping and downward-ramping patterns. + The generator emphasizes straight line components with minimal wiggly seasonality for cleaner patterns. + """ + + def __init__( + self, + length: int = 2048, + periods: Tuple[int, int] = (3, 6), + amplitude_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.5, 3.0), + phase_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.0, 1.0), + trend_slope_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (-0.001, 0.001), + seasonality_amplitude_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.0, 0.02), + add_trend: bool = True, + add_seasonality: bool = True, + random_seed: Optional[int] = None, + ): + """ + Parameters + ---------- + length : int, optional + Number of time steps per series (default: 2048). + periods : tuple, optional + (min_periods, max_periods) for number of sawtooth periods in the series (default: (3, 6)). + amplitude_range : tuple, optional + (min_amplitude, max_amplitude) or ((min_min, min_max), (max_min, max_max)) for sawtooth wave amplitude (default: (0.5, 3.0)). + phase_range : tuple, optional + (min_phase, max_phase) or ((min_min, min_max), (max_min, max_max)) for sawtooth wave phase as fraction of period (default: (0.0, 1.0)). + trend_slope_range : tuple, optional + (min_slope, max_slope) or ((min_min, min_max), (max_min, max_max)) for linear trend slope, emphasizing straight line components (default: (-0.001, 0.001)). + seasonality_amplitude_range : tuple, optional + (min_amplitude, max_amplitude) or ((min_min, min_max), (max_min, max_max)) for minimal seasonal component amplitude to reduce wiggly lines (default: (0.0, 0.02)). + add_trend : bool, optional + Whether to add linear trend component (default: True). + add_seasonality : bool, optional + Whether to add minimal seasonal component (default: True). + random_seed : int, optional + Seed for the random number generator. 
+ """ + self.length = length + self.periods = periods + self.amplitude_range = amplitude_range + self.phase_range = phase_range + self.trend_slope_range = trend_slope_range + self.seasonality_amplitude_range = seasonality_amplitude_range + self.add_trend = add_trend + self.add_seasonality = add_seasonality + self.rng = np.random.default_rng(random_seed) + + def _sample_range_parameter(self, param_range): + """Sample a range parameter that could be a fixed tuple or a tuple of ranges.""" + if isinstance(param_range, tuple) and len(param_range) == 2: + # Check if it's a range of ranges: ((min_min, min_max), (max_min, max_max)) + if isinstance(param_range[0], tuple) and isinstance(param_range[1], tuple): + min_val = self.rng.uniform(param_range[0][0], param_range[0][1]) + max_val = self.rng.uniform(param_range[1][0], param_range[1][1]) + # Ensure min_val <= max_val + if min_val > max_val: + min_val, max_val = max_val, min_val + return (min_val, max_val) + else: + # Fixed range + return param_range + else: + raise ValueError(f"Invalid range parameter format: {param_range}") + + def _generate_sawtooth( + self, + time_idx: np.ndarray, + period: float, + amplitude: float, + phase: float, + flip: bool = False, + ) -> np.ndarray: + """Generate a sawtooth wave using period instead of frequency, optionally flipped.""" + # Convert time indices to actual time (assuming unit time steps) + time = time_idx.astype(float) + + # Calculate frequency from period + frequency = 1.0 / period + + # Calculate cycles with phase shift + cycles = frequency * time + phase + + # Generate sawtooth wave: linear rise from 0 to 1, then drop back to 0 + if flip: + # Flipped sawtooth: linear drop from 1 to 0, then jump back to 1 + sawtooth = amplitude * (1.0 - (cycles - np.floor(cycles))) + else: + # Normal sawtooth: linear rise from 0 to 1, then drop back to 0 + sawtooth = amplitude * (cycles - np.floor(cycles)) + + return sawtooth + + def _generate_trend(self, time_idx: np.ndarray, slope: float) -> np.ndarray: + """Generate linear trend component.""" + return slope * time_idx.astype(float) + + def _generate_seasonality( + self, time_idx: np.ndarray, amplitude: float, period: float + ) -> np.ndarray: + """Generate seasonal component using sine wave.""" + time = time_idx.astype(float) + return amplitude * np.sin(2 * np.pi * time / period) + + def generate_time_series( + self, random_seed: Optional[int] = None + ) -> Dict[str, np.ndarray]: + """ + Generate a single univariate sawtooth wave time series. + + Parameters + ---------- + random_seed : int, optional + Random seed for reproducible generation. 
+ + Returns + ------- + np.ndarray + Shape: [seq_len] + """ + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + # Sample sawtooth wave parameters + sampled_amplitude_range = self._sample_range_parameter(self.amplitude_range) + sampled_phase_range = self._sample_range_parameter(self.phase_range) + + amplitude = self.rng.uniform( + sampled_amplitude_range[0], sampled_amplitude_range[1] + ) + phase = self.rng.uniform(sampled_phase_range[0], sampled_phase_range[1]) + + # Sample number of periods and calculate period length + num_periods = self.rng.uniform(self.periods[0], self.periods[1]) + sawtooth_period = self.length / num_periods + + # Calculate seasonality period (use longer period for minimal seasonality) + seasonality_period = self.length / self.rng.uniform( + 2.0, 4.0 + ) # 2-4 seasonality cycles + + # Randomly decide whether to flip the sawtooth wave (50% chance) + flip_sawtooth = self.rng.random() < 0.5 + + # Generate time indices + time_idx = np.arange(self.length) + + # Generate base sawtooth wave + values = self._generate_sawtooth( + time_idx, sawtooth_period, amplitude, phase, flip_sawtooth + ) + + # Add trend if enabled + if self.add_trend: + sampled_trend_range = self._sample_range_parameter(self.trend_slope_range) + trend_slope = self.rng.uniform( + sampled_trend_range[0], sampled_trend_range[1] + ) + trend = self._generate_trend(time_idx, trend_slope) + values += trend + + # Add minimal seasonality if enabled + if self.add_seasonality: + sampled_seasonality_amplitude_range = self._sample_range_parameter( + self.seasonality_amplitude_range + ) + + seasonality_amplitude = self.rng.uniform( + sampled_seasonality_amplitude_range[0], + sampled_seasonality_amplitude_range[1], + ) + + if seasonality_amplitude > 0: # Only add seasonality if amplitude > 0 + seasonality = self._generate_seasonality( + time_idx, seasonality_amplitude, seasonality_period + ) + values += seasonality + + return values diff --git a/src/synthetic_generation/sawtooth/sawtooth_generator_wrapper.py b/src/synthetic_generation/sawtooth/sawtooth_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..2a8c2f326f7ab43a7657723cf93e840845596da2 --- /dev/null +++ b/src/synthetic_generation/sawtooth/sawtooth_generator_wrapper.py @@ -0,0 +1,97 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import SawToothGeneratorParams +from src.synthetic_generation.sawtooth.sawtooth_generator import SawToothGenerator + + +class SawToothGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for SawToothGenerator to generate batches of multivariate time series data + by stacking multiple univariate sawtooth wave series. Accepts a SawToothGeneratorParams + dataclass for configuration. + """ + + def __init__(self, params: SawToothGeneratorParams): + super().__init__(params) + self.params: SawToothGeneratorParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + """ + Sample parameter values for batch generation with SawToothGenerator. + + Returns + ------- + Dict[str, Any] + Dictionary containing sampled parameter values. 
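Editorial aside — rough numbers (assumed, not from the diff) for the period bookkeeping in generate_time_series above: the sawtooth period follows from the sampled number of periods, and the extra seasonality runs only a few slow cycles.

# Illustration only; the draw of 4.5 periods and the 3.0 seasonality cycles are made up.
length = 2048
num_periods = 4.5                       # e.g. rng.uniform(3, 6)
sawtooth_period = length / num_periods  # ~455 steps per ramp
seasonality_period = length / 3.0       # e.g. rng.uniform(2.0, 4.0) slow cycles
print(sawtooth_period, seasonality_period)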
+ """ + params = super()._sample_parameters(batch_size) + params.update( + { + "length": self.params.length, + "periods": self.params.periods, + "amplitude_range": self.params.amplitude_range, + "phase_range": self.params.phase_range, + "trend_slope_range": self.params.trend_slope_range, + "seasonality_amplitude_range": self.params.seasonality_amplitude_range, + "add_trend": self.params.add_trend, + "add_seasonality": self.params.add_seasonality, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + """ + Generate a batch of synthetic multivariate time series using SawToothGenerator. + + Parameters + ---------- + batch_size : int + Number of time series to generate. + seed : int, optional + Random seed for this batch (default: None). + params : Dict[str, Any], optional + Pre-sampled parameters to use. If None, parameters will be sampled. + + Returns + ------- + TimeSeriesContainer + A container with the generated time series data. + """ + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = SawToothGenerator( + length=params["length"], + periods=params["periods"], + amplitude_range=params["amplitude_range"], + phase_range=params["phase_range"], + trend_slope_range=params["trend_slope_range"], + seasonality_amplitude_range=params["seasonality_amplitude_range"], + add_trend=params["add_trend"], + add_seasonality=params["add_seasonality"], + random_seed=seed, + ) + + batch_values = [] + + for i in range(batch_size): + batch_seed = None if seed is None else seed + i + values = generator.generate_time_series(random_seed=batch_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/sine_waves/__init__.py b/src/synthetic_generation/sine_waves/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..432031fa4d1102f12c971df32aeb7db641c76432 --- /dev/null +++ b/src/synthetic_generation/sine_waves/__init__.py @@ -0,0 +1,6 @@ +"""Sine wave synthetic time series generation module.""" + +from .sine_wave_generator import SineWaveGenerator +from .sine_wave_generator_wrapper import SineWaveGeneratorWrapper + +__all__ = ["SineWaveGenerator", "SineWaveGeneratorWrapper"] diff --git a/src/synthetic_generation/sine_waves/sine_wave_generator.py b/src/synthetic_generation/sine_waves/sine_wave_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..4f6399578411a05229870fd532c9520eda277647 --- /dev/null +++ b/src/synthetic_generation/sine_waves/sine_wave_generator.py @@ -0,0 +1,249 @@ +from typing import Dict, List, Optional, Tuple, Union + +import numpy as np + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator + + +class SineWaveGenerator(AbstractTimeSeriesGenerator): + """ + Generate synthetic univariate time series using sinusoidal patterns with configurable parameters. + + This generator creates diverse sinusoidal series with: + - Multiple sinusoidal components (seasonalities) + - Linear trends + - Small additive noise + - Time-varying parameters for realism + + The output maintains clear sinusoidal characteristics while adding realistic variations. 
+ """ + + def __init__( + self, + length: int = 1024, + # Core sinusoidal parameters + num_components_range: Tuple[int, int] = (1, 3), + period_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (10, 200), + amplitude_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0.5, 3.0), + phase_range: Union[ + Tuple[float, float], Tuple[Tuple[float, float], Tuple[float, float]] + ] = (0, 2 * np.pi), + # Trend parameters + trend_slope_range: Tuple[float, float] = (-0.01, 0.01), + base_level_range: Tuple[float, float] = (0.0, 2.0), + # Noise parameters + noise_probability: float = 0.7, # Probability of adding noise (70% of series have noise) + noise_level_range: Tuple[float, float] = ( + 0.05, + 0.2, + ), # Small noise as fraction of amplitude (when noise is applied) + # Time-varying parameters (subtle) + enable_amplitude_modulation: bool = True, + amplitude_modulation_strength: float = 0.1, # Max 10% amplitude variation + enable_frequency_modulation: bool = True, + frequency_modulation_strength: float = 0.05, # Max 5% frequency variation + random_seed: Optional[int] = None, + ): + """ + Parameters + ---------- + length : int, optional + Number of time steps per series (default: 1024). + num_components_range : tuple, optional + Range for number of sinusoidal components to combine (default: (1, 3)). + period_range : tuple, optional + Period range for sinusoidal components (default: (10, 200)). + amplitude_range : tuple, optional + Amplitude range for sinusoidal components (default: (0.5, 3.0)). + phase_range : tuple, optional + Phase range for sinusoidal components (default: (0, 2*pi)). + trend_slope_range : tuple, optional + Range for linear trend slope (default: (-0.01, 0.01)). + base_level_range : tuple, optional + Range for base level offset (default: (0.0, 2.0)). + noise_probability : float, optional + Probability of adding noise to a series (default: 0.7). + noise_level_range : tuple, optional + Range for noise level as fraction of total amplitude when noise is applied (default: (0.05, 0.2)). + enable_amplitude_modulation : bool, optional + Whether to enable subtle amplitude modulation (default: True). + amplitude_modulation_strength : float, optional + Strength of amplitude modulation (default: 0.1). + enable_frequency_modulation : bool, optional + Whether to enable subtle frequency modulation (default: True). + frequency_modulation_strength : float, optional + Strength of frequency modulation (default: 0.05). + random_seed : int, optional + Seed for the random number generator. 
+ """ + self.length = length + self.num_components_range = num_components_range + self.period_range = period_range + self.amplitude_range = amplitude_range + self.phase_range = phase_range + self.trend_slope_range = trend_slope_range + self.base_level_range = base_level_range + self.noise_probability = noise_probability + self.noise_level_range = noise_level_range + self.enable_amplitude_modulation = enable_amplitude_modulation + self.amplitude_modulation_strength = amplitude_modulation_strength + self.enable_frequency_modulation = enable_frequency_modulation + self.frequency_modulation_strength = frequency_modulation_strength + self.rng = np.random.default_rng(random_seed) + + def _sample_range_parameter(self, param_range): + """Sample a range parameter that could be a fixed tuple or a tuple of ranges.""" + if isinstance(param_range, tuple) and len(param_range) == 2: + # Check if it's a range of ranges: ((min_min, min_max), (max_min, max_max)) + if isinstance(param_range[0], tuple) and isinstance(param_range[1], tuple): + min_val = self.rng.uniform(param_range[0][0], param_range[0][1]) + max_val = self.rng.uniform(param_range[1][0], param_range[1][1]) + # Ensure min_val <= max_val + if min_val > max_val: + min_val, max_val = max_val, min_val + return (min_val, max_val) + else: + # Fixed range + return param_range + else: + raise ValueError(f"Invalid range parameter format: {param_range}") + + def _sample_scalar_parameter(self, param): + """Sample a scalar parameter that could be a fixed value or a range.""" + if isinstance(param, (int, float)): + return param + elif isinstance(param, tuple) and len(param) == 2: + return self.rng.uniform(param[0], param[1]) + else: + raise ValueError(f"Invalid scalar parameter format: {param}") + + def _generate_sinusoidal_components( + self, t_array: np.ndarray, components: List[Dict] + ) -> np.ndarray: + """Generate sinusoidal signal from multiple components.""" + signal = np.zeros_like(t_array) + + for comp in components: + amplitude = comp["amplitude"] + period = comp["period"] + phase = comp["phase"] + + # Basic sinusoidal component + base_signal = amplitude * np.sin(2 * np.pi * t_array / period + phase) + + # Apply subtle amplitude modulation if enabled + if self.enable_amplitude_modulation: + # Use a slow modulation (period is 5-10x the main period) + mod_period = period * self.rng.uniform(5, 10) + mod_phase = self.rng.uniform(0, 2 * np.pi) + amp_modulation = 1 + self.amplitude_modulation_strength * np.sin( + 2 * np.pi * t_array / mod_period + mod_phase + ) + base_signal *= amp_modulation + + # Apply subtle frequency modulation if enabled + if self.enable_frequency_modulation: + # Frequency modulation creates slight warping in the sine wave + mod_period = period * self.rng.uniform(8, 15) + mod_phase = self.rng.uniform(0, 2 * np.pi) + freq_modulation = self.frequency_modulation_strength * np.sin( + 2 * np.pi * t_array / mod_period + mod_phase + ) + # Apply frequency modulation by modifying the phase + instantaneous_freq = 2 * np.pi / period * (1 + freq_modulation) + modulated_phase = ( + np.cumsum(instantaneous_freq) * (t_array[1] - t_array[0]) + phase + ) + base_signal = amplitude * np.sin(modulated_phase) + + # Apply amplitude modulation on top if both are enabled + if self.enable_amplitude_modulation: + mod_period_amp = period * self.rng.uniform(5, 10) + mod_phase_amp = self.rng.uniform(0, 2 * np.pi) + amp_modulation = 1 + self.amplitude_modulation_strength * np.sin( + 2 * np.pi * t_array / mod_period_amp + mod_phase_amp + ) + base_signal *= 
amp_modulation + + signal += base_signal + + return signal + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """ + Generate a single univariate sinusoidal time series with trends and noise. + + Parameters + ---------- + random_seed : int, optional + Random seed for reproducible generation. + + Returns + ------- + np.ndarray + Shape: [seq_len] + """ + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + # Generate time array + t_array = np.linspace(0, self.length - 1, self.length) + + # Sample number of sinusoidal components + num_components = self.rng.integers( + self.num_components_range[0], self.num_components_range[1] + 1 + ) + + # Sample parameters for each component + components = [] + total_amplitude = 0 + + for _ in range(num_components): + sampled_period_range = self._sample_range_parameter(self.period_range) + sampled_amplitude_range = self._sample_range_parameter(self.amplitude_range) + sampled_phase_range = self._sample_range_parameter(self.phase_range) + + period = self.rng.uniform(sampled_period_range[0], sampled_period_range[1]) + amplitude = self.rng.uniform( + sampled_amplitude_range[0], sampled_amplitude_range[1] + ) + phase = self.rng.uniform(sampled_phase_range[0], sampled_phase_range[1]) + + components.append( + {"period": period, "amplitude": amplitude, "phase": phase} + ) + total_amplitude += amplitude + + # Generate sinusoidal signal + signal = self._generate_sinusoidal_components(t_array, components) + + # Add linear trend + trend_slope = self.rng.uniform( + self.trend_slope_range[0], self.trend_slope_range[1] + ) + trend = trend_slope * t_array + + # Add base level + base_level = self.rng.uniform( + self.base_level_range[0], self.base_level_range[1] + ) + + # Combine signal, trend, and base level + values = signal + trend + base_level + + # Add noise with specified probability (70% of series have noise, 30% are noise-free) + if self.rng.random() < self.noise_probability: + noise_level = self.rng.uniform( + self.noise_level_range[0], self.noise_level_range[1] + ) + noise_std = ( + noise_level * total_amplitude + ) # Noise proportional to total amplitude + noise = self.rng.normal(0, noise_std, size=self.length) + values += noise + + return values diff --git a/src/synthetic_generation/sine_waves/sine_wave_generator_wrapper.py b/src/synthetic_generation/sine_waves/sine_wave_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..aa1b548e7d0c659edc0575cb2730b0bbce09b517 --- /dev/null +++ b/src/synthetic_generation/sine_waves/sine_wave_generator_wrapper.py @@ -0,0 +1,114 @@ +from typing import Any, Dict, Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import SineWaveGeneratorParams +from src.synthetic_generation.sine_waves.sine_wave_generator import SineWaveGenerator + + +class SineWaveGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for SineWaveGenerator to generate batches of multivariate time series data + by stacking multiple univariate sine wave series. Accepts a SineWaveGeneratorParams + dataclass for configuration. + """ + + def __init__(self, params: SineWaveGeneratorParams): + super().__init__(params) + self.params: SineWaveGeneratorParams = params + + def _sample_parameters(self, batch_size: int) -> Dict[str, Any]: + """ + Sample parameter values for batch generation with SineWaveGenerator. 
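Editorial aside — a standalone sketch of the probabilistic, amplitude-proportional noise step in generate_time_series above; the total amplitude and probability values are assumptions.

# Sketch: roughly 70% of series receive noise whose standard deviation is a small
# fraction of the summed component amplitudes.
import numpy as np

rng = np.random.default_rng(2)
values = np.zeros(1024)
total_amplitude = 2.3          # assumed sum of sampled component amplitudes
noise_probability = 0.7

if rng.random() < noise_probability:
    noise_level = rng.uniform(0.05, 0.2)
    values += rng.normal(0.0, noise_level * total_amplitude, size=values.size)
print(values.std().round(3))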
+ + Returns + ------- + Dict[str, Any] + Dictionary containing sampled parameter values. + """ + params = super()._sample_parameters(batch_size) + params.update( + { + "length": self.params.length, + # Core sinusoidal parameters + "num_components_range": self.params.num_components_range, + "period_range": self.params.period_range, + "amplitude_range": self.params.amplitude_range, + "phase_range": self.params.phase_range, + # Trend parameters + "trend_slope_range": self.params.trend_slope_range, + "base_level_range": self.params.base_level_range, + # Noise parameters + "noise_probability": self.params.noise_probability, + "noise_level_range": self.params.noise_level_range, + # Time-varying parameters (subtle modulation) + "enable_amplitude_modulation": self.params.enable_amplitude_modulation, + "amplitude_modulation_strength": self.params.amplitude_modulation_strength, + "enable_frequency_modulation": self.params.enable_frequency_modulation, + "frequency_modulation_strength": self.params.frequency_modulation_strength, + } + ) + return params + + def generate_batch( + self, + batch_size: int, + seed: Optional[int] = None, + params: Optional[Dict[str, Any]] = None, + ) -> TimeSeriesContainer: + """ + Generate a batch of synthetic multivariate time series using SineWaveGenerator. + + Parameters + ---------- + batch_size : int + Number of time series to generate. + seed : int, optional + Random seed for this batch (default: None). + params : Dict[str, Any], optional + Pre-sampled parameters to use. If None, parameters will be sampled. + + Returns + ------- + BatchTimeSeriesContainer + A container with the generated time series data. + """ + if seed is not None: + self._set_random_seeds(seed) + if params is None: + params = self._sample_parameters(batch_size) + + generator = SineWaveGenerator( + length=params["length"], + # Core sinusoidal parameters + num_components_range=params["num_components_range"], + period_range=params["period_range"], + amplitude_range=params["amplitude_range"], + phase_range=params["phase_range"], + # Trend parameters + trend_slope_range=params["trend_slope_range"], + base_level_range=params["base_level_range"], + # Noise parameters + noise_probability=params["noise_probability"], + noise_level_range=params["noise_level_range"], + # Time-varying parameters (subtle modulation) + enable_amplitude_modulation=params["enable_amplitude_modulation"], + amplitude_modulation_strength=params["amplitude_modulation_strength"], + enable_frequency_modulation=params["enable_frequency_modulation"], + frequency_modulation_strength=params["frequency_modulation_strength"], + random_seed=seed, + ) + + batch_values = [] + for i in range(batch_size): + batch_seed = None if seed is None else seed + i + values = generator.generate_time_series(random_seed=batch_seed) + batch_values.append(values) + + return TimeSeriesContainer( + values=np.array(batch_values), + start=params["start"], + frequency=params["frequency"], + ) diff --git a/src/synthetic_generation/spikes/spikes_generator.py b/src/synthetic_generation/spikes/spikes_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..fd694e975a888fb90c9fd81ff015b938916d68c3 --- /dev/null +++ b/src/synthetic_generation/spikes/spikes_generator.py @@ -0,0 +1,333 @@ +from typing import Dict, List, Optional, Tuple, Union + +import numpy as np + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.generator_params import SpikesGeneratorParams, SpikeShape + + +class 
SpikesGenerator(AbstractTimeSeriesGenerator): + """Generates spike-based time series with V-shaped or chopped spikes.""" + + def __init__(self, params: SpikesGeneratorParams): + self.params = params + np.random.seed(params.global_seed) + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """Generate a time series with baseline and random spikes.""" + if random_seed is not None: + np.random.seed(random_seed) + + # Initialize signal + signal = np.full( + self.params.length, + self._sample_scalar(self.params.baseline), + dtype=np.float64, + ) + series_params = self._sample_series_parameters() + + if series_params["spike_count"] > 0: + positions = self._generate_spike_positions( + series_params["spike_count"], series_params["burst_mode"] + ) + for pos in positions: + spike = self._generate_single_spike( + series_params["amplitude"], + series_params["angle_deg"], + series_params["spike_shapes"], + series_params["spikes_above_baseline"], + ) + self._inject_spike(signal, spike, pos) + + if series_params["add_noise"] and self.params.noise_std > 0: + signal += self._generate_colored_noise() + + return signal + + def _sample_series_parameters(self) -> Dict: + """Sample consistent parameters for the entire series.""" + series_type = np.random.choice( + list(self.params.series_type_probabilities.keys()), + p=list(self.params.series_type_probabilities.values()), + ) + + spike_shapes = [] + if series_type == "v_only": + shape = ( + SpikeShape.V_SHAPE + if np.random.random() < 0.5 + else SpikeShape.INVERTED_V + ) + spike_shapes = [shape] # Only one shape type for consistency + elif series_type == "chopped_only": + shape = ( + SpikeShape.CHOPPED_V + if np.random.random() < 0.5 + else SpikeShape.CHOPPED_INVERTED_V + ) + spike_shapes = [shape] + else: # mixed + above = np.random.random() < self.params.spikes_above_baseline_probability + shape1 = SpikeShape.V_SHAPE if above else SpikeShape.INVERTED_V + shape2 = SpikeShape.CHOPPED_V if above else SpikeShape.CHOPPED_INVERTED_V + spike_shapes = [shape1, shape2] + + # Determine mode and corresponding spike count + burst_mode = np.random.random() < self.params.burst_mode_probability + if burst_mode: + spike_count_range = self.params.spike_count_burst + else: + spike_count_range = self.params.spike_count_uniform + + return { + "add_noise": np.random.random() < self.params.noise_probability, + "burst_mode": burst_mode, + "spike_count": self._sample_scalar(spike_count_range, is_int=True), + "amplitude": self._sample_scalar(self.params.spike_amplitude), + "angle_deg": np.random.uniform(*self.params.spike_angle_range), + "spikes_above_baseline": np.random.random() + < self.params.spikes_above_baseline_probability, + "spike_shapes": spike_shapes, + } + + def _sample_scalar( + self, value: Union[float, int, Tuple], is_int: bool = False + ) -> float: + """Sample a scalar from a value or range.""" + if isinstance(value, tuple): + return np.random.randint(*value) if is_int else np.random.uniform(*value) + return float(value) + + def _generate_colored_noise(self) -> np.ndarray: + """Generate colored noise with brown/pink characteristics.""" + white_noise = np.random.normal(0, 1, self.params.length) + fft_noise = np.fft.fft(white_noise) + freqs = np.fft.fftfreq(self.params.length) + freqs[0] = freqs[1] # Avoid DC division by zero + + filter_response = 1.0 / (np.abs(freqs) ** (self.params.brown_noise_alpha / 2.0)) + filter_response[np.abs(freqs) > self.params.noise_cutoff_freq] *= np.exp( + -( + ( + ( + np.abs(freqs)[np.abs(freqs) > 
self.params.noise_cutoff_freq] + - self.params.noise_cutoff_freq + ) + / self.params.noise_cutoff_freq + ) + ** 2 + ) + ) + + colored_noise = np.real(np.fft.ifft(fft_noise * filter_response)) + return colored_noise / np.std(colored_noise) * self.params.noise_std + + def _generate_spike_positions( + self, spike_count: int, burst_mode: bool + ) -> List[int]: + """Generate spike positions with minimum separation.""" + if spike_count == 0: + return [] + + # Adjust spike count based on available space + min_separation = self.params.max_spike_width + self.params.min_spike_margin + margin = self.params.max_spike_width + usable_length = self.params.length - 2 * margin + max_spikes = max(1, usable_length // min_separation) + spike_count = min(spike_count, max_spikes) + + if burst_mode: + burst_width = max( + spike_count * min_separation, + int( + np.random.uniform(*self.params.burst_width_fraction) + * self.params.length + ), + ) + burst_width = min(burst_width, usable_length) + burst_start = np.random.randint( + margin, self.params.length - burst_width - margin + 1 + ) + positions = self._distribute_positions_burst_mode( + spike_count, burst_start, burst_start + burst_width + ) + else: + positions = self._distribute_positions_spread_mode( + spike_count, margin, self.params.length - margin + ) + + return self._enforce_separation(positions, min_separation, margin) + + def _distribute_positions_spread_mode( + self, count: int, start: int, end: int + ) -> List[int]: + """ + Distribute positions with perfectly consistent spacing between spikes + while using smaller edge margins. + + Strategy: + - Let S be the inter-spike spacing and M be the edge margin. + - We enforce M = r * S, where r = edge_margin_ratio in params. + - Total usable span T = end - start must satisfy: T = (count - 1) * S + 2 * M + => S = T / (count - 1 + 2r) + - Positions: p_i = start + M + i * S for i in [0, count-1] + + If the spacing S would violate the minimum separation implied by spike + geometry, we fall back to a safe placement. 
+ """ + if count <= 1: + return [(start + end) // 2] + + # Minimum separation required so spikes cannot overlap once injected + min_separation = self.params.max_spike_width + self.params.min_spike_margin + + total_space = end - start + ratio = max(0.0, float(getattr(self.params, "edge_margin_ratio", 0.0))) + + # Compute ideal spacing S given desired margin ratio + denominator = (count - 1) + 2.0 * ratio + if denominator <= 0: + return self._distribute_positions_fallback( + count, start, end, min_separation + ) + + base_spacing = total_space / denominator + + # Ensure spacing respects the minimum separation + if base_spacing < min_separation: + return self._distribute_positions_fallback( + count, start, end, min_separation + ) + + edge_margin = ratio * base_spacing + + positions = [ + int(round(start + edge_margin + i * base_spacing)) for i in range(count) + ] + + # Clamp within bounds and ensure strictly increasing order + positions = sorted(max(start, min(end, p)) for p in positions) + + return positions + + def _distribute_positions_burst_mode( + self, count: int, start: int, end: int + ) -> List[int]: + """Original burst mode distribution logic.""" + if count <= 1: + return [(start + end) // 2] + + # For burst mode, use the original logic with some randomness + min_separation = self.params.max_spike_width + self.params.min_spike_margin + available_space = end - start + + if available_space < (count - 1) * min_separation: + return self._distribute_positions_fallback( + count, start, end, min_separation + ) + + # Distribute positions with some randomness for burst mode + positions = [] + if count == 1: + positions.append((start + end) // 2) + else: + interval = (end - start) / (count - 1) + for i in range(count): + base_pos = start + i * interval + # Add some jitter for burst mode + jitter_range = min(interval * 0.2, min_separation * 0.3) + jitter = np.random.uniform(-jitter_range, jitter_range) + pos = int(base_pos + jitter) + pos = max(start, min(end, pos)) + positions.append(pos) + + return positions + + def _distribute_positions_fallback( + self, count: int, start: int, end: int, min_separation: int + ) -> List[int]: + """Fallback method when there's not enough space for optimal distribution.""" + positions = [] + current_pos = start + + for i in range(count): + if current_pos <= end: + positions.append(current_pos) + current_pos += min_separation + else: + break + + return positions + + def _enforce_separation( + self, positions: List[int], min_separation: int, margin: int + ) -> List[int]: + """Ensure minimum separation between spike positions.""" + if len(positions) <= 1: + return positions + + positions = sorted(positions) + adjusted = [max(margin, positions[0])] + + for pos in positions[1:]: + next_pos = max(pos, adjusted[-1] + min_separation) + if next_pos <= self.params.length - margin: + adjusted.append(next_pos) + else: + # If we can't fit this spike, stop adding more + break + + return adjusted + + def _generate_single_spike( + self, + amplitude: float, + angle_deg: float, + spike_shapes: List[SpikeShape], + spikes_above_baseline: bool, + ) -> np.ndarray: + """Generate a single spike with specified shape and angle.""" + shape = np.random.choice(spike_shapes) + slope = np.tan(np.radians(angle_deg)) + rise_time = np.clip( + int(np.round(amplitude / slope)), + self.params.min_spike_width // 2, + self.params.max_spike_width // 2, + ) + fall_time = rise_time + plateau_duration = ( + np.random.randint(*self.params.plateau_duration) + if shape in (SpikeShape.CHOPPED_V, 
SpikeShape.CHOPPED_INVERTED_V) + else 0 + ) + final_amplitude = amplitude if spikes_above_baseline else -amplitude + + spike_length = rise_time + plateau_duration + fall_time + spike = np.zeros(spike_length) + + # Rise phase + spike[:rise_time] = np.linspace(0, final_amplitude, rise_time, endpoint=False) + + # Plateau phase + if plateau_duration: + spike[rise_time : rise_time + plateau_duration] = final_amplitude + + # Fall phase + spike[rise_time + plateau_duration :] = np.linspace( + final_amplitude, 0, fall_time, endpoint=False + ) + + return spike + + def _inject_spike( + self, signal: np.ndarray, spike: np.ndarray, position: int + ) -> None: + """Inject a spike into the signal at the given position.""" + half_length = len(spike) // 2 + start = max(0, position - half_length) + end = min(len(signal), position + half_length + len(spike) % 2) + spike_start = max(0, half_length - position) + spike_end = spike_start + (end - start) + + if end > start and spike_end > spike_start: + signal[start:end] += spike[spike_start:spike_end] diff --git a/src/synthetic_generation/spikes/spikes_generator_wrapper.py b/src/synthetic_generation/spikes/spikes_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..379fe8f7b6b8dc0d781ace5ab03c553eff6855b5 --- /dev/null +++ b/src/synthetic_generation/spikes/spikes_generator_wrapper.py @@ -0,0 +1,64 @@ +from typing import Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import SpikesGeneratorParams +from src.synthetic_generation.spikes.spikes_generator import SpikesGenerator + + +class SpikesGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for SpikesGenerator that handles batch generation and formatting. + """ + + def __init__(self, params: SpikesGeneratorParams): + """ + Initialize the SpikesGeneratorWrapper. + + Parameters + ---------- + params : SpikesGeneratorParams + Parameters for the spikes generator. + """ + super().__init__(params) + self.generator = SpikesGenerator(params) + + def generate_batch( + self, batch_size: int, seed: Optional[int] = None + ) -> TimeSeriesContainer: + """ + Generate a batch of spikes time series. + + Parameters + ---------- + batch_size : int + Number of time series to generate. + seed : int, optional + Random seed for reproducibility. + + Returns + ------- + TimeSeriesContainer + TimeSeriesContainer containing the generated time series. 
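Editorial aside — a sketch of the spike geometry built in _generate_single_spike above, with assumed numbers: the ramp length follows from the amplitude and the ramp angle, and a chopped spike inserts a flat plateau between the rise and the fall.

# Sketch only; amplitude, angle and plateau length are illustrative assumptions.
import numpy as np

amplitude, angle_deg, plateau = 3.0, 60.0, 4
rise = int(round(amplitude / np.tan(np.radians(angle_deg))))   # ~2 steps here
spike = np.concatenate([
    np.linspace(0.0, amplitude, rise, endpoint=False),   # rise
    np.full(plateau, amplitude),                          # plateau (chopped shape)
    np.linspace(amplitude, 0.0, rise, endpoint=False),    # fall
])
print(rise, spike.round(2))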
+ """ + if seed is not None: + self._set_random_seeds(seed) + + # Sample parameters for the batch + sampled_params = self._sample_parameters(batch_size) + + # Generate time series + values = [] + for i in range(batch_size): + # Use a different seed for each series in the batch + series_seed = (seed + i) if seed is not None else None + series = self.generator.generate_time_series(series_seed) + values.append(series) + + return TimeSeriesContainer( + values=np.array(values), + start=sampled_params["start"], + frequency=sampled_params["frequency"], + ) diff --git a/src/synthetic_generation/steps/step_generator.py b/src/synthetic_generation/steps/step_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..96131ef93e5ed7d17d04d841930759b6e7b3acb1 --- /dev/null +++ b/src/synthetic_generation/steps/step_generator.py @@ -0,0 +1,473 @@ +from typing import List, Optional, Tuple + +import numpy as np +from scipy.ndimage import gaussian_filter1d + +from src.synthetic_generation.abstract_classes import AbstractTimeSeriesGenerator +from src.synthetic_generation.generator_params import ( + StepGeneratorParams, + StepPatternType, + SubseriesConfig, +) + + +class StepGenerator(AbstractTimeSeriesGenerator): + """ + Generator for step function time series. + Creates realistic step functions with optional seasonality, trend, and noise. + """ + + def __init__(self, params: StepGeneratorParams): + """ + Initialize the StepGenerator. + + Parameters + ---------- + params : StepGeneratorParams + Parameters controlling the step function generation. + """ + self.params = params + self.rng = np.random.default_rng(params.global_seed) + + def _select_subseries_configs(self) -> List[Tuple[SubseriesConfig, int]]: + """ + Select which subseries patterns to use and their lengths. + + Returns + ------- + List[Tuple[SubseriesConfig, int]] + List of (config, length) tuples for each subseries. + """ + # Determine number of subseries + num_subseries = self.rng.integers( + self.params.min_subseries, self.params.max_subseries + 1 + ) + + # Calculate weights for pattern selection + configs = self.params.subseries_configs + weights = np.array([config.weight for config in configs]) + weights = weights / weights.sum() + + # Select patterns + selected_configs = [] + remaining_length = self.params.length + + for i in range(num_subseries): + # Select pattern + config_idx = self.rng.choice(len(configs), p=weights) + config = configs[config_idx] + + # Determine length for this subseries + if i == num_subseries - 1: + # Last subseries gets remaining length + length = remaining_length + else: + # Sample length from range, but ensure we don't exceed remaining + min_length = min( + config.length_range[0], remaining_length // (num_subseries - i) + ) + max_length = min( + config.length_range[1], + remaining_length - (num_subseries - i - 1) * 50, + ) + max_length = max(min_length, max_length) + + length = self.rng.integers(min_length, max_length + 1) + remaining_length -= length + + selected_configs.append((config, length)) + + return selected_configs + + def _generate_changepoints_for_pattern( + self, config: SubseriesConfig, length: int + ) -> np.ndarray: + """ + Generate changepoints for a specific pattern type. 
+ + Parameters + ---------- + config : SubseriesConfig + Configuration for this subseries + length : int + Length of the subseries + + Returns + ------- + np.ndarray + Array of changepoint positions + """ + num_changepoints = self.rng.integers( + config.num_changepoints_range[0], config.num_changepoints_range[1] + 1 + ) + + if num_changepoints == 0: + return np.array([]) + + # Ensure minimum spacing between changepoints + min_spacing = max(1, length // (num_changepoints * 2)) + + if config.pattern_type == StepPatternType.STABLE: + # Few changepoints, mostly at the beginning or end + if num_changepoints > 0: + changepoints = self.rng.choice( + np.arange(length // 4, 3 * length // 4), + size=min(num_changepoints, length // 2), + replace=False, + ) + else: + changepoints = np.array([]) + + elif config.pattern_type in [ + StepPatternType.GRADUAL_INCREASE, + StepPatternType.GRADUAL_DECREASE, + ]: + # More evenly distributed + changepoints = np.linspace( + length // 10, 9 * length // 10, num_changepoints + ).astype(int) + # Add some randomness + noise = self.rng.integers( + -min_spacing, min_spacing + 1, size=num_changepoints + ) + changepoints = np.clip(changepoints + noise, 0, length - 1) + + elif config.pattern_type in [ + StepPatternType.SPIKE_UP, + StepPatternType.SPIKE_DOWN, + ]: + # Concentrated in the first third, then spread out + first_third = length // 3 + num_first_third = max(1, num_changepoints // 2) + num_rest = num_changepoints - num_first_third + + if num_first_third > 0: + changepoints_first = np.linspace( + length // 20, first_third, num_first_third + ).astype(int) + else: + changepoints_first = np.array([]) + + if num_rest > 0: + changepoints_rest = np.linspace( + first_third + 1, 9 * length // 10, num_rest + ).astype(int) + else: + changepoints_rest = np.array([]) + + changepoints = np.concatenate([changepoints_first, changepoints_rest]) + + elif config.pattern_type == StepPatternType.OSCILLATING: + # Regular spacing + changepoints = np.linspace( + length // 10, 9 * length // 10, num_changepoints + ).astype(int) + + else: # RANDOM_WALK + # Random distribution + changepoints = self.rng.choice( + np.arange(length // 10, 9 * length // 10), + size=min(num_changepoints, length // 2), + replace=False, + ) + + return np.sort(changepoints) + + def _generate_step_sizes_for_pattern( + self, config: SubseriesConfig, num_changepoints: int + ) -> np.ndarray: + """ + Generate step sizes for a specific pattern type. 
+ + Parameters + ---------- + config : SubseriesConfig + Configuration for this subseries + num_changepoints : int + Number of changepoints + + Returns + ------- + np.ndarray + Array of step sizes + """ + if num_changepoints == 0: + return np.array([]) + + # Generate base step sizes + step_sizes = self.rng.uniform( + config.step_size_range[0], config.step_size_range[1], num_changepoints + ) + + if config.pattern_type == StepPatternType.STABLE: + # Very small steps + return step_sizes * 0.1 + + elif config.pattern_type == StepPatternType.GRADUAL_INCREASE: + # All positive steps with optional decay + step_sizes = np.abs(step_sizes) + if config.step_size_decay != 1.0: + decay_factors = np.power( + config.step_size_decay, np.arange(num_changepoints) + ) + step_sizes = step_sizes * decay_factors + return step_sizes + + elif config.pattern_type == StepPatternType.GRADUAL_DECREASE: + # All negative steps with optional decay + step_sizes = -np.abs(step_sizes) + if config.step_size_decay != 1.0: + decay_factors = np.power( + config.step_size_decay, np.arange(num_changepoints) + ) + step_sizes = step_sizes * decay_factors + return step_sizes + + elif config.pattern_type == StepPatternType.SPIKE_UP: + # Large positive steps early, then smaller negative steps + step_sizes = np.abs(step_sizes) + mid_point = num_changepoints // 2 + step_sizes[mid_point:] = -step_sizes[mid_point:] * 0.5 + + # Apply decay + if config.step_size_decay != 1.0: + decay_factors = np.power( + config.step_size_decay, np.arange(num_changepoints) + ) + step_sizes = step_sizes * decay_factors + return step_sizes + + elif config.pattern_type == StepPatternType.SPIKE_DOWN: + # Large negative steps early, then smaller positive steps + step_sizes = -np.abs(step_sizes) + mid_point = num_changepoints // 2 + step_sizes[mid_point:] = -step_sizes[mid_point:] * 0.5 + + # Apply decay + if config.step_size_decay != 1.0: + decay_factors = np.power( + config.step_size_decay, np.arange(num_changepoints) + ) + step_sizes = step_sizes * decay_factors + return step_sizes + + elif config.pattern_type == StepPatternType.OSCILLATING: + # Alternating positive and negative steps + step_sizes = np.abs(step_sizes) + step_sizes[1::2] *= -1 # Make every other step negative + return step_sizes + + else: # RANDOM_WALK + return step_sizes + + def _generate_subseries( + self, config: SubseriesConfig, length: int, start_level: float + ) -> np.ndarray: + """ + Generate a single subseries with the specified pattern. 
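Editorial aside — a sketch of the decayed and oscillating step sizes produced above, with assumed values: each step is scaled by decay**i, the oscillating pattern flips every other sign, and the cumulative sum gives the level after each changepoint.

# Sketch only; the ranges and decay factor are illustrative assumptions.
import numpy as np

rng = np.random.default_rng(3)
num_changepoints, decay = 6, 0.8
steps = np.abs(rng.uniform(0.5, 2.0, num_changepoints))
steps *= np.power(decay, np.arange(num_changepoints))  # geometric decay
steps[1::2] *= -1                                      # oscillating pattern
levels = np.cumsum(steps)                              # level after each changepoint
print(steps.round(2), levels.round(2))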
+ + Parameters + ---------- + config : SubseriesConfig + Configuration for this subseries + length : int + Length of the subseries + start_level : float + Starting level for this subseries + + Returns + ------- + np.ndarray + Generated subseries + """ + # Generate changepoints and step sizes + changepoints = self._generate_changepoints_for_pattern(config, length) + step_sizes = self._generate_step_sizes_for_pattern(config, len(changepoints)) + + # Initialize subseries with start level + subseries = np.full(length, start_level) + + # Apply steps + current_level = start_level + for changepoint, step_size in zip(changepoints, step_sizes): + current_level += step_size + subseries[changepoint:] = current_level + + # Apply level drift if specified + if config.level_drift_range[0] != 0 or config.level_drift_range[1] != 0: + drift = self.rng.uniform( + config.level_drift_range[0], config.level_drift_range[1] + ) + drift_array = np.linspace(0, drift, length) + subseries += drift_array + + return subseries + + def _create_combined_step_function(self) -> np.ndarray: + """ + Create a combined step function from multiple subseries. + + Returns + ------- + np.ndarray + Combined step function + """ + # Select subseries configurations + subseries_configs = self._select_subseries_configs() + + # Generate base level + base_level = self.rng.uniform( + self.params.base_level_range[0], self.params.base_level_range[1] + ) + + # Generate subseries + combined_series = [] + current_level = base_level + + for config, length in subseries_configs: + # Generate subseries + subseries = self._generate_subseries(config, length, current_level) + + # Ensure level continuity if required + if ( + self.params.maintain_level_continuity + and len(combined_series) > 0 + and len(subseries) > 0 + ): + level_diff = subseries[0] - current_level + if abs(level_diff) > self.params.max_level_jump_between_subseries: + # Adjust subseries to maintain continuity + adjustment = ( + level_diff + - np.sign(level_diff) + * self.params.max_level_jump_between_subseries + ) + subseries -= adjustment + + combined_series.append(subseries) + current_level = subseries[-1] + + # Concatenate all subseries + combined_series = np.concatenate(combined_series) + + # Apply transition smoothing if enabled + if self.params.enable_smooth_transitions and len(subseries_configs) > 1: + # Find transition points + transition_points = [] + cumulative_length = 0 + for config, length in subseries_configs[:-1]: # Exclude last + cumulative_length += length + transition_points.append(cumulative_length) + + # Smooth transitions + for transition_point in transition_points: + start_idx = max( + 0, transition_point - self.params.transition_length // 2 + ) + end_idx = min( + len(combined_series), + transition_point + self.params.transition_length // 2, + ) + + if end_idx - start_idx > 2: + # Apply light Gaussian smoothing only to transition regions + combined_series[start_idx:end_idx] = gaussian_filter1d( + combined_series[start_idx:end_idx], + sigma=1.0, # Very light smoothing + ) + + # Ensure exact length + if len(combined_series) > self.params.length: + combined_series = combined_series[: self.params.length] + elif len(combined_series) < self.params.length: + # Pad with the last value + padding = np.full( + self.params.length - len(combined_series), combined_series[-1] + ) + combined_series = np.concatenate([combined_series, padding]) + + return combined_series + + def generate_time_series(self, random_seed: Optional[int] = None) -> np.ndarray: + """ + Generate a single 
step function time series. + + Parameters + ---------- + random_seed : int, optional + Random seed for reproducibility. + + Returns + ------- + np.ndarray + Generated time series of shape (length,). + """ + if random_seed is not None: + self.rng = np.random.default_rng(random_seed) + + # Create the main step function + step_function = self._create_combined_step_function() + + # Add noise + if self.params.noise_level_range[0] > 0 or self.params.noise_level_range[1] > 0: + noise_level = self.rng.uniform( + self.params.noise_level_range[0], self.params.noise_level_range[1] + ) + noise = self.rng.normal(0, noise_level, size=len(step_function)) + step_function += noise + + # Add seasonality using simple sine waves if enabled + if self.params.add_seasonality: + # Daily seasonality + if self.params.daily_seasonality_amplitude_range[1] > 0: + daily_amplitude = self.rng.uniform( + self.params.daily_seasonality_amplitude_range[0], + self.params.daily_seasonality_amplitude_range[1], + ) + daily_period = 288 # 5-minute intervals in a day + t = np.arange(len(step_function)) + daily_seasonality = daily_amplitude * np.sin( + 2 * np.pi * t / daily_period + ) + step_function += daily_seasonality + + # Weekly seasonality + if self.params.weekly_seasonality_amplitude_range[1] > 0: + weekly_amplitude = self.rng.uniform( + self.params.weekly_seasonality_amplitude_range[0], + self.params.weekly_seasonality_amplitude_range[1], + ) + weekly_period = 288 * 7 # 7 days + t = np.arange(len(step_function)) + weekly_seasonality = weekly_amplitude * np.sin( + 2 * np.pi * t / weekly_period + ) + step_function += weekly_seasonality + + # Add trend if enabled + if self.params.add_trend: + slope = self.rng.uniform( + self.params.trend_slope_range[0], self.params.trend_slope_range[1] + ) + trend = slope * np.arange(len(step_function)) + step_function += trend + + # Scale the signal + scale_factor = self.rng.uniform( + self.params.scale_range[0], self.params.scale_range[1] + ) + step_function *= scale_factor + + # Inject anomalies if enabled + if self.params.inject_anomalies: + anomaly_indicators = ( + self.rng.random(len(step_function)) < self.params.anomaly_probability + ) + anomaly_magnitudes = self.rng.uniform( + self.params.anomaly_magnitude_range[0], + self.params.anomaly_magnitude_range[1], + size=len(step_function), + ) + step_function[anomaly_indicators] += anomaly_magnitudes[anomaly_indicators] + + return step_function diff --git a/src/synthetic_generation/steps/step_generator_wrapper.py b/src/synthetic_generation/steps/step_generator_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..de20136b4bfa622e00d65391ef58c3977ca982d3 --- /dev/null +++ b/src/synthetic_generation/steps/step_generator_wrapper.py @@ -0,0 +1,64 @@ +from typing import Optional + +import numpy as np + +from src.data.containers import TimeSeriesContainer +from src.synthetic_generation.abstract_classes import GeneratorWrapper +from src.synthetic_generation.generator_params import StepGeneratorParams +from src.synthetic_generation.steps.step_generator import StepGenerator + + +class StepGeneratorWrapper(GeneratorWrapper): + """ + Wrapper for StepGenerator that handles batch generation and formatting. + """ + + def __init__(self, params: StepGeneratorParams): + """ + Initialize the StepGeneratorWrapper. + + Parameters + ---------- + params : StepGeneratorParams + Parameters for the step generator. 
+ """ + super().__init__(params) + self.generator = StepGenerator(params) + + def generate_batch( + self, batch_size: int, seed: Optional[int] = None + ) -> TimeSeriesContainer: + """ + Generate a batch of step function time series. + + Parameters + ---------- + batch_size : int + Number of time series to generate. + seed : int, optional + Random seed for reproducibility. + + Returns + ------- + TimeSeriesContainer + TimeSeriesContainer containing the generated time series. + """ + if seed is not None: + self._set_random_seeds(seed) + + # Sample parameters for the batch + sampled_params = self._sample_parameters(batch_size) + + # Generate time series + values = [] + for i in range(batch_size): + # Use a different seed for each series in the batch + series_seed = (seed + i) if seed is not None else None + series = self.generator.generate_time_series(series_seed) + values.append(series) + + return TimeSeriesContainer( + values=np.array(values), + start=sampled_params["start"], + frequency=sampled_params["frequency"], + ) diff --git a/src/synthetic_generation/utils.py b/src/synthetic_generation/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2d96a085aae76ee2cdc3d2b9c5fed424e15cc0dd --- /dev/null +++ b/src/synthetic_generation/utils.py @@ -0,0 +1,100 @@ +from typing import Optional + +import numpy as np +import torch + + +def generate_spikes( + size: int, + spikes_type: str = "choose_randomly", + spike_intervals: Optional[int] = None, + n_spikes: Optional[int] = None, + to_keep_rate: float = 0.4, +): + spikes = np.zeros(size) + if size < 120: + build_up_points = 1 + elif size < 250: + build_up_points = np.random.choice([2, 1], p=[0.3, 0.7]) + else: + build_up_points = np.random.choice([3, 2, 1], p=[0.15, 0.45, 0.4]) + + spike_duration = build_up_points * 2 + + if spikes_type == "choose_randomly": + spikes_type = np.random.choice( + ["regular", "patchy", "random"], p=[0.4, 0.5, 0.1] + ) + + if spikes_type == "patchy" and size < 64: + spikes_type = "regular" + + if spikes_type in ["regular", "patchy"]: + if spike_intervals is None: + upper_bound = np.ceil( + spike_duration / 0.05 + ) ## at least 1 spike every 24 periods (120 if 5 spike duration) #np.ceil(spike_duration * size/(size*0.05)) + lower_bound = np.ceil( + spike_duration / 0.15 + ) ## at most 3 spikes every 24 periods + spike_intervals = np.random.randint(lower_bound, upper_bound) + n_spikes = np.ceil(size / spike_intervals) + spike_intervals = np.arange(spike_intervals, size, spike_intervals) + if spikes_type == "patchy": + patch_size = np.random.randint(2, max(n_spikes * 0.7, 3)) + to_keep = np.random.randint(np.ceil(patch_size * to_keep_rate), patch_size) + else: + n_spikes = ( + n_spikes + if n_spikes is not None + else np.random.randint(4, min(max(size // (spike_duration * 3), 6), 20)) + ) + spike_intervals = np.sort( + np.random.choice( + np.arange(spike_duration, size), size=n_spikes, replace=False + ) + ) + + constant_build_rate = False + if spikes_type in ["regular", "patchy"]: + random_ = np.random.random() + constant_build_rate = True + + patch_count = 0 + spike_intervals -= 1 + for interval in spike_intervals: + interval = np.round(interval).astype(int) + if spikes_type == "patchy": + if patch_count >= patch_size: + patch_count = 0 + if patch_count < to_keep: + patch_count += 1 + else: + patch_count += 1 + continue + if not constant_build_rate: + random_ = np.random.random() + build_up_rate = ( + np.random.uniform(0.5, 2) if random_ < 0.7 else np.random.uniform(2.5, 5) + ) + + spike_start = 
interval - build_up_points + 1 + for i in range(build_up_points): + if 0 <= spike_start + i < len(spikes): + spikes[spike_start + i] = build_up_rate * (i + 1) + + for i in range(1, build_up_points): + if (interval + i) < len(spikes): + spikes[interval + i] = spikes[interval - i] + + # randomly make it positive or negative + spikes += 1 + spikes = spikes * np.random.choice([1, -1], 1, p=[0.7, 0.3]) + + return torch.Tensor(spikes) + + +def generate_peak_spikes(ts_size, peak_period, spikes_type="regular"): + return generate_spikes( + ts_size, spikes_type=spikes_type, spike_intervals=peak_period + ) diff --git a/src/training/__init__.py b/src/training/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/training/trainer_dist.py b/src/training/trainer_dist.py new file mode 100644 index 0000000000000000000000000000000000000000..3c6856a25e1388368cb418cb0e55bfad0f097c4f --- /dev/null +++ b/src/training/trainer_dist.py @@ -0,0 +1,808 @@ +import argparse +import logging +import os +import warnings +from pathlib import Path +from typing import Dict, List + +import matplotlib.pyplot as plt +import torch +import torch.distributed as dist +import torch.optim as optim +import torchmetrics +import yaml +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.optim.lr_scheduler import CosineAnnealingLR +from torch.utils.data.distributed import DistributedSampler +from tqdm import tqdm + +import wandb +from src.data.containers import BatchTimeSeriesContainer +from src.data.loaders import SyntheticValidationDataset, create_synthetic_dataset +from src.gift_eval.aggregate_results import aggregate_results +from src.gift_eval.constants import ALL_DATASETS +from src.gift_eval.evaluate import evaluate_in_memory +from src.models.model import TimeSeriesModel +from src.optim.lr_scheduler import WarmupStableDecayScheduler, get_scheduler +from src.plotting.plot_multivariate_timeseries import plot_from_container +from src.utils.utils import ( + generate_descriptive_model_name, + seed_everything, +) + +warnings.filterwarnings("ignore", category=FutureWarning) +warnings.filterwarnings("ignore", category=DeprecationWarning) + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger(__name__) + +# Suppress debug messages from external libraries +logging.getLogger("matplotlib").setLevel(logging.WARNING) +logging.getLogger("matplotlib.font_manager").setLevel(logging.WARNING) +logging.getLogger("PIL").setLevel(logging.WARNING) +logging.getLogger("PIL.PngImagePlugin").setLevel(logging.WARNING) + + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" + + +def setup_distributed(): + """Initializes the distributed process group.""" + dist.init_process_group(backend="nccl") + local_rank = int(os.environ["LOCAL_RANK"]) + torch.cuda.set_device(local_rank) + return local_rank + + +def cleanup_distributed(): + """Cleans up the distributed process group safely.""" + try: + if dist.is_available() and dist.is_initialized(): + try: + dist.barrier() + except Exception: + pass + try: + if torch.cuda.is_available(): + torch.cuda.synchronize() + except Exception: + pass + try: + dist.destroy_process_group() + except Exception as e: + logger.warning(f"Error during destroy_process_group: {e}") + except Exception: + pass + + +def is_main_process(): + return dist.get_rank() == 0 + + +class TrainingPipeline: + def __init__(self, config: Dict): + self.config = 
config + self.grad_accum_enabled = bool( + self.config.get("gradient_accumulation_enabled", False) + ) + self.accumulation_steps = ( + max(1, int(self.config.get("accumulation_steps", 1))) + if self.grad_accum_enabled + else 1 + ) + + # --- Distributed Setup --- + self.local_rank = setup_distributed() + self.rank = dist.get_rank() + self.world_size = dist.get_world_size() + self.device = torch.device(f"cuda:{self.local_rank}") + + self.initial_epoch = 0 + self.wandb_step_offset = 0 + self._setup() + + if is_main_process(): + logger.info("Loaded config:") + for key, value in self.config.items(): + logger.info(f"{key}: {value}") + + def _setup(self) -> None: + seed_everything(self.config["seed"]) + self.config["model_name"] = generate_descriptive_model_name(self.config) + + # Resolve run output directory + self.run_output_dir = ( + self.config.get("run_output_dir") + or f"{self.config['model_path']}/{self.config['model_name']}" + ) + self.config["resolved_run_output_dir"] = self.run_output_dir + + + if is_main_process() and self.config.get("wandb"): + init_kwargs = { + "name": self.config["model_name"], + "resume": "allow", # Allows resuming a run if the ID exists + } + + # Allow selecting which account/team (entity) to log runs to + # If not provided, W&B will use the default entity for the API key + if self.config.get("wandb_entity"): + init_kwargs["entity"] = self.config.get("wandb_entity") + + # If continuing training, try to load the previous run ID + if self.config.get("continue_training"): + if self.config.get("wandb_run_id"): + init_kwargs["id"] = self.config["wandb_run_id"] + logger.info( + f"Attempting to resume wandb run with ID: {self.config['wandb_run_id']}" + ) + + # Initialize Weights & Biases + wandb.init( + project=self.config.get("wandb_project_name", "TimeSeriesForecasting"), + config=self.config, + **init_kwargs, + ) + + self.num_training_iterations = self.config.get("num_training_iterations") + + self.model = TimeSeriesModel(**self.config["TimeSeriesModel"]).to(self.device) + if is_main_process(): + logger.info("=" * 80) + logger.info( + f"Initializing model with {sum(p.numel() for p in self.model.parameters()) / 1e6:.2f}M parameters" + ) + logger.info("=" * 80) + logger.info(f"Run output directory: {self.run_output_dir}") + + dist.barrier(device_ids=[self.local_rank]) + self._setup_optimizer() + self._load_checkpoint() + + dist.barrier(device_ids=[self.local_rank]) + logger.info( + f"Distributed training setup: rank {self.rank}, world size {self.world_size}, local rank {self.local_rank}, device {self.device}" + ) + self.model = DDP( + self.model, device_ids=[self.local_rank], find_unused_parameters=True + ) + logger.info( + f"Distributed Data Parallel model initialized on rank {self.local_rank} with device {self.device}" + ) + + augmentations_config = self.config.get("data_augmentation", {}) + nan_stats_path = augmentations_config.get("nan_stats_path") + nan_patterns_path = augmentations_config.get("nan_patterns_path") + + chosen_scaler_name = self.config.get("TimeSeriesModel", {}).get("scaler") + + # 1. 
Create the dataset object with rank-based file sharding for scalability + self.train_dataset = create_synthetic_dataset( + base_data_dir=self.config.get("train_data_path"), + batch_size=self.config.get("batch_size", 128), + num_batches_per_epoch=self.num_training_iterations, + generator_proportions=self.config.get("generator_proportions"), + augmentations=augmentations_config, + augmentation_probabilities=self.config.get("augmentation_probabilities"), + global_seed=self.config["seed"] + int(os.environ["LOCAL_RANK"]), + nan_stats_path=nan_stats_path, + nan_patterns_path=nan_patterns_path, + chosen_scaler_name=chosen_scaler_name, + rank=self.rank, + world_size=self.world_size, + ) + + # 2. Create the DistributedSampler + train_sampler = DistributedSampler( + self.train_dataset, + num_replicas=self.world_size, + rank=self.rank, + shuffle=True, + ) + + # 3. Create the custom collate function + def collate_fn(batch): + # Each item from ComposedDataset is already a complete batch container + return batch[0] + + # 4. Create the final DataLoader + self.train_loader = torch.utils.data.DataLoader( + self.train_dataset, + batch_size=1, # Each dataset item is a full batch + sampler=train_sampler, + num_workers=self.config.get("num_workers", 1), + pin_memory=True, + collate_fn=collate_fn, + ) + print( + f"Distributed DataLoader created with {len(self.train_loader)} batches and num workers={self.config.get('num_workers', 0)}" + ) + + # Validation loader with per-rank file sharding for scalability + val_dataset = SyntheticValidationDataset( + base_data_dir=self.config.get("train_data_path"), + batch_size=self.config.get("validation_batch_size", 64), + num_batches=self.config.get("num_validation_batches", 1), + future_length=512, + generator_proportions=self.config.get("generator_proportions"), + device=self.device, + global_seed=self.config["seed"], + augmentations=augmentations_config, + augmentation_probabilities=self.config.get("augmentation_probabilities"), + chosen_scaler_name=chosen_scaler_name, + nan_stats_path=nan_stats_path, + nan_patterns_path=nan_patterns_path, + rank=self.rank, + world_size=self.world_size, + ) + val_sampler = DistributedSampler(val_dataset, shuffle=False) + + self.val_loader = torch.utils.data.DataLoader( + val_dataset, + batch_size=1, # Each item from val_dataset is already a complete batch + shuffle=False, + sampler=val_sampler, + collate_fn=collate_fn, + num_workers=0, + ) + + self._setup_metrics() + + def _setup_optimizer(self): + """Setup optimizer and learning rate scheduler with enhanced WSD support.""" + optimizer_config = { + "lr": float(self.config["peak_lr"]), + "weight_decay": float(self.config.get("weight_decay", 0.01)), + "betas": ( + float(self.config.get("beta1", 0.9)), + float(self.config.get("beta2", 0.98)), + ), + "eps": float(self.config.get("optimizer_eps", 1e-6)), + } + self.optimizer = optim.AdamW(self.model.parameters(), **optimizer_config) + + # Calculate scheduler parameters + effective_accum_steps = self.accumulation_steps + total_steps = int( + self.num_training_iterations // effective_accum_steps // self.world_size + ) + + scheduler_type = self.config.get("lr_scheduler", "warmup_stable_decay") + + if scheduler_type == "warmup_stable_decay": + # Calculate phase durations + warmup_ratio = float( + self.config.get("warmup_ratio", 0.01) + ) # 1% of training + stable_ratio = float( + self.config.get("stable_ratio", 0.85) + ) # 85% of training + + num_warmup_steps = int(total_steps * warmup_ratio) + num_stable_steps = int(total_steps * stable_ratio) + 
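+            # The remaining steps (total_steps - num_warmup_steps - num_stable_steps)
+            # form the decay phase, handled inside WarmupStableDecayScheduler.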
+ # Use the standalone scheduler class for better control + self.scheduler = WarmupStableDecayScheduler( + optimizer=self.optimizer, + num_warmup_steps=num_warmup_steps, + num_stable_steps=num_stable_steps, + total_steps=total_steps, + min_lr_ratio=self.config.get("min_lr_ratio", 0.01), + decay_type=self.config.get("decay_type", "cosine"), + verbose=is_main_process(), + ) + + if is_main_process(): + logger.info("WSD Scheduler configured:") + logger.info(f" Total steps: {total_steps}") + logger.info( + f" Warmup steps: {num_warmup_steps} ({warmup_ratio * 100:.1f}%)" + ) + logger.info( + f" Stable steps: {num_stable_steps} ({stable_ratio * 100:.1f}%)" + ) + logger.info( + f" Decay steps: {total_steps - num_warmup_steps - num_stable_steps}" + ) + logger.info(f" Peak LR: {self.config['peak_lr']}") + logger.info( + f" Min LR: {self.config['peak_lr'] * float(self.config.get('min_lr_ratio', 0.01))}" + ) + + elif scheduler_type == "cosine_with_warmup": + num_warmup_steps = int(total_steps * self.config.get("warmup_ratio", 0.01)) + + self.scheduler = get_scheduler( + scheduler_type="cosine_with_warmup", + optimizer=self.optimizer, + num_warmup_steps=num_warmup_steps, + num_training_steps=total_steps, + scheduler_kwargs={ + "min_lr_ratio": float(self.config.get("min_lr_ratio", 0.01)), + "num_cycles": float(self.config.get("num_cycles", 0.5)), + }, + ) + + elif scheduler_type == "cosine_with_restarts": + num_warmup_steps = int(total_steps * self.config.get("warmup_ratio", 0.01)) + + self.scheduler = get_scheduler( + scheduler_type="cosine_with_restarts", + optimizer=self.optimizer, + num_warmup_steps=num_warmup_steps, + num_training_steps=total_steps, + scheduler_kwargs={ + "min_lr_ratio": float(self.config.get("min_lr_ratio", 0.01)), + "num_cycles": int(self.config.get("num_restart_cycles", 4)), + }, + ) + + elif scheduler_type == "cosine": + self.scheduler = CosineAnnealingLR( + self.optimizer, + T_max=total_steps, + eta_min=float(self.config["peak_lr"]) + * float(self.config.get("min_lr_ratio", 0.01)), + ) + + else: + raise ValueError(f"Unsupported scheduler type: {scheduler_type}") + + if is_main_process(): + logger.info(f"Optimizer configured with {scheduler_type} scheduler") + + def _setup_metrics(self): + self.train_metrics = { + "mape": torchmetrics.MeanAbsolutePercentageError( + dist_sync_on_step=False, compute_on_cpu=False, sync_on_compute=True + ).to(self.device), + "mse": torchmetrics.MeanSquaredError( + dist_sync_on_step=False, compute_on_cpu=False, sync_on_compute=True + ).to(self.device), + "smape": torchmetrics.SymmetricMeanAbsolutePercentageError( + dist_sync_on_step=False, compute_on_cpu=False, sync_on_compute=True + ).to(self.device), + } + self.val_metrics = { + "mape": torchmetrics.MeanAbsolutePercentageError( + dist_sync_on_step=False, compute_on_cpu=False, sync_on_compute=True + ).to(self.device), + "mse": torchmetrics.MeanSquaredError( + dist_sync_on_step=False, compute_on_cpu=False, sync_on_compute=True + ).to(self.device), + "smape": torchmetrics.SymmetricMeanAbsolutePercentageError( + dist_sync_on_step=False, compute_on_cpu=False, sync_on_compute=True + ).to(self.device), + } + + def _load_checkpoint(self): + # Only attempt to load a checkpoint when continuing training and a path is provided + if not self.config.get("continue_training"): + return + + checkpoint_path_value = self.config.get("checkpoint_path") + if not checkpoint_path_value: + if is_main_process(): + logger.info( + "continue_training=True but no checkpoint_path provided; starting from scratch." 
+ ) + return + + checkpoint_path = Path(checkpoint_path_value) + if not checkpoint_path.exists(): + if is_main_process(): + logger.warning( + f"Checkpoint path does not exist at {checkpoint_path}. Starting from scratch." + ) + return + + if is_main_process(): + logger.info(f"Loading checkpoint from: {checkpoint_path}") + + ckpt = torch.load(checkpoint_path, map_location=self.device) + self.model.load_state_dict(ckpt["model_state_dict"]) + + def _save_checkpoint(self, epoch: int): + dist.barrier() + if is_main_process(): + model_dir = self.run_output_dir + os.makedirs(model_dir, exist_ok=True) + + unwrapped_model = self.model.module + checkpoint = { + "epoch": epoch, + "model_state_dict": unwrapped_model.state_dict(), + "optimizer_state_dict": self.optimizer.state_dict(), + "wandb_run_id": self.config.get("wandb_run_id"), + } + + if hasattr(self.scheduler, "state_dict"): + checkpoint["scheduler_state_dict"] = self.scheduler.state_dict() + elif hasattr(self.scheduler, "current_step"): + checkpoint["wsd_scheduler_state"] = self.scheduler.state_dict() + + checkpoint_path = f"{model_dir}/checkpoint.pth" + torch.save(checkpoint, checkpoint_path) + logger.info(f"Checkpoint saved for step {epoch} to {checkpoint_path}") + + config_path = f"{model_dir}/config.yaml" + with open(config_path, "w") as config_file: + yaml.dump(self.config, config_file) + + def _inverse_scale(self, model, output: dict) -> torch.Tensor: + # Use the unwrapped model (module) to access scaler + return model.module.scaler.inverse_scale( + output["result"], output["scale_statistics"] + ) + + def _train_epoch(self, epoch: int) -> float: + self.model.train() + self.train_loader.sampler.set_epoch(epoch) + + train_loss, total_loss_sum, total_samples = 0.0, 0.0, 0.0 + + pbar = tqdm( + self.train_loader, + desc=f"Training (start_step={epoch})", + disable=not is_main_process(), + ) + + self.optimizer.zero_grad() + + for i, batch in enumerate(pbar): + batch_size = batch.history_values.size(0) + batch.to(self.device) + + with torch.autocast(dtype=torch.bfloat16, device_type="cuda"): + output = self.model(batch) + loss = self.model.module.compute_loss(batch.future_values, output) + + if self.accumulation_steps > 1: + loss = loss / self.accumulation_steps + + loss.backward() + + total_loss_sum += loss.item() * batch_size + total_samples += batch_size + + if ((i + 1) % self.accumulation_steps == 0) or ( + (i + 1) == len(self.train_loader) + ): + torch.nn.utils.clip_grad_norm_( + self.model.parameters(), self.config.get("gradient_clip_val", 1.0) + ) + + self.optimizer.step() + + if hasattr(self.scheduler, "step") and callable(self.scheduler.step): + if isinstance(self.scheduler, WarmupStableDecayScheduler): + self.scheduler.step() + else: + self.scheduler.step() + + self.optimizer.zero_grad() + + if (i + 1) % self.config.get("log_interval", 10) == 0: + dist.barrier() + self._validate_epoch(i) + + total_loss_tensor = torch.tensor( + [total_loss_sum, total_samples], device=self.device + ) + dist.all_reduce(total_loss_tensor, op=dist.ReduceOp.SUM) + global_loss_sum, global_samples = total_loss_tensor.tolist() + + train_loss = ( + global_loss_sum / global_samples if global_samples > 0 else 0.0 + ) + if self.accumulation_steps > 1: + train_loss *= self.accumulation_steps + + if is_main_process(): + current_lr = self.optimizer.param_groups[0]["lr"] + step_metrics = { + "train/step_loss": train_loss, + "train/learning_rate": current_lr, + "train/lr_schedule_step": i, + } + + if hasattr(self.scheduler, "get_phase"): + step_metrics["train/lr_phase"] 
= self.scheduler.get_phase() + step_metrics["train/lr_factor"] = self.scheduler.get_lr_factor( + self.scheduler.current_step - 1 + ) + + if self.config.get("wandb"): + wandb.log(step_metrics, step=i) + + logger.info( + f"Step {i} | Training Loss: {train_loss:.4f} | LR: {current_lr:.2e}" + ) + + total_loss_sum, total_samples = 0.0, 0 + + if (i + 1) % self.config.get("save_every", 10) == 0: + self._save_checkpoint(i) + + return train_loss + + def _validate_epoch(self, epoch: int) -> float: + self.model.eval() + + for metric in self.val_metrics.values(): + metric.reset() + + first_batch_for_plotting = None + + total_loss_sum, total_samples = 0.0, 0 + with torch.no_grad(): + self.val_loader.sampler.set_epoch(epoch) + for batch_idx, batch in enumerate(self.val_loader): + if is_main_process() and batch_idx == 0: + first_batch_for_plotting = batch.to(torch.device("cpu")) + + batch = batch.to(self.device) + batch_size = batch.history_values.size(0) + + with torch.autocast(dtype=torch.bfloat16, device_type="cuda"): + output = self.model.module(batch) # Use unwrapped model + loss = self.model.module.compute_loss(batch.future_values, output) + + inv_scaled_output = self._inverse_scale(self.model, output) + total_loss_sum += loss.item() * batch_size + total_samples += batch_size + + self._update_metrics( + self.val_metrics, + inv_scaled_output, + batch.future_values, + distributed=False, + ) + + total_stats = torch.tensor([total_loss_sum, total_samples], device=self.device) + dist.all_reduce(total_stats, op=dist.ReduceOp.SUM) + global_loss_sum, global_samples = total_stats.tolist() + avg_val_loss = global_loss_sum / global_samples if global_samples > 0 else 0.0 + + val_computed_metrics = { + name: metric.compute() for name, metric in self.val_metrics.items() + } + + if is_main_process(): + log_metrics = {"val/loss": avg_val_loss} + log_metrics.update( + { + f"val/{name}": value.item() + for name, value in val_computed_metrics.items() + } + ) + + if self.config.get("wandb"): + wandb.log(log_metrics, step=epoch + self.wandb_step_offset) + + logger.info( + f"Epoch {epoch} | Validation Loss: {avg_val_loss:.4f} | Validation MAPE: {val_computed_metrics.get('mape', -1).item():.4f}" + ) + + if first_batch_for_plotting is not None: + self._plot_validation_examples( + epoch, first_batch_for_plotting, plot_all=True + ) + + # Ensure all ranks finish validation before returning to training + dist.barrier() + return avg_val_loss + + def _update_metrics( + self, + metrics: Dict, + predictions: torch.Tensor, + targets: torch.Tensor, + distributed: bool = True, + ): + """ + Gathers tensors if in distributed mode and updates the metric objects. + """ + if distributed and dist.is_initialized(): + world_size = dist.get_world_size() + predictions_list = [ + torch.zeros_like(predictions) for _ in range(world_size) + ] + targets_list = [torch.zeros_like(targets) for _ in range(world_size)] + + dist.all_gather(predictions_list, predictions) + dist.all_gather(targets_list, targets) + + predictions_gathered = torch.cat(predictions_list, dim=0) + targets_gathered = torch.cat(targets_list, dim=0) + else: + predictions_gathered = predictions + targets_gathered = targets + + unwrapped_model = self.model.module + + if unwrapped_model.loss_type == "quantile": + try: + median_idx = unwrapped_model.quantiles.index(0.5) + predictions_gathered = predictions_gathered[..., median_idx] + except (ValueError, AttributeError): + if is_main_process(): + logger.warning( + "Median (0.5) quantile not found for metric calculation. Skipping." 
+ ) + return # Exit if we can't get a point forecast + + if predictions_gathered.dim() == 3: + b, p, c = predictions_gathered.shape + predictions_flat = predictions_gathered.permute(0, 2, 1).reshape(b * c, p) + targets_flat = targets_gathered.permute(0, 2, 1).reshape(b * c, p) + + for metric in metrics.values(): + metric.update(predictions_flat, targets_flat) + + def _plot_validation_examples( + self, + epoch: int, + plot_batch: BatchTimeSeriesContainer, + plot_indices: List[int] = [0, 1, 2, 3, 4], + plot_all: bool = False, + ) -> None: + """ + Plots validation examples from a given batch and logs them to WandB. + This method should only be called from the main process. + """ + if (not self.config.get("wandb")) or ( + not self.config.get("wandb_plots", False) + ): + return + + model = self.model.module + + with torch.inference_mode(): + plot_batch.to(self.device) + + with torch.autocast(dtype=torch.bfloat16, device_type="cuda"): + output = model(plot_batch) + + inv_scaled_output = self._inverse_scale(self.model, output) + pred_future = inv_scaled_output.cpu().numpy() + + batch_size = plot_batch.history_values.size(0) + if plot_all: + indices_to_plot = list(range(batch_size)) + else: + indices_to_plot = [i for i in plot_indices if i < batch_size] + + for i in indices_to_plot: + fig = plot_from_container( + batch=plot_batch, + sample_idx=i, + predicted_values=pred_future, + model_quantiles=model.quantiles + if model.loss_type == "quantile" + else None, + title=f"Epoch {epoch} - Val Sample {i}", + output_file=None, + show=False, + ) + + wandb.log( + {f"val_plots/sample_{i}": wandb.Image(fig)}, + step=epoch + self.wandb_step_offset, + ) + plt.close(fig) + + def train(self) -> None: + if is_main_process(): + per_rank_iterations = len(self.train_loader) + optimizer_steps_per_rank = ( + per_rank_iterations + self.accumulation_steps - 1 + ) // self.accumulation_steps + logger.info( + f"Starting training: configured_iterations={self.num_training_iterations}, " + f"world_size={self.world_size}, per_rank_iterations={per_rank_iterations}, " + f"accumulation_steps={self.accumulation_steps}, " + f"optimizer_steps_per_rank={optimizer_steps_per_rank}" + ) + + self._train_epoch(self.initial_epoch) + + dist.barrier() + + if not is_main_process(): + try: + if torch.cuda.is_available(): + try: + torch.cuda.synchronize() + except Exception: + pass + try: + torch.cuda.empty_cache() + except Exception: + pass + except Exception: + pass + cleanup_distributed() + return + + cleanup_distributed() + + gift_eval_config = self.config.get("gift_eval") + if gift_eval_config.get("evaluate_on_gift_eval"): + output_dir = f"{self.run_output_dir}/gift_eval_results" + + evaluate_in_memory( + model=self.model.module if isinstance(self.model, DDP) else self.model, + config=self.config, + datasets=ALL_DATASETS, + terms=["short", "medium", "long"], + dataset_storage_path=gift_eval_config.get("dataset_storage_path"), + batch_size=self.config.get("batch_size"), + max_context_length=gift_eval_config.get("max_context_length"), + output_dir=output_dir, + create_plots=gift_eval_config.get("create_plots"), + max_plots=gift_eval_config.get("max_plots"), + ) + + aggregate_results( + result_root_dir=output_dir, + ) + + if self.config.get("wandb"): + logger.info("TRAINING COMPLETED SUCCESSFULLY!") + wandb.finish() + + try: + if torch.cuda.is_available(): + try: + torch.cuda.synchronize() + except Exception: + pass + try: + torch.cuda.empty_cache() + except Exception: + pass + except Exception: + pass + + +if __name__ == "__main__": + parser 
= argparse.ArgumentParser() + parser.add_argument( + "-c", "--config", default="./configs/train.yaml", help="Path to config file" + ) + parser.add_argument( + "--run_output_dir", + default=None, + help=( + "Optional output directory to store checkpoints and artifacts. " + "If provided, overrides model_path/model_name for saving." + ), + ) + args = parser.parse_args() + with open(args.config) as config_file: + config = yaml.safe_load(config_file) + + # Allow CLI to override output directory for artifacts/logical run folder + if getattr(args, "run_output_dir", None): + config["run_output_dir"] = args.run_output_dir + + try: + pipeline = TrainingPipeline(config) + pipeline.train() + finally: + # Protect final CUDA ops to avoid raising if device already torn down + try: + if torch.cuda.is_available(): + try: + torch.cuda.synchronize() + except Exception: + pass + try: + torch.cuda.empty_cache() + except Exception: + pass + except Exception: + pass \ No newline at end of file diff --git a/src/utils/__init__.py b/src/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/utils/utils.py b/src/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0281adb66671f135e4098d18fec35be71aa929fd --- /dev/null +++ b/src/utils/utils.py @@ -0,0 +1,34 @@ +import os +import random +from datetime import datetime + +import numpy as np +import torch + +device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + + +def seed_everything(seed: int): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + os.environ["PYTHONHASHSEED"] = str(seed) + + +def generate_descriptive_model_name(config): + return ( + f"{config['model_name']}_" + f"BATCH{config['batch_size']}_" + f"ITER{config['num_training_iterations']}_" + f"ACCUM_{config['gradient_accumulation_enabled']}_" + f"ACC_STEPS{config['accumulation_steps']}_" + f"Emb{config['TimeSeriesModel']['embed_size']}_" + f"L{config['TimeSeriesModel']['num_encoder_layers']}_" + f"H{config['TimeSeriesModel']['encoder_config']['num_householder']}_" + f"LR_SCHEDULER_{config['lr_scheduler']}_" + f"PEAK_LR{config['peak_lr']}_" + f"{datetime.now().strftime('_%Y%m%d_%H%M%S')}" + )
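
A minimal usage sketch for the new step generator and its wrapper. It assumes StepGeneratorParams can be constructed with its defaults; the exact fields and defaults live in src/synthetic_generation/generator_params.py, which is outside this diff, so treat the constructor call and the printed shapes as illustrative only.

import numpy as np

from src.synthetic_generation.generator_params import StepGeneratorParams
from src.synthetic_generation.steps.step_generator import StepGenerator
from src.synthetic_generation.steps.step_generator_wrapper import StepGeneratorWrapper

# Assumption: default construction is valid for StepGeneratorParams.
params = StepGeneratorParams()

# Single series: a step function of length params.length with optional noise,
# seasonality, trend, scaling and anomalies applied on top.
generator = StepGenerator(params)
series = generator.generate_time_series(random_seed=42)
print(series.shape, float(np.mean(series)))

# Batched generation returns a TimeSeriesContainer whose values have shape
# (batch_size, params.length), plus the sampled start timestamps and frequencies.
wrapper = StepGeneratorWrapper(params)
container = wrapper.generate_batch(batch_size=8, seed=0)
print(container.values.shape)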
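
The spike utilities in src/synthetic_generation/utils.py are self-contained, so a short example can rely directly on the signatures above. generate_spikes returns a torch.Tensor whose baseline is 1 (flipped to -1 roughly 30% of the time) with build-up/decay spikes superimposed; generate_peak_spikes is a thin wrapper that places spikes at a fixed period.

import torch

from src.synthetic_generation.utils import generate_peak_spikes, generate_spikes

# Randomly placed spikes: 10 spike locations drawn without replacement.
spikes = generate_spikes(size=512, spikes_type="random", n_spikes=10)
assert isinstance(spikes, torch.Tensor) and spikes.shape == (512,)

# Regular spikes every 24 steps, e.g. one peak per day on hourly data
# over a two-week window.
daily_peaks = generate_peak_spikes(ts_size=24 * 14, peak_period=24)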
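
In _setup_optimizer, the number of optimizer steps seen by the LR scheduler is derived from the configured iteration count, the gradient-accumulation factor, and the world size. A worked example with hypothetical settings (none of these numbers come from a shipped config):

# Mirrors the arithmetic in TrainingPipeline._setup_optimizer.
num_training_iterations = 10_000   # hypothetical
accumulation_steps = 4             # hypothetical
world_size = 8                     # hypothetical

total_steps = int(num_training_iterations // accumulation_steps // world_size)  # 312

warmup_ratio, stable_ratio = 0.01, 0.85             # the config defaults above
num_warmup_steps = int(total_steps * warmup_ratio)  # 3
num_stable_steps = int(total_steps * stable_ratio)  # 265
num_decay_steps = total_steps - num_warmup_steps - num_stable_steps  # 44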
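
trainer_dist.py reads LOCAL_RANK and initialises the NCCL backend, so it is presumably launched with torchrun (or an equivalent launcher that sets the usual distributed environment variables) pointing at a config such as ./configs/train.yaml. The run name used for checkpoints and W&B comes from generate_descriptive_model_name; the sketch below shows only the config keys that function reads, with made-up values, not a complete training config.

from src.utils.utils import generate_descriptive_model_name, seed_everything

seed_everything(42)

config = {
    "model_name": "tsmodel",   # overwritten with the descriptive name in _setup
    "batch_size": 128,
    "num_training_iterations": 10_000,
    "gradient_accumulation_enabled": False,
    "accumulation_steps": 1,
    "lr_scheduler": "warmup_stable_decay",
    "peak_lr": 3e-4,
    "TimeSeriesModel": {       # hypothetical architecture values
        "embed_size": 256,
        "num_encoder_layers": 6,
        "encoder_config": {"num_householder": 4},
    },
}

print(generate_descriptive_model_name(config))
# e.g. tsmodel_BATCH128_ITER10000_ACCUM_False_ACC_STEPS1_Emb256_L6_H4_
#      LR_SCHEDULER_warmup_stable_decay_PEAK_LR0.0003__20250101_120000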