From 1629620b25b18b933363373b8625477bfeff3540 Mon Sep 17 00:00:00 2001
From: aoyulong
Date: Fri, 6 Dec 2024 09:09:39 +0800
Subject: [PATCH] Squashed 'megatron/' changes from 772faca1f..bd677bfb1

bd677bfb1 Merge branch 'helenn-fix-inference-test-20241204' into 'main'
2b6b8ac25 ADLR/megatron-lm!2425 - Fix test after new inference default added
e97d48606 Merge branch 'pikaminski/fp8-export' into 'main'
ca1a3df69 ADLR/megatron-lm!2179 - TRT-LLM export for TE FP8-trained checkpoints
2f67f35b4 Merge branch 'ko3n1g/ci/unit-tests-extended' into 'main'
d65f7e6ce ADLR/megatron-lm!2424 - ci: Fix notifications
daa54eab0 Merge branch 'ko3n1g/ci/fix-skip-tests' into 'main'
1e51980b4 ADLR/megatron-lm!2423 - ci: Adjust model config path
844119f5c Merge branch 'ko3n1g/ci/job-runners-2' into 'main'
21cc9b0f9 ADLR/megatron-lm!2416 - tests: Add barrier for destroy
ae832c7be Merge branch 'ko3n1g/ci/job-runners-2' into 'main'
9ceaab63b ADLR/megatron-lm!2415 - ci: Unlock all cluster runners
9f1ef8548 Merge branch 'ko3n1g/ci/unit-tests-on-slurm' into 'main'
522e567ea ADLR/megatron-lm!2410 - ci: Run unit tests on Slurm
22f9a79d8 Merge branch 'jbarker/internvit_dist_ckpt' into 'main'
2ed67b201 ADLR/megatron-lm!2402 - Add dist-ckpt support to InternViT
7d7213d85 Merge branch 'dnarayanan/converter_bugfix' into 'main'
7b43f738f ADLR/megatron-lm!2407 - Bugfix: allow both blend and blend_per_split to be None in get_blend_and_blend_per_split utility function
1115e0626 Merge branch 'drop-optim-async' into 'main'
443a193b3 ADLR/megatron-lm!2325 - Add `separation_hint` to support writing optimizer states to separate file
4ad7a97ab Merge branch 'destroy_pg_if_valid' into 'main'
337c34f44 ADLR/megatron-lm!2308 - Check if Gloo process group is already destroyed before calling destroy_process_group
d0dae2a68 Merge branch 'ko3n1g/ci/job-runners' into 'main'
cd02b4bb0 ADLR/megatron-lm!2412 - ci: Fix job runners
529404e8a Merge branch 'ko3n1g/ci/add-coreutils' into 'main'
d5318c11d ADLR/megatron-lm!2411 - ci: Add coreutils to notify job
a79466284 Merge branch 'ko3n1g/ci/cluster-runners' into 'main'
382fa6a80 ADLR/megatron-lm!2395 - ci: Use cluster-specific runners
090e2eed9 Merge branch 'dnarayanan/fix_distributed_test' into 'main'
0d3d3178e ADLR/megatron-lm!2405 - Update distributed tests to only use public facing APIs
99f999a46 Merge branch 'dnarayanan/pp_assertion' into 'main'
915797035 ADLR/megatron-lm!2398 - Check if num_layers is divisible by PP size even when using non-interleaved schedule
64d816a39 Merge branch 'partial_dp_distopt' into 'main'
bb84eb93f ADLR/megatron-lm!2170 - MCore Partial DistOpt Feature
38f7a8c07 Merge branch 'trintamaki/sequence-packing' into 'main'
0c4328019 ADLR/megatron-lm!2221 - Multimodal sequence packing support
31a29b87c Merge branch 'torch_norm_alias' into 'main'
e842d46d2 ADLR/megatron-lm!2391 - Add TorchLayerNorm alias for backward compatibility
8e9d4dc77 Merge branch 'xiny/fix_router_init' into 'main'
1113758d2 ADLR/megatron-lm!2238 - Fix initialization for gates of router and shared expert
67a50f2f9 Merge branch 'denliu/fix_moe_parallel_states' into 'main'
6bd925538 ADLR/megatron-lm!2393 - Fix compatibility error brought by !1940 for NeMo.
f3e1afb8c Merge branch 'ko3n1g/build/caching' into 'main'
39f3bef39 ADLR/megatron-lm!2406 - build: Improve caching
3c2d6f831 Merge branch 'helenn-inference-max-seqlen-config' into 'main'
b35cc1c2f ADLR/megatron-lm!2400 - Make inference max sequence length configurable
452d52083 Merge branch 'ko3n1g/build/dependencies' into 'main'
4e627b553 ADLR/megatron-lm!2284 - chore: pip install Mcore's dependencies
48b19420c Merge branch 'ko3n1g/chore/codeowners' into 'main'
42070d269 ADLR/megatron-lm!2394 - chore: Set QAT approval to optional
5b1196bc4 Merge branch 'ko3n1g/ci/restart-pipeline-submission' into 'main'
53654f783 ADLR/megatron-lm!2399 - ci: Restart failed pipeline submission
44a64c0df Merge branch 'dnarayanan/fix_check_param_hashes' into 'main'
2ca57f5d2 ADLR/megatron-lm!2362 - Fix check_param_hashes_across_dp_replicas
29535b9da Merge branch 'lmcafee/loader-mcore-local-partial' into 'main'
0be5646cc ADLR/megatron-lm!1489 - loader_mcore.py local module support.
f5afc251e Merge branch 'pmannan/llava_cp_reformat' into 'main'
c43671206 ADLR/megatron-lm!2275 - Context Parallelism Support for LLaVA Model
3c17f5c18 Merge branch 'ko3n1g/ci/small-improvements' into 'main'
71d670b32 ADLR/megatron-lm!2389 - ci: Small improvements
8d24655ca Merge branch 'dnarayanan/add_json_data_args' into 'main'
7e9ab5ca2 ADLR/megatron-lm!2373 - Support big blends by passing in filename of JSON file with relevant arguments
081ab4d28 Merge branch 'helenn-remove-interface-test' into 'main'
072cac499 ADLR/megatron-lm!2390 - Remove interface test since we will allow mew default args to TransformerLayer going forward
cc207f809 Merge branch 'bugfix_multiple_ctx_managers' into 'main'
a1fbf8603 ADLR/megatron-lm!1913 - bugfix for multiple context managers
e21ce31db Merge branch 'ko3n1g/ci/fix-notify-image' into 'main'
2f2b1f1b3 ADLR/megatron-lm!2387 - ci: Use `curl-jq` for notify step
47806ab56 Merge branch 'user/brb/minor-fix' into 'main'
cc54e4539 ADLR/megatron-lm!2354 - None: Update assertion for invalid layer_type in MambaStack
9a75c72f7 Merge branch 'papakipos/mamba-hybrid-layer-allocation-testing' into 'main'
5a3bd5ada ADLR/megatron-lm!2350 - Add unit tests for mamba-hybrid-layer-allocation
cbbfa91c0 Merge branch 'akoumparouli/fix_te_skip_init' into 'main'
9a3e33190 ADLR/megatron-lm!2316 - respect perform_initialization
3a32fbc03 Merge branch 'dnarayanan/training_loop_cleanup' into 'main'
c913cd000 ADLR/megatron-lm!2244 - Clean up main MLM training loop
c10721e35 Merge branch 'tpoon/pp_llava_evaluation' into 'main'
938e5c8a0 ADLR/megatron-lm!2289 - pp > 1 online evaluation
d392f9cac Merge branch 'denliu/moe_parallel_states' into 'main'
7f22e210c ADLR/megatron-lm!1940 - MoE parallel folding: separate MoE parallel states from dense
a9d040c35 Merge branch 'jbarker/internvit_bugfix' into 'main'
31a69e1a3 ADLR/megatron-lm!2385 - Make InternViTRMSNorm behave wrt sharded_state_dict
2e355b788 Merge branch 'ko3n1g/tests/disable-checkpoint' into 'main'
103391723 ADLR/megatron-lm!2384 - tests: Fully remove test
54d61d3a2 Merge branch 'ko3n1g/tests/disable-checkpoint' into 'main'
220302e40 ADLR/megatron-lm!2383 - tests: Disable broken ckpts test
5a86aa4b6 Merge branch 'ko3n1g/tests/add-jet-api' into 'main'
de7794cd9 ADLR/megatron-lm!2382 - tests: Add `jet-api`
ddd920f9d Merge branch 'xiny/fix_multi_tensor_copy' into 'main'
029025c4c ADLR/megatron-lm!2236 - Fix multi tensor copy
69b3e058c Merge branch 'trintamaki/example_fixes' into 'main'
62a032d27 ADLR/megatron-lm!2361 - Multimodal example fixes
dbc7a182f Merge branch 'nemo_gpu_export' into 'main'
4821429d2 ADLR/megatron-lm!2327 - Small changes to export
8c6b9a48c Merge branch 'dnarayanan/fix_from_checkpoint_args' into 'main'
ba7ea15ab ADLR/megatron-lm!2374 - Fix loading args from checkpoint
779acc0e9 Merge branch 'ko3n1g/ci/swap-runners' into 'main'
cef4a419f ADLR/megatron-lm!2380 - ci: Increase interval time
e160988bc Merge branch 'zijiey/moe_doc_0.9' into 'main'
c230e0d7c ADLR/megatron-lm!2245 - Update MoE Doc
bd2cc5526 Merge branch 'xiny/fix_ckpt_te_grouped_linear' into 'main'
2fb82afde ADLR/megatron-lm!2323 - Fix torch native ckpt for TEGroupedLinear
81fee9b00 Merge branch 'ko3n1g/ci/increase-interval-time' into 'main'
ee929a578 ADLR/megatron-lm!2372 - ci: Increase interval time
cac3ec3f1 Merge branch 'ko3n1g/ci/exempt-non-core-from-legacy' into 'main'
2a34f2a4b ADLR/megatron-lm!2371 - ci: Exempt non-core from legacy tests
bbaa03a09 Merge branch 'ko3n1g/ci/jet-fleet-2' into 'main'
69d5c714c ADLR/megatron-lm!2367 - ci: Try small runners
7b79d5b5a Merge branch 'huvu/update_t5_attentionmask_nightly_goldenvalues' into 'main'
886fd129f ADLR/megatron-lm!2364 - update golden values for nightly test
b6866aed3 Merge branch 'ko3n1g/ci/jet-fleet' into 'main'
a231b87be ADLR/megatron-lm!2365 - ci: JET improvements
f214627ac Merge branch 'ko3n1g/chore/add-mypy' into 'main'
4f5aa6d86 ADLR/megatron-lm!2360 - chore: Add mypy optionally
6033e95f2 Merge branch 'yuya/add_attn_bias' into 'main'
cd1d30b6a ADLR/megatron-lm!2293 - Add attention bias arg in MCore transformer for TE cuDNN FusedAttention
213824823 Merge branch 'helenn-fix-batch-pipeline-logic' into 'main'
2e975f04d ADLR/megatron-lm!2343 - Clarifications for batch x pipeline parallel logic
c4c905704 Merge branch 'torch-rms-norm' into 'main'
693ae8681 ADLR/megatron-lm!2015 - Support RMSNorm when TE and Apex are not installed
68e11fb1a Merge branch 'qknorm' into 'main'
62e2e33fc ADLR/megatron-lm!2347 - QKNorm to work with TENorm
0f389f231 Merge branch 'jbarker/etp_equals_tp' into 'main'
57ed924c0 ADLR/megatron-lm!2260 - Support etp==tp when epp==0 and enforce torch ckpt-format when epp>1
5438d1549 Merge branch 'ko3n1g/ci/retry-download' into 'main'
06c67b476 ADLR/megatron-lm!2357 - ci: Retry download assets
6c88bfce1 Merge branch 'ko3n1g/ci/re-enable-mm-tests' into 'main'
9e9d4f53b ADLR/megatron-lm!2348 - ci: Re-enable llava tests
ce507eeaf Merge branch 'trintamaki/nvlm-example-scripts' into 'main'
4131b0734 ADLR/megatron-lm!2306 - NVLM example scripts
63b85200e Merge branch 'jbarker-main-patch-72619' into 'main'
8b7275113 ADLR/megatron-lm!2351 - Add missing arg to save_checkpoint call
2bdc60c4a Merge branch 'add_hierarchical_cp_comm_group' into 'main'
645c329d0 ADLR/megatron-lm!2279 - Add hierarchical cp comm group
216386560 Merge branch 'huvu/update_t5_attentionmasktype' into 'main'
c1728c12f ADLR/megatron-lm!2273 - Updating all T5 attention masks (encoder, decoder, encoder-decoder) to be compatible with all 3 TE backends
8e22e5b89 Merge branch 'shanmugamr-main-patch-24278' into 'main'
229e2254c ADLR/megatron-lm!2345 - Update simple_text_generation_controller.py
4c4215fd7 Merge branch 'boxiangw/fsdp2' into 'main'
e1993fa6f ADLR/megatron-lm!2150 - Add support for PyTorch FSDP-2
ae9c14119 Merge branch 'lmcafee/distopt-doc-oct24' into 'main'
26b8b649a ADLR/megatron-lm!2240 - Rename optimizer's model_parallel_group -> grad_stats_parallel_group.
00e76eed0 Merge branch 'trintamaki/llava-pp-fixes' into 'main'
ff790ad04 ADLR/megatron-lm!2267 - Llava pp > 0 fixes
64cbae55a Merge branch 'dist_common_fix' into 'main'
2e7030e11 ADLR/megatron-lm!2085 - Check common state dict consistancy across ranks and log warning in case of mismatch.
0e29f58d8 Merge branch 'trintamaki/nvlm-tile-tag' into 'main'
b94bbb466 ADLR/megatron-lm!2311 - NVLM tile tag support
aded519cf Merge branch 'ko3n1g/ci/fix-auto-format-forks' into 'main'
8666fdb8c ADLR/megatron-lm!2337 - ci: Disable auto-format on forks
e4e91412f Merge branch 'ko3n1g/ci/fix-weeklies' into 'main'
6b74ef9a2 ADLR/megatron-lm!2336 - ci: Fix weekly functional tests
343bdbcfd Merge branch 'ko3n1g/ci/fix-rules' into 'main'
3c5303708 ADLR/megatron-lm!2335 - ci: Always run formatting
9cc52ac8c Merge branch 'ko3n1g/ci/auto-format' into 'main'
84931f4b5 ADLR/megatron-lm!2333 - ci: Autoformat files
5e4ee1060 Merge branch 'small_changes' into 'main'
bb30326f9 ADLR/megatron-lm!2321 - Fixing small stuff for consistancy
47ff44e5b Merge branch 'ko3n1g/ci/restart-nccl-failures' into 'main'
9684d5e6e ADLR/megatron-lm!2334 - ci: Restart on infra issues
134a1d5a9 Merge branch 'diffusion_pp_vpp' into 'main'
a387779c4 ADLR/megatron-lm!2202 - all-reduce of conditional embedder grads across pp/vpp ranks for diffusion transformer
c887ae53a Merge branch 'ko3n1g/ci/restart-nccl-failures' into 'main'
a505e288c ADLR/megatron-lm!2332 - ci: Restart on NCCL failures
e504eca43 Merge branch 'ko3n1g/ci/fix-ut-notifications' into 'main'
fe43b465d ADLR/megatron-lm!2331 - ci: Add notifications for unit tests
392bc05c7 Merge branch 'ko3n1g/build/fix-modelopt' into 'main'
d5b4f6a38 ADLR/megatron-lm!2313 - build: Fix modelopt dependency
acf485547 Merge branch 'ko3n1g/tests/optimize' into 'main'
4e7adc2cc ADLR/megatron-lm!2330 - ci: Less buckets for unit tests
5e7b14dc3 Merge branch 'ko3n1g/ci/deprecate-torchrun' into 'main'
66b788ab4 ADLR/megatron-lm!2326 - ci: Deprecate torchrun
1b8fce7e1 Merge branch 'tokenizer_args' into 'main'
5ebcc5a7b ADLR/megatron-lm!2317 - Keep tokenization args in sync between tools/ and training/
0343d032b Merge branch 'trintamaki/nvlm-task-encoders' into 'main'
71d560063 ADLR/megatron-lm!2315 - NVLM task encoders
c2e9fb504 Merge branch 'trintamaki/internvit' into 'main'
95ea6e57e ADLR/megatron-lm!2295 - InternViT support for NVLM
32fc18acc Merge branch 'ko3n1g/ci/make-push-optional' into 'main'
c5b5d4144 ADLR/megatron-lm!2324 - ci: Make PyPi push wheel
21cfea4f3 Merge branch 'triton-cache-fix-2' into 'main'
7b6c33851 ADLR/megatron-lm!2320 - Update Triton version limit for cache manager patch
bc8c4f356 Merge branch 'hn-fix-mla-kv-cache' into 'main'
369fec6c7 ADLR/megatron-lm!2294 - Fix signature for multi-latent attention KV cache update
19515ac9e Merge branch 'ko3n1g/tests/cp-tests' into 'main'
358fbcfbd ADLR/megatron-lm!2310 - tests: Re-enable CP tests
0594f2088 Merge branch 'ko3n1g/tests/fix-flash-fused-attn' into 'main'
1bb6c9496 ADLR/megatron-lm!2304 - tests: Set flash/fused attn
f39c48dba Merge branch 'yueshen/mixtral8x7b_support' into 'main'
0197f6fc7 ADLR/megatron-lm!2200 - Mixtral8x7b modelopt support
ad5ce4565 Merge branch 'ksivamani/rm_onnx_export' into 'main'
769b03a86 ADLR/megatron-lm!2296 - Remove `is_onnx_export_mode` import from TE
af4b5de57 Merge branch 'ko3n1g/ci/fix-nightly' into 'main'
013d9f99b ADLR/megatron-lm!2303 - tests: Disable modelopt test on dev
3d27a9de6 Merge branch 'ko3n1g/test/flaky-test-bin-reader' into 'main'
e81c7bbc9 ADLR/megatron-lm!2302 - tests: Disable flaky test
bc3b89045 Merge branch 'ko3n1g/ci/fix-nightly' into 'main'
1b4b86852 ADLR/megatron-lm!2300 - ci: Fix nightly tests
95fde5986 Merge branch 'helenn-transformer-interface-test' into 'main'
0a0baaf41 ADLR/megatron-lm!2297 - Add TestTransformerLayerInterface test
8539eba46 Merge branch 'mblaz/fix-t5-sharding' into 'main'
500b2780e ADLR/megatron-lm!2210 - Add dist-ckpt support to encoder_pipeline_parallel
d229a2995 Merge branch 'xiny/fix_rope_config' into 'main'
8a1dc8b45 ADLR/megatron-lm!2298 - [Test] Fix Config for RoPE Fusion
441cb9250 Merge branch 'decouple_send_recv_issue' into 'main'
4295be1a6 ADLR/megatron-lm!2117 - tunable schedule with overlapping
2e2bdf623 Merge branch 'trintamaki/more-evals' into 'main'
9ed847395 ADLR/megatron-lm!2174 - More multimodal evals
d54618286 Merge branch 'ko3n1g/ci/retry-wait' into 'main'
7d43d84fd ADLR/megatron-lm!2288 - revert: Try/catch
c3eb3be6c Merge branch 'ko3n1g/ci/retry-wait' into 'main'
215d7693f ADLR/megatron-lm!2287 - tests: Fix backoff
92ae1d77f Merge branch 'papakipos/mamba-unit-tests' into 'main'
2e4e0d9ad ADLR/megatron-lm!2233 - Add unit tests for Mamba hybrid model sub-units
66cc8c073 Merge branch 'sasatheesh/eval-harness-completions-offsets-1' into 'main'
f8fce3ea2 ADLR/megatron-lm!2212 - openai completions endpoint
8742d095b Merge branch 'xiny/fix_async_grad_allreduce' into 'main'
dab850fd7 ADLR/megatron-lm!2247 - Fix async_grad_allreduce deprecation warning
a32218d9f Merge branch 'papakipos/mamba-hybrid-override-documentation' into 'main'
9c1e2a017 ADLR/megatron-lm!2280 - improve --hybrid-override-pattern documentation
e084ab04c Merge branch 'zijiey/moe_router_2nd' into 'main'
ac0474d26 ADLR/megatron-lm!1915 - MoE Refactoring - Switch to mask-based routing for MoE
2e047cff8 Merge branch 'hn-flash-decode' into 'main'
776ed9ab8 ADLR/megatron-lm!1966 - Flash decoding for inference
784060c03 Merge branch 'xiny/fp8_grouped_gemm' into 'main'
56b49a4ca ADLR/megatron-lm!1865 - Enable FP8 for TEGroupedMLP
3e64fdf56 Merge branch 'aanoosheh/distillation-api' into 'main'
1c2c7dc79 ADLR/megatron-lm!1522 - ModelOpt Distillation API
601499f8c Merge branch 'trintamaki/multimodal-tokenizer' into 'main'
179ef49dc ADLR/megatron-lm!1994 - Multimodal tokenizer
213c8a23f Merge branch 'ko3n1g/ci/pushpypi' into 'main'
5f1b5f91b ADLR/megatron-lm!2278 - ci: PyPi push with exp backoff
864be0a65 Merge branch 'zijiey/update_moe_func_tests' into 'main'
806b45a38 ADLR/megatron-lm!2277 - Update MoE functional tests
661d21653 Merge branch 'ko3n1g/ci/better-dependencies' into 'main'
ae921183d ADLR/megatron-lm!2274 - ci: Better dependencies
0db2be5da Merge branch 'xiny/fix_warning' into 'main'
2488e20d4 ADLR/megatron-lm!2231 - Fix deprecate/future warnings and typos
f1f039224 Merge branch 'ko3n1g/ci/flaky-unit-tests' into 'main'
a616d4590 ADLR/megatron-lm!2271 - tests: Verify flaky tests
8ba37c0ad Merge branch 'ko3n1g/tests/flaky-functionals' into 'main'
377a11492 ADLR/megatron-lm!2269 - tests: Flaky functionals
f08ab8e5e Merge branch 'ko3n1g/tests/fix-uninstall-test' into 'main'
25940a5e1 ADLR/megatron-lm!2272 - test: Fix uninstall test
38736c2a4 Merge branch 'ko3n1g/ci/faster-unit-tests' into 'main'
aa6be133a ADLR/megatron-lm!2270 - ci: Faster unit tests
f77632c10 Merge branch 'xiny/fix_rope_with_thd_cp' into 'main'
210162aeb ADLR/megatron-lm!2201 - Make RoPE work with packed sequence and CP and Miscellaneous fixes
d357c1883 Merge branch 'ko3n1g/ci/single-test-cases' into 'main'
5b2f5b08e ADLR/megatron-lm!2255 - ci: Improvements around functional triggering
8a27acc76 Merge branch 'remove-no-te-guard' into 'main'
d00cc116f ADLR/megatron-lm!2195 - Remove guard blocking distributed optimizer when TE/Apex are not installed
397e9da95 Merge branch 'ko3n1g/ci/fix-defaults' into 'main'
6e05f339f ADLR/megatron-lm!2268 - ci: Fix defaults
653cf809b Merge branch 'ko3n1g/ci/repeat-functionals' into 'main'
ef6cba6d0 ADLR/megatron-lm!2266 - ci: Move REPEATS to launcher level
d7e82d952 Merge branch 'ko3n1g/ci/fix-notifications-2' into 'main'
8bac43ac3 ADLR/megatron-lm!2265 - ci: Fix notifications
32e427db5 Merge branch 'ko3n1g/ci/fix-stage' into 'main'
2501d5282 ADLR/megatron-lm!2262 - ci: Allow dry-run of publish
345b1022b Merge branch 'ko3n1g/ci/timeouts' into 'main'
b5462b127 ADLR/megatron-lm!2257 - ci: Update timeouts
87dfc7c5e Merge branch 'ko3n1g/chore/release-notes-0.9.0' into 'main'
e93058c8a ADLR/megatron-lm!2258 - docs: Update changelog
3e167cfcd Merge branch 'ko3n1g/ci/cherry-pick-group-2' into 'main'
e942dfeba ADLR/megatron-lm!2256 - ci: Fix Slack message
d7e2b68e2 Merge branch 'ko3n1g/chore/release-results-0.9.0' into 'main'
7b5b8b44c ADLR/megatron-lm!2176 - chore: Upload release results
425cdd48d Merge branch 'ko3n1g/ci/enable-ft-for-dev' into 'main'
315909e3b ADLR/megatron-lm!2232 - ci: Re-enable functional tests on pyt(dev)
151a972d7 Merge branch 'ko3n1g/ci/failed-cherrypick-group' into 'main'
4d956176f ADLR/megatron-lm!2249 - ci: @slack-group for failed cherry pick attempt
c4cd7f35d Merge branch 'ko3n1g/ci/fix-pipeline-2' into 'main'
b0bc3a250 ADLR/megatron-lm!2248 - ci: Disable secrets-check on `main`
227a2b6e6 Merge branch 'fix-uneven' into 'main'
b759cd37e ADLR/megatron-lm!2220 - fix uneven pipeline
563d5d172 Merge branch 'lmcafee/converter-tests' into 'main'
68ad185b1 ADLR/megatron-lm!1889 - Checkpoint model converter tests.
775ed0de4 Merge branch 'ko3n1g/ci/fix-pipeline' into 'main'
c3b928000 ADLR/megatron-lm!2239 - ci: Don't run secrets on main
a95f142d8 Merge branch 'xren/cp_comm_type' into 'main'
91a8a4c8d ADLR/megatron-lm!2215 - Configure per-layer communication type for context parallelism
db7d37b54 Merge branch 'qwen25_conversion' into 'main'
d28e26ed0 ADLR/megatron-lm!2227 - qwen2.5 conversion
739177ea3 Merge branch 'pmannan/llava_debug' into 'main'
2c950a5df ADLR/megatron-lm!2038 - LLaVA Multimodal SP support
db6cb4e4b Merge branch 'jstjohn/improved_missing_key_exception' into 'main'
6adf0bd6b ADLR/megatron-lm!1927 - Improved missing key exception for errors during checkpoint io
02d1762a8 Merge branch 'lit/fix_multi_tensor_scale' into 'main'
a9c16c504 ADLR/megatron-lm!1939 - fix an issue when using `multi_tensor_scale` from TE
b7814bb41 Merge branch 'duncan/triton-cache-fix' into 'main'
839dff2a4 ADLR/megatron-lm!2075 - Triton cache fix
ecf0dbe4a Merge branch 'ko3n1g/ci/test-dependencies' into 'main'
cba8bdce1 ADLR/megatron-lm!2234 - ci: Set stronger dependencies
55622ff6a Merge branch 'ko3n1g/ci/refactor-jobs' into 'main'
33d2f452a ADLR/megatron-lm!2223 - ci(refactor): Facelift gitlab-ci
0d89fc4c0 Merge branch 'dnarayanan/fix_import' into 'main'
a30d63b7b ADLR/megatron-lm!2226 - Add missing import to megatron/training/initialize.py
6bafe9209 Merge branch 'add_siglip_converter' into 'main'
bc4874cb2 ADLR/megatron-lm!2214 - Add siglip converter to multimodal example
4876ee186 Merge branch 'ko3n1g/chore/bump-pyt' into 'main'
831d64d70 ADLR/megatron-lm!2017 - chore: Bump Pytorch container

git-subtree-dir: megatron
git-subtree-split: bd677bfb13ac2f19deaa927adc6da6f9201d66aa
---
 .gitlab-ci.yml | 129 +- .gitlab/labeler-config.yml | 6 +- .gitlab/stages/00.pre.yml | 72 +- .gitlab/stages/01.test.yml | 484 + .gitlab/stages/01.tests.yml | 208 - .gitlab/stages/02.functional-tests.yml | 107 +- .gitlab/stages/03.publish.yml | 114 +- CHANGELOG.md | 18 + CODEOWNERS | 8 +- Dockerfile.ci | 63 - Dockerfile.ci.dev | 71 +- Dockerfile.ci.lts | 77 + Dockerfile.linting | 16 +- LICENSE | 13 +- MANIFEST.in | 3 +- README.md | 88 +- docs/source/api-guide/context_parallel.rst | 2 +- .../pretrain_gpt_modelopt.py | 136 + .../export/ptq_and_trtllm_export/README.md | 42 + .../ptq_trtllm_mixtral_8x7b.sh | 84 + .../text_generation_ptq.py | 8 +- examples/multimodal/Dockerfile | 10 +- examples/multimodal/README.md | 11 +- .../combine_lm_vision_checkpoints.sh | 57 + examples/multimodal/combine_mistral_clip.sh | 23 - examples/multimodal/config.py | 65 +- examples/multimodal/conversation.py | 353 - examples/multimodal/dataloader_provider.py | 57 +- examples/multimodal/dataset_helpers.py | 1002 +- examples/multimodal/evaluate_ai2d.py | 46 + examples/multimodal/evaluate_chartqa.py | 2 +- examples/multimodal/evaluate_mathvista.py | 114 + examples/multimodal/evaluate_mmmu.py | 10 +- examples/multimodal/evaluate_ocrbench.py | 129 + examples/multimodal/evaluate_textvqa.py | 2 +- examples/multimodal/evaluate_vqav2.py | 16 +- examples/multimodal/evaluation_datasets.py | 858 + examples/multimodal/image_processing.py | 96 +- examples/multimodal/layer_specs.py | 25 +- examples/multimodal/model.py | 72 +- .../{ => model_converter}/clip_converter.py | 0 .../model_converter/internvit_converter.py | 162 + .../model_converter/siglip_converter.py | 154 + .../model_converter/vision_model_tester.py | 121 + examples/multimodal/multimodal_args.py | 32 + examples/multimodal/nvlm/README.md | 100 + examples/multimodal/nvlm/internvit.py | 273 +
examples/multimodal/nvlm/nvlm_prompts.json | 165 + .../nvlm/pp_checkpoint_converter.py | 180 + examples/multimodal/nvlm/pretrain_blend.yaml | 28 + .../nvlm/pretrain_qwen20_72b_internvit_6b.sh | 158 + .../nvlm/pretrain_yi_34b_internvit_6b.sh | 154 + ...text_generation_qwen20_72b_internvit_6b.sh | 141 + ...run_text_generation_yi_34b_internvit_6b.sh | 140 + examples/multimodal/nvlm/sft_34b_internvit.sh | 160 + examples/multimodal/nvlm/sft_blend.yaml | 23 + .../nvlm/sft_qwen20_72b_internvit_6b.sh | 165 + examples/multimodal/pretrain_mistral_clip.sh | 4 +- examples/multimodal/run_text_generation.py | 830 +- examples/multimodal/sft_mistral_clip.sh | 4 +- .../text_generation_mistral_clip.sh | 7 +- examples/multimodal/train.py | 209 +- megatron/core/datasets/masked_dataset.py | 12 +- megatron/core/datasets/t5_dataset.py | 177 +- megatron/core/dist_checkpointing/__init__.py | 1 + .../core/dist_checkpointing/dict_utils.py | 5 +- megatron/core/dist_checkpointing/mapping.py | 13 +- megatron/core/dist_checkpointing/optimizer.py | 33 +- .../core/dist_checkpointing/serialization.py | 21 +- .../state_dict_transformation.py | 23 +- .../dist_checkpointing/strategies/base.py | 4 + .../strategies/filesystem_async.py | 119 +- .../dist_checkpointing/strategies/torch.py | 116 +- .../core/dist_checkpointing/validation.py | 39 +- megatron/core/distributed/README.md | 11 + megatron/core/distributed/__init__.py | 8 +- .../core/distributed/data_parallel_base.py | 96 + .../distributed/distributed_data_parallel.py | 63 +- .../distributed_data_parallel_config.py | 5 + .../core/distributed/finalize_model_grads.py | 132 +- .../core/distributed/param_and_grad_buffer.py | 143 +- .../torch_fully_sharded_data_parallel.py | 115 + .../engine_builder/trtllm_engine_builder.py | 6 + .../default_conversion_dict.py | 46 +- .../model_to_trllm_mapping/falcon_model.py | 26 - .../model_to_trllm_mapping/gemma_model.py | 21 - .../model_to_trllm_mapping/gpt_model.py | 28 - .../model_to_trllm_mapping/gpt_next_model.py | 24 - .../model_to_trllm_mapping/llama_model.py | 22 - .../model_to_trllm_mapping/starcoder_model.py | 30 - megatron/core/export/trtllm/trtllm_helper.py | 193 +- ...tributed_trtllm_model_weights_converter.py | 26 +- ...e_device_trtllm_model_weights_converter.py | 40 +- .../core/extensions/transformer_engine.py | 284 +- .../abstract_model_inference_wrapper.py | 10 +- .../gpt/gpt_inference_wrapper.py | 13 +- .../inference_wrapper_config.py | 12 +- .../t5/t5_inference_wrapper.py | 72 +- .../modelopt_support/gpt/model_specs.py | 30 +- .../simple_text_generation_controller.py | 2 +- megatron/core/jit.py | 5 +- megatron/core/model_parallel_config.py | 71 +- megatron/core/models/T5/t5_model.py | 52 +- megatron/core/models/T5/t5_spec.py | 10 +- megatron/core/models/bert/bert_layer_specs.py | 6 +- megatron/core/models/bert/bert_lm_head.py | 18 +- .../embeddings/language_model_embedding.py | 14 +- .../models/common/embeddings/rope_utils.py | 109 +- .../common/embeddings/rotary_pos_embedding.py | 53 +- .../embeddings/yarn_rotary_pos_embedding.py | 10 + megatron/core/models/gpt/gpt_layer_specs.py | 19 +- megatron/core/models/gpt/gpt_model.py | 27 +- .../core/models/multimodal/llava_model.py | 399 +- megatron/core/models/multimodal/llava_spec.py | 6 +- megatron/core/models/retro/decoder_spec.py | 6 +- megatron/core/models/retro/encoder_spec.py | 6 +- megatron/core/models/vision/clip_vit_model.py | 43 +- .../models/vision/multimodal_projector.py | 11 +- .../core/models/vision/vit_layer_specs.py | 6 +- 
megatron/core/num_microbatches_calculator.py | 10 + megatron/core/optimizer/__init__.py | 44 +- megatron/core/optimizer/clip_grads.py | 55 +- megatron/core/optimizer/distrib_optimizer.py | 61 +- megatron/core/optimizer/optimizer.py | 86 +- megatron/core/parallel_state.py | 727 +- .../pipeline_parallel/p2p_communication.py | 52 +- megatron/core/pipeline_parallel/schedules.py | 710 +- megatron/core/ssm/mamba_block.py | 3 +- megatron/core/ssm/mamba_layer.py | 43 +- megatron/core/ssm/mamba_mixer.py | 2 +- megatron/core/ssm/triton_cache_manager.py | 85 +- megatron/core/tensor_parallel/__init__.py | 4 - .../core/tensor_parallel/cross_entropy.py | 18 +- megatron/core/tensor_parallel/layers.py | 110 +- megatron/core/tensor_parallel/mappings.py | 263 +- megatron/core/tensor_parallel/random.py | 8 +- megatron/core/tensor_parallel/utils.py | 23 +- megatron/core/timers.py | 55 +- megatron/core/transformer/attention.py | 180 +- .../core/transformer/dot_product_attention.py | 4 + megatron/core/transformer/mlp.py | 24 +- megatron/core/transformer/moe/README.md | 40 +- megatron/core/transformer/moe/experts.py | 138 +- .../moe/legacy_a2a_token_dispatcher.py | 164 +- megatron/core/transformer/moe/moe_layer.py | 18 +- megatron/core/transformer/moe/moe_utils.py | 291 +- megatron/core/transformer/moe/router.py | 58 +- .../core/transformer/moe/shared_experts.py | 26 +- .../core/transformer/moe/token_dispatcher.py | 347 +- .../transformer/multi_latent_attention.py | 7 +- megatron/core/transformer/torch_layer_norm.py | 44 +- megatron/core/transformer/torch_norm.py | 48 + .../core/transformer/transformer_block.py | 21 +- .../core/transformer/transformer_config.py | 87 +- .../core/transformer/transformer_layer.py | 34 +- megatron/core/utils.py | 222 +- megatron/inference/algos/__init__.py | 1 + megatron/inference/algos/distillation.py | 454 + megatron/inference/arguments.py | 16 + megatron/inference/checkpointing.py | 14 +- megatron/inference/docs/distillation.md | 91 + megatron/inference/endpoints/common.py | 18 + megatron/inference/endpoints/completions.py | 186 + megatron/inference/gpt/__init__.py | 3 + megatron/inference/gpt/loss_func.py | 89 + megatron/inference/gpt/model_provider.py | 159 +- megatron/inference/text_generation/api.py | 13 +- .../text_generation/communication.py | 45 +- .../inference/text_generation/forward_step.py | 37 +- .../inference/text_generation/generation.py | 47 +- .../inference/text_generation/tokenization.py | 2 - megatron/inference/text_generation_server.py | 128 +- megatron/legacy/model/transformer.py | 19 +- megatron/training/activations.py | 5 +- megatron/training/arguments.py | 261 +- megatron/training/checkpointing.py | 117 +- megatron/training/global_vars.py | 31 +- megatron/training/initialize.py | 20 +- .../training/tokenizer/gpt2_tokenization.py | 9 +- .../tokenizer/multimodal_tokenizer.py | 274 + megatron/training/tokenizer/tokenizer.py | 28 + megatron/training/training.py | 409 +- megatron/training/utils.py | 129 +- megatron/training/yaml_arguments.py | 9 +- mypy.ini | 11 + pretrain_gpt.py | 35 +- pretrain_mamba.py | 18 +- pretrain_t5.py | 74 +- pretrain_vlm.py | 151 +- pyproject.toml | 3 - requirements/pytorch:24.01/requirements.txt | 15 + requirements/pytorch:24.07/requirements.txt | 14 + setup.py | 32 +- .../jet_recipes/multimodal-llava.yaml | 36 - .../python_test_utils/common.py | 41 +- .../get_test_results_from_tensorboard_logs.py | 10 +- .../jet/generate_jet_trigger_job.py | 113 - .../jet/launch_jet_workload.py | 216 - .../shell_test_utils/_run_training.sh | 32 +- 
.../shell_test_utils/notify.sh | 192 - .../shell_test_utils/notify_unit_tests.sh | 186 - .../shell_test_utils/run_ci_test.sh | 29 +- .../golden_values_dev.json | 52 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 70 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + .../golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../golden_values_dev.json | 53 + .../golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../golden_values_dev.json | 53 + .../golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../golden_values_dev.json | 50 + .../golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../golden_values_dev.json | 50 + .../golden_values_lts.json} | 0 .../model_config.yaml | 5 +- .../bert_release/golden_values_0.8.0.json | 6590 --- .../bert_release/golden_values_0.9.0.json | 8063 ++++ .../bert/bert_release/model_config.yaml | 44 +- .../common/ckpt_converter/__main__.py | 630 + .../common/ckpt_converter/model_config.yaml | 7 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_0.8.0.json | 19558 +++++++- .../golden_values_0.9.0.json | 32049 ++++++++++++ .../gpt/gpt3_15b_8t_release/model_config.yaml | 21 +- .../gpt3_15b_8t_release_sm/model_config.yaml | 21 +- .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../model_config.yaml | 26 +- .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 53 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 53 + .../model_config.yaml | 3 +- .../model_config.yaml | 3 +- .../model_config.yaml | 3 +- .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 40 +- .../model_config.yaml | 3 +- .../model_config.yaml | 3 +- .../golden_values.json | 53 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../model_config.yaml | 3 +- .../model_config.yaml | 3 +- .../golden_values_dev.json | 50 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 3 +- .../golden_values_dev.json | 50 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 
.../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- .../golden_values.json | 1 - ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- .../golden_values.json | 1 - .../golden_values_dev.json | 1223 + .../golden_values_lts.json | 1223 + .../golden_values_dev.json | 1220 + ...den_values.json => golden_values_lts.json} | 0 .../golden_values.json | 1 - .../golden_values_dev.json | 1223 + .../golden_values_lts.json | 1223 + ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1223 + .../golden_values.json | 1 - .../golden_values_dev.json | 1223 + .../golden_values_lts.json | 1223 + ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1223 + ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1223 + ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1223 + .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 54 + .../model_config.yaml | 55 + .../model_config.yaml | 1 - .../model_config.yaml | 2 +- .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 2 +- .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 
.../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 20 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 53 + .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 2 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- .../model_config.yaml | 20 +- .../golden_values_dev.json | 53 + .../golden_values_lts.json | 1 + .../model_config.yaml | 11 +- .../model_config.yaml | 3 +- .../model_config.yaml | 11 +- .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values.json | 1 - .../golden_values.json | 1 - .../golden_values_dev.json | 53 + .../golden_values_lts.json | 37 + .../model_config.yaml | 1 - .../golden_values.json | 1 - .../golden_values.json | 1 - .../golden_values_dev.json | 53 + .../golden_values_lts.json | 37 + .../model_config.yaml | 1 - .../golden_values.json | 1 - .../golden_values_dev.json | 53 + .../golden_values_lts.json | 37 + .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + .../golden_values_lts.json | 53 + .../model_config.yaml | 11 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 3 +- ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 3 +- .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 5 +- .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - ...den_values.json => golden_values_dev.json} | 0 .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 65 + .../golden_values_0.9.0.json | 203 + .../model_config.yaml | 16 +- .../golden_values_0.8.0.json | 15195 +++++- .../golden_values_0.9.0.json | 21878 +++++++++ .../model_config.yaml | 20 +- .../model_config.yaml | 18 +- .../golden_values_0.9.0.json 
| 275 + .../model_config.yaml | 16 +- .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../golden_values_dev.json | 53 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values.json | 1 - .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values.json | 1 - .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_dev.json | 83 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../golden_values_dev.json | 83 + ...den_values.json => golden_values_lts.json} | 0 .../model_config.yaml | 1 - .../model_config.yaml | 1 - .../model_config.yaml | 2 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 2 +- .../golden_values_dev.json | 763 + .../golden_values_lts.json | 763 + .../model_config.yaml | 2 +- .../golden_values_dev.json | 83 + .../golden_values_lts.json} | 0 .../model_config.yaml | 2 +- .../model_config.yaml | 2 +- .../golden_values_dev.json | 83 + .../golden_values_lts.json} | 0 .../model_config.yaml | 2 +- .../golden_values.json | 83 - .../golden_values_lts.json | 83 + .../golden_values.json | 83 - .../golden_values_lts.json | 83 + .../t5/t5_release/golden_values_0.9.0.json | 40223 ++++++++++++++++ .../t5/t5_release/model_config.yaml | 33 +- .../retro/pretrain_retro_distributed_test.sh | 169 - .../python_scripts}/common.py | 125 +- .../generate_jet_trigger_job.py | 155 + .../python_scripts}/generate_local_jobs.py | 4 +- .../python_scripts/launch_jet_workload.py | 302 + .../recipes/_build-mcore-dev.yaml} | 4 +- .../test_utils/recipes/_build-mcore-lts.yaml | 11 + .../recipes}/_build-nemo.yaml | 0 .../recipes}/bert.yaml | 30 +- tests/test_utils/recipes/gpt-modelopt.yaml | 37 + .../recipes}/gpt-nemo.yaml | 10 +- .../recipes}/gpt.yaml | 68 +- .../test_utils/recipes/multimodal-llava.yaml | 49 + .../recipes}/t5.yaml | 39 +- tests/test_utils/recipes/unit-tests.yaml | 80 + tests/test_utils/shell_scripts/notify.sh | 215 + tests/unit_tests/conftest.py | 27 +- tests/unit_tests/data/test_bin_reader.py | 1 + tests/unit_tests/data/test_gpt_dataset.py | 1 - tests/unit_tests/data/test_preprocess_data.py | 2 +- .../unit_tests/dist_checkpointing/conftest.py | 5 + .../dist_checkpointing/models/common.py | 6 +- .../dist_checkpointing/models/test_mamba.py | 7 +- .../models/test_moe_experts.py | 193 +- .../models/test_t5_model.py | 131 +- .../unit_tests/dist_checkpointing/test_fp8.py | 1 - .../dist_checkpointing/test_fully_parallel.py | 1 + .../dist_checkpointing/test_local.py | 4 +- .../dist_checkpointing/test_nonpersistent.py | 2 - .../dist_checkpointing/test_optimizer.py | 28 +- .../dist_checkpointing/test_serialization.py | 129 +- tests/unit_tests/dist_checkpointing/utils.py | 3 + ...est_grad_reduce_for_replicated_embedder.py | 47 + .../distributed/test_param_and_grad_buffer.py | 40 +- .../export/trtllm/test_distributed_fp8.py | 271 + .../export/trtllm/test_single_device_fp8.py | 268 + .../test_trtllm_distributed_gpu_converter.py | 21 +- .../export/trtllm/test_trtllm_helper.py | 1 - .../t5/test_t5_inference_wrapper.py | 2 +- .../unit_tests/inference/test_flash_decode.py | 31 + 
...oder_decoder_text_generation_controller.py | 2 +- .../test_simple_text_generation_controller.py | 2 +- tests/unit_tests/models/test_bert_model.py | 1 + tests/unit_tests/models/test_llava_model.py | 478 +- tests/unit_tests/models/test_mamba_model.py | 9 + tests/unit_tests/models/test_t5_model.py | 117 + .../pipeline_parallel/test_helpers.py | 124 + tests/unit_tests/ssm/test_mamba_block.py | 80 + .../ssm/test_mamba_hybrid_layer_allocation.py | 76 + tests/unit_tests/ssm/test_mamba_layer.py | 47 + tests/unit_tests/ssm/test_mamba_mixer.py | 50 + .../tensor_parallel/test_initialization.py | 5 +- .../unit_tests/tensor_parallel/test_layers.py | 4 +- .../tensor_parallel/test_mappings.py | 11 +- tests/unit_tests/test_inference.py | 113 + .../unit_tests/test_local_multi_tensor_fns.py | 24 + tests/unit_tests/test_parallel_state.py | 127 +- tests/unit_tests/test_tokenizer.py | 99 +- tests/unit_tests/test_utilities.py | 21 +- tests/unit_tests/transformer/moe/conftest.py | 49 + .../moe/test_a2a_token_dispatcher.py | 18 +- .../transformer/moe/test_aux_loss.py | 3 + .../transformer/moe/test_grouped_mlp.py | 2 + .../transformer/moe/test_moe_layer.py | 50 + .../transformer/moe/test_routers.py | 2 + .../transformer/moe/test_sequential_mlp.py | 9 +- .../transformer/moe/test_token_dispatcher.py | 75 +- .../transformer/moe/test_upcycling.py | 1 - .../transformer/test_retro_attention.py | 1 + tools/autoformat.sh | 4 +- tools/checkpoint/loader_llama_mistral.py | 41 +- tools/checkpoint/loader_mcore.py | 116 +- tools/checkpoint/loader_megatron.py | 8 +- tools/checkpoint/loader_mixtral_hf.py | 3 +- tools/checkpoint/saver_mcore.py | 354 +- tools/checkpoint/saver_megatron.py | 7 +- tools/checkpoint/schema_base.py | 93 + tools/checkpoint/schema_mcore.py | 143 + tools/checkpoint/setter.py | 113 - tools/checkpoint/utils.py | 7 - tools/preprocess_data.py | 20 +- unit-test-job-lts.yaml | 107 + 631 files changed, 173679 insertions(+), 14028 deletions(-) create mode 100644 .gitlab/stages/01.test.yml delete mode 100644 .gitlab/stages/01.tests.yml delete mode 100644 Dockerfile.ci create mode 100644 Dockerfile.ci.lts create mode 100644 examples/export/knowledge_distillation/pretrain_gpt_modelopt.py create mode 100644 examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh create mode 100755 examples/multimodal/combine_lm_vision_checkpoints.sh delete mode 100755 examples/multimodal/combine_mistral_clip.sh delete mode 100644 examples/multimodal/conversation.py create mode 100644 examples/multimodal/evaluate_ai2d.py create mode 100644 examples/multimodal/evaluate_mathvista.py create mode 100644 examples/multimodal/evaluate_ocrbench.py create mode 100644 examples/multimodal/evaluation_datasets.py rename examples/multimodal/{ => model_converter}/clip_converter.py (100%) create mode 100755 examples/multimodal/model_converter/internvit_converter.py create mode 100644 examples/multimodal/model_converter/siglip_converter.py create mode 100644 examples/multimodal/model_converter/vision_model_tester.py create mode 100644 examples/multimodal/nvlm/README.md create mode 100644 examples/multimodal/nvlm/internvit.py create mode 100644 examples/multimodal/nvlm/nvlm_prompts.json create mode 100644 examples/multimodal/nvlm/pp_checkpoint_converter.py create mode 100644 examples/multimodal/nvlm/pretrain_blend.yaml create mode 100644 examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh create mode 100644 examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh create mode 100755 
examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh create mode 100644 examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh create mode 100644 examples/multimodal/nvlm/sft_34b_internvit.sh create mode 100644 examples/multimodal/nvlm/sft_blend.yaml create mode 100644 examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh create mode 100644 megatron/core/distributed/README.md create mode 100644 megatron/core/distributed/data_parallel_base.py create mode 100644 megatron/core/distributed/torch_fully_sharded_data_parallel.py delete mode 100644 megatron/core/export/trtllm/model_to_trllm_mapping/falcon_model.py delete mode 100644 megatron/core/export/trtllm/model_to_trllm_mapping/gemma_model.py delete mode 100644 megatron/core/export/trtllm/model_to_trllm_mapping/gpt_model.py delete mode 100644 megatron/core/export/trtllm/model_to_trllm_mapping/gpt_next_model.py delete mode 100644 megatron/core/export/trtllm/model_to_trllm_mapping/llama_model.py delete mode 100644 megatron/core/export/trtllm/model_to_trllm_mapping/starcoder_model.py create mode 100644 megatron/core/transformer/torch_norm.py create mode 100644 megatron/inference/algos/__init__.py create mode 100644 megatron/inference/algos/distillation.py create mode 100644 megatron/inference/docs/distillation.md create mode 100644 megatron/inference/endpoints/common.py create mode 100644 megatron/inference/endpoints/completions.py create mode 100644 megatron/inference/gpt/loss_func.py create mode 100644 megatron/training/tokenizer/multimodal_tokenizer.py create mode 100644 mypy.ini create mode 100644 requirements/pytorch:24.01/requirements.txt create mode 100644 requirements/pytorch:24.07/requirements.txt delete mode 100644 tests/functional_tests/jet_recipes/multimodal-llava.yaml delete mode 100644 tests/functional_tests/python_test_utils/jet/generate_jet_trigger_job.py delete mode 100644 tests/functional_tests/python_test_utils/jet/launch_jet_workload.py delete mode 100644 tests/functional_tests/shell_test_utils/notify.sh delete mode 100644 tests/functional_tests/shell_test_utils/notify_unit_tests.sh create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json rename tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values.json => bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2 => 
bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2}/model_config.yaml (97%) create mode 100644 tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values.json => bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2 => bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2}/model_config.yaml (97%) create mode 100644 tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values.json => bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1 => bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1}/model_config.yaml (97%) create mode 100644 tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values.json => bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_tp1_pp2 => bert_nightly_dgx_a100_1N8G_tp1_pp2}/model_config.yaml (97%) create mode 100644 tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values.json => bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/bert/{bert_345m_nightly_dgx_a100_1N8G_tp4_pp1 => bert_nightly_dgx_a100_1N8G_tp4_pp1}/model_config.yaml (96%) delete mode 100644 tests/functional_tests/test_cases/bert/bert_release/golden_values_0.8.0.json create mode 100644 tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json create mode 100644 tests/functional_tests/test_cases/common/ckpt_converter/__main__.py create mode 100644 tests/functional_tests/test_cases/common/ckpt_converter/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/gpt/{gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts => gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te}/model_config.yaml (75%) rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values_lts.json rename 
tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/{golden_values.json => golden_values_lts.json} (58%) delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/golden_values.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/{golden_values.json => golden_values_lts.json} (100%) delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values_lts.json delete mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/gpt/{gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G => gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G}/model_config.yaml (75%) create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/{gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts => gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G}/model_config.yaml (80%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/{gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G => gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G}/model_config.yaml (87%) rename tests/functional_tests/test_cases/gpt/{gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dgx_a100_1N8G => gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G}/model_config.yaml (87%) delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/golden_values.json delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values.json delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json delete mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/{gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G => gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G}/model_config.yaml (90%) rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json rename tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/{golden_values.json => golden_values_dev.json} (100%) create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml create mode 100644 tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json create mode 100644 tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json create mode 100644 tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json create mode 100644 tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json rename tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/{golden_values.json => golden_values_lts.json} (100%) delete mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json delete mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json rename tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json rename 
tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/{golden_values.json => golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch => t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch}/model_config.yaml (98%) create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1 => t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1}/model_config.yaml (98%) create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel => t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel}/model_config.yaml (98%) create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values.json => t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1 => t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1}/model_config.yaml (98%) rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch => t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch}/model_config.yaml (98%) create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values.json => t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json} (100%) rename tests/functional_tests/test_cases/t5/{t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1 => t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1}/model_config.yaml (98%) delete mode 100644 tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json delete mode 100644 tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json delete mode 100755 tests/functional_tests/test_scripts/retro/pretrain_retro_distributed_test.sh rename tests/{functional_tests/python_test_utils/jet => test_utils/python_scripts}/common.py (53%) create mode 100644 tests/test_utils/python_scripts/generate_jet_trigger_job.py rename tests/{functional_tests/python_test_utils/jet => test_utils/python_scripts}/generate_local_jobs.py (92%) create mode 100644 tests/test_utils/python_scripts/launch_jet_workload.py rename 
tests/{functional_tests/jet_recipes/_build-mcore.yaml => test_utils/recipes/_build-mcore-dev.yaml} (70%) create mode 100644 tests/test_utils/recipes/_build-mcore-lts.yaml rename tests/{functional_tests/jet_recipes => test_utils/recipes}/_build-nemo.yaml (100%) rename tests/{functional_tests/jet_recipes => test_utils/recipes}/bert.yaml (58%) create mode 100644 tests/test_utils/recipes/gpt-modelopt.yaml rename tests/{functional_tests/jet_recipes => test_utils/recipes}/gpt-nemo.yaml (69%) rename tests/{functional_tests/jet_recipes => test_utils/recipes}/gpt.yaml (80%) create mode 100644 tests/test_utils/recipes/multimodal-llava.yaml rename tests/{functional_tests/jet_recipes => test_utils/recipes}/t5.yaml (55%) create mode 100644 tests/test_utils/recipes/unit-tests.yaml create mode 100644 tests/test_utils/shell_scripts/notify.sh create mode 100644 tests/unit_tests/distributed/test_grad_reduce_for_replicated_embedder.py create mode 100644 tests/unit_tests/export/trtllm/test_distributed_fp8.py create mode 100644 tests/unit_tests/export/trtllm/test_single_device_fp8.py create mode 100644 tests/unit_tests/inference/test_flash_decode.py create mode 100644 tests/unit_tests/pipeline_parallel/test_helpers.py create mode 100644 tests/unit_tests/ssm/test_mamba_block.py create mode 100644 tests/unit_tests/ssm/test_mamba_hybrid_layer_allocation.py create mode 100644 tests/unit_tests/ssm/test_mamba_layer.py create mode 100644 tests/unit_tests/ssm/test_mamba_mixer.py create mode 100644 tests/unit_tests/test_inference.py create mode 100644 tests/unit_tests/transformer/moe/conftest.py create mode 100644 tools/checkpoint/schema_base.py create mode 100644 tools/checkpoint/schema_mcore.py delete mode 100644 tools/checkpoint/setter.py create mode 100644 unit-test-job-lts.yaml diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c99b97f69..b24e9dd0b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -10,100 +10,129 @@ workflow: - if: $CI_PIPELINE_SOURCE == "web" - if: $CI_COMMIT_REF_PROTECTED == "true" variables: - FUNCTIONAL_TEST: "no" + FUNCTIONAL_TEST: 'no' - if: $CI_MERGE_REQUEST_LABELS =~ /Run tests/ && $CI_MERGE_REQUEST_TARGET_BRANCH_SHA != "" variables: - UNIT_TEST_REPEAT: 5 - UNIT_TEST_TIMEOUT: 50 - FUNCTIONAL_TEST: "yes" + UNIT_TEST_REPEAT: 1 + UNIT_TEST_TIMEOUT: 15 + FUNCTIONAL_TEST: 'yes' FUNCTIONAL_TEST_SCOPE: mr - FUNCTIONAL_TEST_CLUSTER_A100: "" - FUNCTIONAL_TEST_CLUSTER_H100: "" + FUNCTIONAL_TEST_REPEAT: 5 + FUNCTIONAL_TEST_TIME_LIMIT: 2700 + FUNCTIONAL_TEST_CLUSTER_A100: '' + FUNCTIONAL_TEST_CLUSTER_H100: '' + PUBLISH: 'no' - if: $CI_MERGE_REQUEST_LABELS =~ /Run nightly/ && $CI_MERGE_REQUEST_TARGET_BRANCH_SHA != "" variables: - UNIT_TEST_REPEAT: 5 - UNIT_TEST_TIMEOUT: 50 - FUNCTIONAL_TEST: "yes" + UNIT_TEST_REPEAT: 1 + UNIT_TEST_TIMEOUT: 15 + FUNCTIONAL_TEST: 'yes' FUNCTIONAL_TEST_SCOPE: nightly - FUNCTIONAL_TEST_CLUSTER_A100: "" - FUNCTIONAL_TEST_CLUSTER_H100: "" + FUNCTIONAL_TEST_REPEAT: 5 + FUNCTIONAL_TEST_TIME_LIMIT: 2700 + FUNCTIONAL_TEST_CLUSTER_A100: '' + FUNCTIONAL_TEST_CLUSTER_H100: '' + PUBLISH: 'no' - if: $CI_MERGE_REQUEST_LABELS =~ /Run weekly/ && $CI_MERGE_REQUEST_TARGET_BRANCH_SHA != "" variables: - UNIT_TEST_REPEAT: 5 - UNIT_TEST_TIMEOUT: 50 - FUNCTIONAL_TEST: "yes" + UNIT_TEST_REPEAT: 1 + UNIT_TEST_TIMEOUT: 15 + FUNCTIONAL_TEST: 'yes' FUNCTIONAL_TEST_SCOPE: weekly - FUNCTIONAL_TEST_CLUSTER_A100: "" - FUNCTIONAL_TEST_CLUSTER_H100: "" + FUNCTIONAL_TEST_REPEAT: 1 + FUNCTIONAL_TEST_TIME_LIMIT: 9000 + FUNCTIONAL_TEST_CLUSTER_A100: '' + FUNCTIONAL_TEST_CLUSTER_H100: '' + PUBLISH: 'no' - 
if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_TARGET_BRANCH_SHA != "" variables: - FUNCTIONAL_TEST: "no" + FUNCTIONAL_TEST: 'no' + PUBLISH: 'no' - when: never auto_cancel: on_new_commit: interruptible + # on_job_failure: all stages: - - test + - test - functional_tests - - convergence_tests - publish default: interruptible: true variables: - FUNCTIONAL_TEST: - value: "yes" + UNIT_TEST: + value: 'yes' options: - - "yes" - - "no" + - 'yes' + - 'no' + description: To run the funtional test suite + UNIT_TEST_REPEAT: + value: '1' + description: 'Number of repetitions' + UNIT_TEST_TIMEOUT: + value: '30' + description: Timeout (minutes) for Unit tests (all repeats) + FUNCTIONAL_TEST: + value: 'yes' + options: + - 'yes' + - 'no' description: To run the funtional test suite FUNCTIONAL_TEST_SCOPE: - value: "mr" + value: 'mr' options: - - "mr" - - "nightly" - - "weekly" - - "pre-release" - - "release" - description: "Testsuite to run (only for FUNCTIONAL_TEST=yes)" + - 'mr' + - 'nightly' + - 'weekly' + - 'pre-release' + - 'release' + description: 'Testsuite to run (only for FUNCTIONAL_TEST=yes)' + FUNCTIONAL_TEST_REPEAT: + value: '5' + description: 'Number of repetitions per test' + FUNCTIONAL_TEST_TIME_LIMIT: + value: '2700' + description: 'Timeout in seconds per test' + FUNCTIONAL_TEST_CASES: + value: 'all' + description: "Comma-separated list of test_cases to run. Use 'all' to run the full suite." FUNCTIONAL_TEST_CLUSTER_A100: - value: "dgxa100_dracooci" + value: 'dgxa100_dracooci' options: - - "dgxa100_dracooci" - - "dgxa100_dracooci-ord" + - 'dgxa100_dracooci' + - 'dgxa100_dracooci-ord' description: 'Cluster for A100 workloads' FUNCTIONAL_TEST_CLUSTER_H100: - value: "dgxh100_eos" + value: 'dgxh100_eos' options: - - "dgxh100_coreweave" - - "dgxh100_eos" + - 'dgxh100_coreweave' + - 'dgxh100_eos' description: 'Cluster for H100 workloads' FUNCTIONAL_TEST_NAME: - description: "Name of functional test run (only for pre-release and release)" - PUBLISH: - value: "no" - options: - - "yes" - - "no" + description: 'Name of functional test run (only for pre-release and release)' + PUBLISH: + value: 'no' + options: + - 'yes' + - 'no' description: Build and publish a wheel to PyPi PUBLISH_SCOPE: - value: "code-freeze" + value: 'code-freeze' options: - - "code-freeze" - - "release" + - 'code-freeze' + - 'release' description: Type of publish (freeze or final release) # CI wide variables - CI_MCORE_IMAGE: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/mcore_ci + CI_MCORE_LTS_IMAGE: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/mcore_ci_lts CI_MCORE_DEV_IMAGE: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/mcore_ci_dev CI_NEMO_IMAGE: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/nemo_ci - LINTING_IMAGE: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/mcore_linting - UNIT_TEST_TIMEOUT: 15 - UNIT_TEST_REPEAT: 1 + UTILITY_IMAGE: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/mcore_utility include: - .gitlab/stages/00.pre.yml - - .gitlab/stages/01.tests.yml + - .gitlab/stages/01.test.yml - .gitlab/stages/02.functional-tests.yml - .gitlab/stages/03.publish.yml diff --git a/.gitlab/labeler-config.yml b/.gitlab/labeler-config.yml index 2577c2b92..3dc4001cd 100644 --- a/.gitlab/labeler-config.yml +++ b/.gitlab/labeler-config.yml @@ -1,7 +1,9 @@ CI: - .gitlab-ci.yml -- Dockerfile.ci -- jet-tests.yml +- Dockerfile.ci.lts +- Dockerfile.ci.dev +- .github/** +- .gitlab/** Datasets: - megatron/core/datasets/** diff --git a/.gitlab/stages/00.pre.yml b/.gitlab/stages/00.pre.yml index a91436be8..65564cf88 100644 --- 
a/.gitlab/stages/00.pre.yml +++ b/.gitlab/stages/00.pre.yml @@ -1,7 +1,7 @@ include: - template: Security/Secret-Detection.gitlab-ci.yml -.pre_mr_rules: +.pre_rules: rules: - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true" allow_failure: true @@ -10,7 +10,16 @@ include: - when: never stage: .pre -mirror_to_github: +.dind_rules: + image: docker:26.1.4-dind + variables: + DOCKER_HOST: unix:///var/run/docker.sock + before_script: + - docker system prune -a --filter "until=36h" -f || true + - echo "$NGC_API_KEY" | docker login nvcr.io -u '$oauthtoken' --password-stdin + - echo "$CI_REGISTRY_PASSWORD" | docker login $CI_REGISTRY -u $CI_REGISTRY_USER --password-stdin + +pre:mirror_to_github: rules: - if: '$CI_COMMIT_REF_PROTECTED == "true" && $CI_PIPELINE_SOURCE == "push"' - when: never @@ -18,13 +27,13 @@ mirror_to_github: stage: .pre image: python:3.10 variables: - GIT_STRATEGY: "clone" + GIT_STRATEGY: 'clone' script: - git checkout $CI_COMMIT_BRANCH - git remote add github https://ko3n1g:$GH_TOKEN@github.com/NVIDIA/Megatron-LM.git || true - git push -u github $CI_COMMIT_BRANCH -create_ci_branches: +pre:create_ci_branches: rules: - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "push"' - when: never @@ -32,23 +41,22 @@ create_ci_branches: matrix: - branch: ci-unit-test-extended - branch: ci-rebuild-mcore-nemo-image - - branch: ci-mr-a100 - - branch: ci-nightly-a100 - - branch: ci-weekly-a100 - - branch: ci-weekly-h100 + - branch: ci-mr + - branch: ci-nightly + - branch: ci-weekly - branch: ci-pre-release tags: [mcore-docker-node-small] stage: .pre image: python:3.10 variables: - GIT_STRATEGY: "clone" + GIT_STRATEGY: 'clone' script: - git remote set-url origin "https://gitlab-ci-token:${PROJECT_ACCESS_TOKEN_MCORE}@${GITLAB_ENDPOINT}/adlr/megatron-lm.git" - git switch --force-create $branch - git push --force -u origin $branch -label_merge_request: - extends: [.pre_mr_rules] +pre:label_merge_request: + extends: [.pre_rules] image: golang:1.22 tags: - mcore-docker-node-small @@ -67,37 +75,21 @@ label_merge_request: source labels curl --header "PRIVATE-TOKEN: ${PROJECT_ACCESS_TOKEN_MCORE}" --url "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/merge_requests/${CI_MERGE_REQUEST_IID}" --data-urlencode "add_labels=$LABELS" -X PUT -clean_docker_node: - extends: [.pre_mr_rules] - image: docker:26.1.4-dind - tags: - - ${node} - parallel: - matrix: - - node: 8xL40S - - node: mcore-docker-node-small - - node: mcore-docker-node-jet - script: - - export DOCKER_HOST='unix:///var/run/docker.sock' - - docker system prune -a --filter "until=36h" -f || true - -maybe_cherry_pick_commit: +pre:maybe_cherry_pick_commit: rules: - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "push"' - when: never tags: [mcore-docker-node-small] stage: .pre - image: - name: registry.gitlab.com/gitlab-ci-utils/curl-jq - entrypoint: [""] + image: badouralix/curl-jq variables: - GIT_STRATEGY: "clone" - script: + GIT_STRATEGY: 'clone' + script: - set -x - set +e - SHA=$(git rev-list --no-merges -n 1 HEAD) - MESSAGE=$(git log -n 1 --pretty=format:%s $SHA) - - MR_ID=$(echo $MESSAGE | awk -F'!' '{print $2}' | awk '{print $1}' ) + - MR_ID=$(echo $MESSAGE | awk -F'!' 
'{print $2}' | awk '{print $1}' ) - git remote set-url origin "https://gitlab-ci-token:${PROJECT_ACCESS_TOKEN_MCORE}@${GITLAB_ENDPOINT}/$CI_PROJECT_NAMESPACE/megatron-lm.git" - git config --global user.email "mcore-bot@nvidia.com" - git config --global user.name "Mcore Bot" @@ -115,10 +107,10 @@ maybe_cherry_pick_commit: echo Nothing to cherry pick exit 0 fi - + echo $TARGET_BRANCHES | while read -r RELEASE_BRANCH ; do TARGET_BRANCH_EXISTS_OK=$([[ "$(git ls-remote --heads origin refs/heads/$RELEASE_BRANCH)" != "" ]] && echo true || echo false) - + if [[ "$TARGET_BRANCH_EXISTS_OK" == "false" ]]; then echo Release branch does not yet exist, will not cherry-pick continue @@ -155,7 +147,7 @@ maybe_cherry_pick_commit: "type": "section", "text": { "type": "mrkdwn", - "text": ":alert: Cherrypick bot 🤖: Cherry-pick of <'$URL'|!'$MR_ID'> failed" + "text": ":alert: Cherrypick bot 🤖: Cherry-pick of <'$URL'|!'$MR_ID'> failed\ncc '$SLACK_ADMIN'" } } ] @@ -168,11 +160,10 @@ maybe_cherry_pick_commit: done interruptible: false -check_milestone: - extends: [.pre_mr_rules] - image: ${GITLAB_ENDPOINT}:5005/adlr/megatron-lm/mcore_ci:buildcache - tags: - - mcore-docker-node-small +pre:check_milestone: + extends: [.pre_rules] + image: badouralix/curl-jq + tags: [mcore-docker-node-small] script: - env - | @@ -182,4 +173,3 @@ check_milestone: echo Please assign a Milestone to this MR! exit 1 fi - \ No newline at end of file diff --git a/.gitlab/stages/01.test.yml b/.gitlab/stages/01.test.yml new file mode 100644 index 000000000..47fc43283 --- /dev/null +++ b/.gitlab/stages/01.test.yml @@ -0,0 +1,484 @@ +.test_rules: + rules: + - if: $UNIT_TEST == 'yes' && $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true" + allow_failure: true + when: on_success + - when: on_success + stage: test + +include: + - template: Security/Secret-Detection.gitlab-ci.yml + +test:build_image: + extends: [.test_rules, .dind_rules] + tags: + - arch/amd64 + - origin/jet-fleet + - env/prod + - ${TAG} + services: + - name: docker:24.0.5-dind + variables: + HEALTHCHECK_TCP_PORT: '2376' + timeout: 45m + parallel: + matrix: + - IMAGE: CI_MCORE_LTS_IMAGE + FILE: Dockerfile.ci.lts + BASE_IMAGE: nvcr.io/nvidia/pytorch:24.01-py3 + - IMAGE: CI_MCORE_DEV_IMAGE + FILE: Dockerfile.ci.dev + BASE_IMAGE: nvcr.io/nvidia/pytorch:24.07-py3 + - IMAGE: CI_NEMO_IMAGE + FILE: Dockerfile.ci.lts + BASE_IMAGE: nvcr.io/nvidian/nemo:nightly + - IMAGE: UTILITY_IMAGE + FILE: Dockerfile.linting + BASE_IMAGE: python:3.10 + variables: + DOCKER_HOST: tcp://docker:2376 + DOCKER_TLS_CERTDIR: '/certs' + DOCKER_TLS_VERIFY: 1 + DOCKER_CERT_PATH: '$DOCKER_TLS_CERTDIR/client' + TAG: purpose/builder-large + STAGE: jet + script: + - apk add bash + - | + bash -c ' + set -x + env + eval "IMAGE=\$$IMAGE" + + docker context create tls-environment + docker buildx create --name container --driver=docker-container --use tls-environment + + ADDITIONAL_PARAMS=() + + if [[ "$CI_COMMIT_BRANCH" == "ci-rebuild-mcore-nemo-image" || "$CI_COMMIT_BRANCH" == "main" ]]; then + ADDITIONAL_PARAMS+=("--pull") + ADDITIONAL_PARAMS+=("--cache-to type=registry,ref=${IMAGE}-buildcache:main") + fi + + if [[ "$CI_COMMIT_BRANCH" == "ci-nightly-a100" ]]; then + ADDITIONAL_PARAMS+=("-t ${IMAGE}:nightly") + fi + + echo $(git rev-parse HEAD) + + DOCKER_BUILDKIT=1 docker build \ + --secret id=JET_INDEX_URLS \ + --target $STAGE \ + -f $FILE \ + -t ${IMAGE}:${CI_PIPELINE_ID} \ + --builder=container \ + --build-arg CACHEBUST=$(cat /proc/sys/kernel/random/uuid) \ + 
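# Note on the build invocation being assembled here: CACHEBUST is a fresh UUID on every
# run, which invalidates layer caching from the point where the Dockerfile consumes it
# (the Dockerfiles themselves are outside this hunk), while the --cache-to/--cache-from
# registry refs below let all earlier layers be reused across pipelines. A rough local
# equivalent for the dev image, with anything not shown in this job treated as an
# assumption (registry host, uuidgen):
#   docker buildx build --target jet -f Dockerfile.ci.dev \
#     --build-arg CACHEBUST=$(uuidgen) \
#     --build-arg FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:24.07-py3 \
#     --cache-from type=registry,ref=<registry>/adlr/megatron-lm/mcore_ci_dev-buildcache:main .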
--build-arg MCORE_REPO=${CI_REPOSITORY_URL} \ + --build-arg MCORE_REF=$CI_COMMIT_SHA \ + --build-arg MCORE_BACKWARDS_REF="core_r0.9.0" \ + --cache-to type=registry,ref=${IMAGE}-buildcache:${CI_PIPELINE_ID} \ + --cache-to type=registry,ref=${IMAGE}-buildcache:${CI_MERGE_REQUEST_IID:-noop} \ + --cache-from type=registry,ref=${IMAGE}-buildcache:main \ + --cache-from type=registry,ref=${IMAGE}-buildcache:${CI_PIPELINE_ID} \ + --cache-from type=registry,ref=${IMAGE}-buildcache:${CI_MERGE_REQUEST_IID:-noop} \ + --build-arg FROM_IMAGE_NAME=$BASE_IMAGE \ + --push \ + ${ADDITIONAL_PARAMS[@]} . + ' + retry: + max: 2 + +test:unit_tests_configure: + extends: [.test_rules] + needs: + - test:build_image + image: ${UTILITY_IMAGE}:${CI_PIPELINE_ID} + tags: [mcore-docker-node-small] + before_script: + - git rm -r tests/test_utils/local_recipes || true + - git submodule add --force https://gitlab-ci-token:${CI_JOB_TOKEN}@${GITLAB_ENDPOINT}/ADLR/megatron-lm-convergence-tests.git tests/test_utils/local_recipes + - ls tests/test_utils/local_recipes + script: + - set -x + - | + A100_CLUSTER=$([[ "$FUNCTIONAL_TEST_CLUSTER_A100" != "" ]] && echo $FUNCTIONAL_TEST_CLUSTER_A100 || echo $DEFAULT_A100_CLUSTER) + H100_CLUSTER=$([[ "$FUNCTIONAL_TEST_CLUSTER_H100" != "" ]] && echo $FUNCTIONAL_TEST_CLUSTER_H100 || echo $DEFAULT_H100_CLUSTER) + - | + export PYTHONPATH=$(pwd) + python tests/test_utils/python_scripts/generate_jet_trigger_job.py \ + --scope "unit-tests" \ + --environment lts \ + --n-repeat "${UNIT_TEST_REPEAT}" \ + --time-limit "$(( UNIT_TEST_TIMEOUT * 60 ))" \ + --test-cases "all" \ + --a100-cluster "dgxa100_dracooci-ord" \ + --h100-cluster "dgxh100_coreweave" \ + --container-image ${UTILITY_IMAGE} \ + --container-tag ${CI_PIPELINE_ID} \ + --dependent-job "test:unit_tests_configure" \ + --tag "legacy" \ + --output-path "unit-test-job-lts-legacy.yaml" + - | + export PYTHONPATH=$(pwd) + python tests/test_utils/python_scripts/generate_jet_trigger_job.py \ + --scope "unit-tests" \ + --environment lts \ + --n-repeat "${UNIT_TEST_REPEAT}" \ + --time-limit "$(( UNIT_TEST_TIMEOUT * 60 ))" \ + --test-cases "all" \ + --a100-cluster "dgxa100_dracooci-ord" \ + --h100-cluster "dgxh100_coreweave" \ + --container-image ${UTILITY_IMAGE} \ + --container-tag ${CI_PIPELINE_ID} \ + --dependent-job "test:unit_tests_configure" \ + --tag "latest" \ + --output-path "unit-test-job-lts-latest.yaml" + - | + export PYTHONPATH=$(pwd) + python tests/test_utils/python_scripts/generate_jet_trigger_job.py \ + --scope "unit-tests" \ + --environment dev \ + --n-repeat "${UNIT_TEST_REPEAT}" \ + --time-limit "$(( UNIT_TEST_TIMEOUT * 60 ))" \ + --test-cases "all" \ + --a100-cluster "dgxa100_dracooci-ord" \ + --h100-cluster "dgxh100_coreweave" \ + --container-image ${UTILITY_IMAGE} \ + --container-tag ${CI_PIPELINE_ID} \ + --dependent-job "test:unit_tests_configure" \ + --tag "legacy" \ + --output-path "unit-test-job-dev-legacy.yaml" + - | + export PYTHONPATH=$(pwd) + python tests/test_utils/python_scripts/generate_jet_trigger_job.py \ + --scope "unit-tests" \ + --environment dev \ + --n-repeat "${UNIT_TEST_REPEAT}" \ + --time-limit "$(( UNIT_TEST_TIMEOUT * 60 ))" \ + --test-cases "all" \ + --a100-cluster "dgxa100_dracooci-ord" \ + --h100-cluster "dgxh100_coreweave" \ + --container-image ${UTILITY_IMAGE} \ + --container-tag ${CI_PIPELINE_ID} \ + --dependent-job "test:unit_tests_configure" \ + --tag "latest" \ + --output-path "unit-test-job-dev-latest.yaml" + + artifacts: + paths: + - unit-test-job-dev-legacy.yaml + - unit-test-job-dev-latest.yaml 
+ - unit-test-job-lts-legacy.yaml + - unit-test-job-lts-latest.yaml + - tests/test_utils/local_recipes + +.unit_tests_run: + needs: + - test:formatting + - test:copyright + - job: test:secret_detection + optional: true + - test:unit_tests_configure + extends: [.test_rules] + trigger: + include: + - artifact: unit-test-job-$ENVIRONMENT-$TAG.yaml + job: test:unit_tests_configure + strategy: depend + variables: + RO_API_TOKEN: $PAT + CONTAINER_TAG: $CI_PIPELINE_ID + CI_MCORE_LTS_IMAGE: $CI_MCORE_LTS_IMAGE + GITLAB_ENDPOINT: $GITLAB_ENDPOINT + PARENT_PIPELINE_ID: $CI_PIPELINE_ID + inherit: + variables: true + rules: + - if: $UNIT_TEST == 'yes' && $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true" + allow_failure: true + when: on_success + - if: $UNIT_TEST == 'yes' && $UNIT_TEST_REPEAT != '0' + when: on_success + +test:unit_tests_pyt(DEV)_mcore(legacy): + extends: [.unit_tests_run] + variables: + ENVIRONMENT: dev + TAG: legacy + +test:unit_tests_pyt(LTS)_mcore(legacy): + extends: [.unit_tests_run] + variables: + ENVIRONMENT: dev + TAG: legacy + +test:unit_tests_pyt(DEV)_mcore(latest): + extends: [.unit_tests_run] + variables: + ENVIRONMENT: lts + TAG: latest + +test:unit_tests_pyt(LTS)_mcore(latest): + extends: [.unit_tests_run] + variables: + ENVIRONMENT: lts + TAG: latest + +test:notify_unit_tests: + extends: [.test_rules] + image: badouralix/curl-jq + needs: + - test:unit_tests_pyt(DEV)_mcore(latest) + - test:unit_tests_pyt(LTS)_mcore(latest) + tags: + - mcore-docker-node-small + script: + - apk add bash + - apk add --update coreutils + - env + - export WEBHOOK_URL=${MCORE_NOTIFICATION_HOOK} + - export RO_API_TOKEN=${PROJECT_ACCESS_TOKEN_MCORE} + - export GITLAB_ENDPOINT + - export CONTEXT="unit-tests-extended" + - export DATE=$(date +"%Y-%m-%d") + - bash tests/test_utils/shell_scripts/notify.sh ${CI_PIPELINE_ID} "test:unit_tests_pyt" + artifacts: + when: always + paths: + - scripts + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "ci-unit-test-extended" + when: always + - when: never + +test:docs_build: + extends: [.test_rules] + image: ${UTILITY_IMAGE}:${CI_PIPELINE_ID} + tags: [mcore-docker-node-small] + needs: [test:build_image] + script: + - cd .. + - rm -rf documentation && git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@${GITLAB_ENDPOINT}/nemo-megatron-core-tme/documentation.git + - mv megatron-lm/ documentation/ + - cd documentation/ + - ./repo docs + +test:formatting: + extends: [.test_rules] + image: ${UTILITY_IMAGE}:${CI_PIPELINE_ID} + tags: [mcore-docker-node-small] + needs: [test:build_image] + variables: + GIT_STRATEGY: 'clone' + script: + - | + if [[ "$CI_PIPELINE_SOURCE" != "merge_request_event" ]]; then + exit 0 + fi + - set +e + - git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME + - git fetch origin main:main + - git checkout $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME + - | + if [[ "$CI_MERGE_REQUEST_PROJECT_PATH" == "$CI_MERGE_REQUEST_SOURCE_PROJECT_PATH" ]]; then + bash tools/autoformat.sh + set -e + git config --global user.email "mcore-bot@nvidia.com" + git config --global user.name "Mcore Bot" + git remote set-url origin "https://gitlab-ci-token:${PAT}@${GITLAB_ENDPOINT}/$CI_PROJECT_NAMESPACE/megatron-lm.git" + git add -A . 
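# At this point the MR source branch has been checked out and reformatted in place by
# tools/autoformat.sh (same-project MRs only); the commit below is a no-op ("|| true")
# when the formatter changed nothing, and the push returns the fix-up commit to the
# source branch. The final check-only pass further down can be reproduced locally,
# roughly (the BASE_REF value is an assumption; the variable names are the ones this
# job uses):
#   BASE_REF=main CHECK_ONLY=true SKIP_DOCS=false bash tools/autoformat.sh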
+ git commit -m "chore: Format files" || true + git push -u origin $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME + fi + - env + - BASE_REF="$CI_MERGE_REQUEST_TARGET_BRANCH_NAME" CHECK_ONLY=true SKIP_DOCS=$([[ "$CI_MERGE_REQUEST_LABELS" == *"Skip docs"* ]] && echo "true" || echo "false") bash tools/autoformat.sh + +test:copyright: + extends: [.test_rules] + tags: [mcore-docker-node-small] + image: ${UTILITY_IMAGE}:${CI_PIPELINE_ID} + needs: [test:build_image] + script: + - git fetch origin main + - bash tools/copyright.sh + +# Override from template +secret_detection: + rules: + - when: never + +# Inherit and modify template +test:secret_detection: + tags: [mcore-docker-node-small] + extends: ['.secret-analyzer'] + variables: + GIT_DEPTH: 0 + SECRET_DETECTION_LOG_OPTIONS: ${CI_MERGE_REQUEST_DIFF_BASE_SHA}..${CI_COMMIT_SHA} + allow_failure: true + rules: + - if: $CI_PIPELINE_SOURCE == "merge_request_event" + - when: never + script: + - apk add jq + - /analyzer run + - | + if [[ $(cat gl-secret-detection-report.json | jq '.vulnerabilities | length > 0') == true ]]; then + echo "Atleast one vulnerability has been found" + cat gl-secret-detection-report.json | jq '.' + exit 1 + fi + +test:pypi_build_wheel: + extends: [.test_rules] + image: + name: quay.io/pypa/manylinux_2_28_x86_64 + entrypoint: [''] + tags: [mcore-docker-node-small] + variables: + PUBLISH_DRYRUN: 'yes' + PY_ENV: pytorch:24.07 + script: + - echo $PUBLISH_DRYRUN + - > + if [ "$PUBLISH_DRYRUN" = "yes" ]; then + PRE_RELEASE=$(sed -n "s/.*PRE_RELEASE = '\(.*\)'/\1/p" megatron/core/package_info.py) + sed -i "/^PRE_RELEASE/c\PRE_RELEASE = '${PRE_RELEASE}.dev$((RANDOM % 900000 + 100000))'" megatron/core/package_info.py + fi + - /opt/python/cp310-cp310/bin/python -m build + - /opt/python/cp311-cp311/bin/python -m build + - auditwheel repair dist/*.whl + artifacts: + paths: + - megatron/core/package_info.py + - wheelhouse/ + +test:pypi_test_wheel: + extends: [.test_rules] + image: nvcr.io/nvidia/pytorch:24.01-py3 + needs: [test:pypi_build_wheel] + tags: [mcore-docker-node-small] + variables: + PUBLISH_DRYRUN: 'yes' + script: + - EXPECTED_RELEASE_NUMBER=$(python -c "from megatron import core; print(core.__version__)") + - rm -rf megatron + - pip install wheelhouse/*cp310*.whl + + - RELEASE_NUMBER=$(python -c "from megatron import core; print(core.__version__)") + - > + echo "$EXPECTED_RELEASE_NUMBER" == "$RELEASE_NUMBER" + - test "$EXPECTED_RELEASE_NUMBER" == "$RELEASE_NUMBER" + - echo "RELEASE_NUMBER=$EXPECTED_RELEASE_NUMBER" | tee -a build.env + artifacts: + reports: + dotenv: build.env + paths: + - wheelhouse/ + +test:pypi_push_wheel: + extends: [.test_rules] + image: python:3.10 + tags: [mcore-docker-node-small] + needs: [test:pypi_test_wheel] + variables: + PUBLISH_DRYRUN: 'yes' + timeout: 3m + script: + - > + if [ "$PUBLISH_DRYRUN" = "yes" ]; then + REPOSITORY=testpypi + export TWINE_USERNAME=$TWINE_TEST_USERNAME + export TWINE_PASSWORT=$TWINE_TEST_PASSWORD + else + REPOSITORY=pypi + export TWINE_USERNAME=$TWINE_PROD_USERNAME + export TWINE_PASSWORT=$TWINE_PROD_PASSWORD + fi + - pip install twine + - > + for i in 1 2 3 4 5; do + twine upload --verbose -u $TWINE_USERNAME -p $TWINE_PASSWORT --repository $REPOSITORY wheelhouse/* && break || sleep $(( 60*2**i )); + done + rules: + - if: $UNIT_TEST == 'yes' && $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true" + allow_failure: true + when: on_success + - when: on_success + allow_failure: true + +test:gh_release: + extends: [.test_rules] + needs: 
[test:pypi_test_wheel] + tags: [mcore-docker-node-small] + image: badouralix/curl-jq + variables: + PUBLISH_DRYRUN: 'yes' + script: + - NAME="NVIDIA Megatron Core $RELEASE_NUMBER" + - CHANGELOG=$(awk '/^## '"$NAME"'/{flag=1; next} /^## /{flag=0} flag' CHANGELOG.md) + - CHANGELOG=$(echo "$CHANGELOG" | sed '/./!d') + - > + PAYLOAD=$(jq -nc \ + --arg CI_COMMIT_BRANCH "$CI_COMMIT_BRANCH" \ + --arg NAME "$NAME" \ + --arg BODY "$CHANGELOG" \ + '{ + "tag_name": $CI_COMMIT_BRANCH, + "target_commitish": $CI_COMMIT_BRANCH, + "name": $NAME, + "body": $BODY, + "draft": false, + "prerelease": false, + "generate_release_notes": false + }' + ) + - > + CMD=$(echo curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $GH_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/NVIDIA/Megatron-LM/releases \ + -d "$PAYLOAD" + ) + + if [[ "$PUBLISH_DRYRUN" == "yes" ]]; then + echo "$CMD" + else + eval "$CMD" + fi + +test:notify_release: + needs: [test:pypi_test_wheel, test:pypi_push_wheel, test:gh_release] + extends: [.test_rules] + image: badouralix/curl-jq + tags: [mcore-docker-node-small] + variables: + PUBLISH_DRYRUN: 'yes' + script: + - URL="https://github.com/NVIDIA/Megatron-LM/releases/tag/core_r$RELEASE_NUMBER" + - > + MESSAGE='{ + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "Releasebot 🤖: Megatron-Core released <'$URL'|core_r'"$RELEASE_NUMBER"'> 🚀" + } + } + ] + }' + - echo "$MESSAGE" + - > + CMD=$(echo curl \ + -X POST \ + -H "Content-type: application/json" \ + --data "$MESSAGE" ${MCORE_NOTIFICATION_HOOK_MAIN} + ) + + if [[ "$PUBLISH_DRYRUN" == "yes" ]]; then + echo "$CMD" + else + eval "$CMD" + fi diff --git a/.gitlab/stages/01.tests.yml b/.gitlab/stages/01.tests.yml deleted file mode 100644 index ed80e96fe..000000000 --- a/.gitlab/stages/01.tests.yml +++ /dev/null @@ -1,208 +0,0 @@ -.test_mr_rules: - rules: - - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true" - allow_failure: true - when: always - - when: always - stage: test - -include: - - template: Security/Secret-Detection.gitlab-ci.yml - -build_image: - extends: [.test_mr_rules] - tags: - - ${TAG} - image: docker:26.1.4-dind - timeout: 45m - parallel: - matrix: - - IMAGE: CI_MCORE_IMAGE - FILE: Dockerfile.ci - BASE_IMAGE: nvcr.io/nvidia/pytorch:24.01-py3 - TAG: mcore-docker-node-large - - IMAGE: CI_MCORE_DEV_IMAGE - FILE: Dockerfile.ci.dev - BASE_IMAGE: nvcr.io/nvidia/pytorch:24.01-py3 - TAG: mcore-docker-node-large - - IMAGE: CI_NEMO_IMAGE - FILE: Dockerfile.ci - BASE_IMAGE: nvcr.io/nvidian/nemo:nightly - TAG: mcore-docker-node-large - - IMAGE: LINTING_IMAGE - FILE: Dockerfile.linting - BASE_IMAGE: python:3.10 - TAG: mcore-docker-node-small - before_script: - - echo "$NGC_API_KEY" | docker login nvcr.io -u '$oauthtoken' --password-stdin - - echo "$CI_REGISTRY_PASSWORD" | docker login $CI_REGISTRY -u $CI_REGISTRY_USER --password-stdin - variables: - STAGE: main - script: - - apk add bash - - | - bash -c ' - set -x - env - eval "IMAGE=\$$IMAGE" - - docker system prune -a --filter "until=24h" -f || true - - docker buildx create --name container --driver=docker-container - - ADDITIONAL_PARAMS=() - - if [[ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]]; then - ADDITIONAL_PARAMS+=("--pull") - ADDITIONAL_PARAMS+=("--cache-to type=registry,ref=${IMAGE}-buildcache:main") - fi - - if [[ "$CI_COMMIT_BRANCH" == "ci-nightly-a100" ]]; then - ADDITIONAL_PARAMS+=("-t ${IMAGE}:nightly") - fi - - 
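# The build command below always writes its registry layer cache to
# ${IMAGE}-buildcache:${CI_PIPELINE_ID} and :${CI_MERGE_REQUEST_IID:-noop}, writes to
# :main only on the default branch, and reads from all three, so repeated pipelines on
# the same MR largely reuse cached layers; on the ci-nightly-a100 branch the
# ADDITIONAL_PARAMS above also tag the result as ${IMAGE}:nightly, so consumers can pin
# a moving nightly tag, e.g. (illustrative pull; image path per the CI variables):
#   docker pull <registry>/adlr/megatron-lm/mcore_ci:nightly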
DOCKER_BUILDKIT=1 docker build \ - --secret id=JET_INDEX_URLS \ - --target $STAGE \ - -f $FILE \ - -t ${IMAGE}:${CI_PIPELINE_ID} \ - --builder=container \ - --build-arg CACHEBUST=$(cat /proc/sys/kernel/random/uuid) \ - --cache-to type=registry,ref=${IMAGE}-buildcache:${CI_PIPELINE_ID} \ - --cache-to type=registry,ref=${IMAGE}-buildcache:${CI_MERGE_REQUEST_IID:-noop} \ - --cache-from type=registry,ref=${IMAGE}-buildcache:main \ - --cache-from type=registry,ref=${IMAGE}-buildcache:${CI_PIPELINE_ID} \ - --cache-from type=registry,ref=${IMAGE}-buildcache:${CI_MERGE_REQUEST_IID:-noop} \ - --build-arg FROM_IMAGE_NAME=$BASE_IMAGE \ - --push \ - ${ADDITIONAL_PARAMS[@]} . - ' - retry: - max: 2 - -unit_tests: - # This job runs both test suite of ToT and of a historic ref against - # the current code. This is a form of backwards compatibility testing - # and helps in providing stable interfaces. - extends: [.test_mr_rules] - image: ${IMAGE}:${CI_PIPELINE_ID} - needs: [build_image] - timeout: 180m - parallel: - matrix: - - TAG: latest - IMAGE: ${CI_MCORE_IMAGE} - # - TAG: latest - # IMAGE: ${CI_MCORE_DEV_IMAGE} - - TAG: core_r0.9.0 - IMAGE: ${CI_MCORE_IMAGE} - tags: [8xL40S] - variables: - GIT_STRATEGY: clone - GIT_DEPTH: 0 - before_script: - - | - if [[ $TAG != latest ]]; then - git checkout $TAG - rm -rf /opt/megatron-lm/tests - cp -r tests/ /opt/megatron-lm - fi - script: - - | - cd /opt/megatron-lm - if [[ $UNIT_TEST_REPEAT -eq 0 ]]; then - exit 0 - fi - - for i in $(seq $UNIT_TEST_REPEAT); do - SEED=$((RANDOM % 9000 + 1000)); - ARGS=() - if [[ $TAG != latest ]]; then - ARGS+=(-m "not internal") - else - ARGS+=(-m "not flaky") - fi - if [[ $IMAGE == ${CI_MCORE_DEV_IMAGE} ]]; then - ARGS+=(-m "experimental") - fi - timeout ${UNIT_TEST_TIMEOUT}m torchrun --nproc_per_node=8 -m pytest --random-order --random-order-seed ${SEED} -xvs --cov-report=term --cov-report=html --cov=megatron/core --no-cov-on-fail "${ARGS[@]}" tests/unit_tests - done - artifacts: - paths: - - coverage - rules: - - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true" - allow_failure: true - when: always - - when: always - -unit-tests-results-notify: - extends: [.test_mr_rules] - image: ${CI_MCORE_IMAGE}:${CI_PIPELINE_ID} - needs: [unit_tests] - tags: - - mcore-docker-node-small - script: - - env - - export WEBHOOK_URL=${MCORE_NOTIFICATION_HOOK} - - export RO_API_TOKEN=${PROJECT_ACCESS_TOKEN_MCORE} - - export GITLAB_ENDPOINT - - export DATE=$(date +"%Y-%m-%d") - - bash tests/functional_tests/shell_test_utils/notify_unit_tests.sh ${CI_PIPELINE_ID} - artifacts: - when: always - paths: - - scripts - rules: - - if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "ci-unit-test-extended" - when: always - - when: never - -docs_build_test: - extends: [.test_mr_rules] - image: ${CI_MCORE_IMAGE}:${CI_PIPELINE_ID} - tags: [mcore-docker-node-small] - needs: [build_image] - script: - - cd .. 
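# The docs job does not build in-tree: it clones the separate
# nemo-megatron-core-tme/documentation project one directory up, moves this megatron-lm
# checkout inside it, and runs that repo's ./repo docs entry point (the same sequence
# as in test:docs_build above). Sketched locally, with the token-based clone URL of the
# CI job omitted:
#   cd .. && git clone <gitlab>/nemo-megatron-core-tme/documentation.git
#   mv megatron-lm/ documentation/ && cd documentation/ && ./repo docs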
- - rm -rf documentation && git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@${GITLAB_ENDPOINT}/nemo-megatron-core-tme/documentation.git - - mv megatron-lm/ documentation/ - - cd documentation/ - - ./repo docs - -formatting: - extends: [.test_mr_rules] - image: ${LINTING_IMAGE}:${CI_PIPELINE_ID} - tags: [mcore-docker-node-small] - needs: [build_image] - script: - - env - - git fetch origin main - - BASE_REF="$CI_MERGE_REQUEST_TARGET_BRANCH_NAME" CHECK_ONLY=true SKIP_DOCS=$([[ "$CI_MERGE_REQUEST_LABELS" == *"Skip docs"* ]] && echo "true" || echo "false") bash tools/autoformat.sh - -copyright: - extends: [.test_mr_rules] - tags: [mcore-docker-node-small] - image: ${CI_MCORE_IMAGE}:${CI_PIPELINE_ID} - needs: [build_image] - script: - - git fetch origin main - - bash tools/copyright.sh - -secret_detection: - tags: [mcore-docker-node-small] - variables: - GIT_DEPTH: 0 - SECRET_DETECTION_LOG_OPTIONS: ${CI_MERGE_REQUEST_DIFF_BASE_SHA}..${CI_COMMIT_SHA} - allow_failure: true - rules: - - if: $CI_PIPELINE_SOURCE == "merge_request_event" - script: - - apk add jq - - /analyzer run - - | - if [[ $(cat gl-secret-detection-report.json | jq '.vulnerabilities | length > 0') == true ]]; then - echo "Atleast one vulnerability has been found" - cat gl-secret-detection-report.json | jq '.' - exit 1 - fi \ No newline at end of file diff --git a/.gitlab/stages/02.functional-tests.yml b/.gitlab/stages/02.functional-tests.yml index 531527b8b..a128345c2 100644 --- a/.gitlab/stages/02.functional-tests.yml +++ b/.gitlab/stages/02.functional-tests.yml @@ -1,4 +1,4 @@ -.jet_common: +.functional_tests_rules: stage: functional_tests rules: - if: $FUNCTIONAL_TEST == "yes" && ($CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED != "true") @@ -16,27 +16,28 @@ include: ref: main file: downstreams.yml -jet-build: - extends: [build_image, .jet_common] - variables: - STAGE: jet - -jet-generate: - needs: [jet-build] - extends: [.jet_common] - image: ${CI_MCORE_IMAGE}:${CI_PIPELINE_ID} +functional:configure: + needs: + - test:build_image + - job: test:unit_tests_pyt(DEV)_mcore(latest) + optional: true + - job: test:unit_tests_pyt(LTS)_mcore(latest) + optional: true + extends: [.functional_tests_rules] + image: ${UTILITY_IMAGE}:${CI_PIPELINE_ID} tags: [mcore-docker-node-small] before_script: - - git rm -r tests/functional_tests/local_recipes || true - - git submodule add --force https://gitlab-ci-token:${CI_JOB_TOKEN}@${GITLAB_ENDPOINT}/ADLR/megatron-lm-convergence-tests.git tests/functional_tests/local_recipes - - ls tests/functional_tests/local_recipes - script: + - git rm -r tests/test_utils/local_recipes || true + - git submodule add --force https://gitlab-ci-token:${CI_JOB_TOKEN}@${GITLAB_ENDPOINT}/ADLR/megatron-lm-convergence-tests.git tests/test_utils/local_recipes + - ls tests/test_utils/local_recipes + script: - set -x - | A100_CLUSTER=$([[ "$FUNCTIONAL_TEST_CLUSTER_A100" != "" ]] && echo $FUNCTIONAL_TEST_CLUSTER_A100 || echo $DEFAULT_A100_CLUSTER) H100_CLUSTER=$([[ "$FUNCTIONAL_TEST_CLUSTER_H100" != "" ]] && echo $FUNCTIONAL_TEST_CLUSTER_H100 || echo $DEFAULT_H100_CLUSTER) - | if [[ "$FUNCTIONAL_TEST_SCOPE" == "release" || "$FUNCTIONAL_TEST_SCOPE" == "pre-release" ]]; then + FUNCTIONAL_TEST_NAME=$(eval echo $FUNCTIONAL_TEST_NAME) RELEASE_ARGS=( "--run-name" $FUNCTIONAL_TEST_NAME @@ -46,57 +47,92 @@ jet-generate: else RELEASE_ARGS=() fi - - | export PYTHONPATH=$(pwd) - python tests/functional_tests/python_test_utils/jet/generate_jet_trigger_job.py \ + python 
tests/test_utils/python_scripts/generate_jet_trigger_job.py \ --scope $FUNCTIONAL_TEST_SCOPE \ + --environment dev \ + --n-repeat "$FUNCTIONAL_TEST_REPEAT" \ + --time-limit "$FUNCTIONAL_TEST_TIME_LIMIT" \ + --test-cases $FUNCTIONAL_TEST_CASES \ --a100-cluster $A100_CLUSTER \ --h100-cluster $H100_CLUSTER \ + --container-image ${UTILITY_IMAGE} \ --container-tag ${CI_PIPELINE_ID} \ - --container-image ${CI_MCORE_IMAGE} \ - --container-image-dev ${CI_MCORE_DEV_IMAGE} \ - --output-path "jet-trigger-job.yaml" \ + --dependent-job "functional:configure" \ + --output-path "functional-test-job-dev.yaml" \ + ${RELEASE_ARGS[@]} + - | + export PYTHONPATH=$(pwd) + python tests/test_utils/python_scripts/generate_jet_trigger_job.py \ + --scope $FUNCTIONAL_TEST_SCOPE \ + --environment lts \ + --n-repeat "$FUNCTIONAL_TEST_REPEAT" \ + --time-limit "$FUNCTIONAL_TEST_TIME_LIMIT" \ + --test-cases $FUNCTIONAL_TEST_CASES \ + --a100-cluster $A100_CLUSTER \ + --h100-cluster $H100_CLUSTER \ + --container-image ${UTILITY_IMAGE} \ + --container-tag ${CI_PIPELINE_ID} \ + --dependent-job "functional:configure" \ + --output-path "functional-test-job-lts.yaml" \ ${RELEASE_ARGS[@]} artifacts: paths: - - jet-trigger-job.yaml - - tests/functional_tests/local_recipes + - functional-test-job-lts.yaml + - functional-test-job-dev.yaml + - tests/test_utils/local_recipes -jet-trigger: +.run: stage: functional_tests - needs: [jet-generate] - extends: [.jet_common] + needs: [functional:configure] + extends: [.functional_tests_rules] trigger: include: - - artifact: jet-trigger-job.yaml - job: jet-generate + - artifact: functional-test-job-$ENVIRONMENT.yaml + job: functional:configure strategy: depend variables: RO_API_TOKEN: $PAT CONTAINER_TAG: $CI_PIPELINE_ID - CI_MCORE_IMAGE: $CI_MCORE_IMAGE + CI_MCORE_LTS_IMAGE: $CI_MCORE_LTS_IMAGE GITLAB_ENDPOINT: $GITLAB_ENDPOINT PARENT_PIPELINE_ID: $CI_PIPELINE_ID inherit: variables: true - -jet-results-notify: - extends: [.jet_common] - image: ${GITLAB_ENDPOINT}:5005/dl/jet/api:latest - needs: [jet-trigger] + +functional:run_lts: + extends: [.run] + variables: + ENVIRONMENT: lts + +functional:run_dev: + extends: [.run] + variables: + ENVIRONMENT: dev + +functional:notify: + extends: [.functional_tests_rules] + image: badouralix/curl-jq + needs: + - functional:run_lts + - functional:run_dev tags: - mcore-docker-node-small - before_script: - - jet secrets jwt-login jwt/nvidia/gitlab-master adlr-megatron-lm-ci $VAULT_JWT_TOKEN + variables: + WEBHOOK_URL: ${MCORE_NOTIFICATION_HOOK} + RO_API_TOKEN: ${PROJECT_ACCESS_TOKEN_MCORE} + CONTEXT: $FUNCTIONAL_TEST_SCOPE script: + - apk add bash + - apk add --update coreutils - env - export WEBHOOK_URL=${MCORE_NOTIFICATION_HOOK} - export RO_API_TOKEN=${PROJECT_ACCESS_TOKEN_MCORE} - export GITLAB_ENDPOINT - export CONTEXT=$FUNCTIONAL_TEST_SCOPE - export DATE=$(date +"%Y-%m-%d") - - bash tests/functional_tests/shell_test_utils/notify.sh ${CI_PIPELINE_ID} + - bash tests/test_utils/shell_scripts/notify.sh ${CI_PIPELINE_ID} "functional:run_" artifacts: when: always paths: @@ -105,4 +141,3 @@ jet-results-notify: - if: $CI_PIPELINE_SOURCE == "schedule" && $FUNCTIONAL_TEST == "yes" when: always - when: never - diff --git a/.gitlab/stages/03.publish.yml b/.gitlab/stages/03.publish.yml index 1deeee728..4639d7690 100644 --- a/.gitlab/stages/03.publish.yml +++ b/.gitlab/stages/03.publish.yml @@ -1,24 +1,28 @@ .publish_common_freeze: - stage: functional_tests + stage: publish rules: - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $PUBLISH == "yes" && $PUBLISH_SCOPE == 
"code-freeze" when: manual - when: never .publish_common_release: - stage: functional_tests + stage: publish rules: - if: $CI_COMMIT_BRANCH =~ /^core_r/ && $PUBLISH == "yes" && $PUBLISH_SCOPE == "release" when: manual + - if: $PUBLISH == "yes" && $PUBLISH_SCOPE == "release" + when: manual + variables: + PUBLISH_DRYRUN: "yes" - when: never -create-release-branch: +publish:release_branch: extends: [.publish_common_freeze] - image: ${CI_MCORE_IMAGE}:${CI_PIPELINE_ID} - needs: [build_image] + image: ${CI_MCORE_LTS_IMAGE}:${CI_PIPELINE_ID} + needs: [test:build_image] tags: [mcore-docker-node-small] variables: - GIT_STRATEGY: "clone" + GIT_STRATEGY: "none" script: - git fetch origin $CI_DEFAULT_BRANCH - git config --global user.email "mcore-bot@nvidia.com" @@ -26,8 +30,8 @@ create-release-branch: - git remote set-url origin "https://gitlab-ci-token:${PAT}@${GITLAB_ENDPOINT}/$CI_PROJECT_NAMESPACE/megatron-lm.git" - sed -i "/^PRE_RELEASE/c\PRE_RELEASE = ''" megatron/core/package_info.py - VERSION=$(python -c "from megatron import core; print(core.__version__)") - - git switch --force-create core_r$VERSION origin/$CI_DEFAULT_BRANCH - - git push -u origin core_r$VERSION --force + - RELEASE_BRANCH=core_r$VERSION + - git switch --force-create $RELEASE_BRANCH origin/$CI_DEFAULT_BRANCH - | MESSAGE='{ "blocks": [ @@ -35,61 +39,53 @@ create-release-branch: "type": "section", "text": { "type": "mrkdwn", - "text": "Releasebot 🤖: Megatron Core has been frozen 🎉 to branch `core_r$VERSION`" + "text": "Releasebot 🤖: Megatron Core has been frozen 🎉 to branch `'"$RELEASE_BRANCH"'`" } } ] }' - + - > curl -X POST -H "Content-type: application/json" --data "$MESSAGE" ${MCORE_NOTIFICATION_HOOK_MAIN} + - git switch --force-create bot/chore/bump-version + - git add megatron/core/package_info.py + - > + git commit -m "chore: adjust version version" + - git push -u origin bot/chore/bump-version + - > + curl \ + --header "PRIVATE-TOKEN: $PAT" \ + --url https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/merge_requests \ + -d "source_branch=bot/chore/bump-version" \ + -d "target_branch=$RELEASE_BRANCH" \ + -d "title=chore: Fix version of \`$RELEASE_BRANCH\`" \ + -d "description=[🤖]: Hi @okoenig 👋,

we've adjusted the version number of \`$RELEASE_BRANCH\` for you! 🚀

Please review and approve this cherry pick by your convenience\!" -publish-wheel: - extends: [.publish_common_release] - image: quay.io/pypa/manylinux_2_28_x86_64 - tags: [mcore-docker-node-small] - script: - - export TWINE_USERNAME - - export TWINE_PASSWORT - - /opt/python/cp311-cp311/bin/pip install twine - - /opt/python/cp310-cp310/bin/python -m build - - /opt/python/cp311-cp311/bin/python -m build - - auditwheel repair dist/*.whl - - twine upload --repository pypi wheelhouse/* - -create-gh-release: - extends: [.publish_common_release] - tags: [mcore-docker-node-small] - image: - name: registry.gitlab.com/gitlab-ci-utils/curl-jq - entrypoint: [""] - script: - - | - RELEASE_NUMBER=$(python -c "from megatron import core; print(core.__version__)") - NAME="NVIDIA Megatron Core $RELEASE_NUMBER" - CHANGELOG=$(awk '/^## '$NAME'/{flag=1; next} /^## /{flag=0} flag' CHANGELOG.md) - CHANGELOG=$(echo "$CHANGELOG" | sed '/./!d') - - PAYLOAD=$(jq \ - -n \ - -c \ - --arg CI_COMMIT_BRANCH "$CI_COMMIT_BRANCH" \ - --arg NAME "$NAME" \ - --arg BODY "$CHANGELOG" \ - '{ - "tag_name": $CI_COMMIT_BRANCH, - "target_commitish": $CI_COMMIT_BRANCH, - "name": $NAME, - "body": $BODY, - "draft": false, - "prerelease": false, - "generate_release_notes": false - }' - ) +publish:pypi_build_wheel: + extends: [test:pypi_build_wheel, .publish_common_release] + dependencies: [] + variables: + PUBLISH_DRYRUN: "no" + +publish:pypi_test_wheel: + extends: [test:pypi_test_wheel, .publish_common_release] + needs: [publish:pypi_build_wheel] + variables: + PUBLISH_DRYRUN: "no" + +publish:pypi_push_wheel: + extends: [test:pypi_push_wheel, .publish_common_release] + needs: [publish:pypi_test_wheel] + variables: + PUBLISH_DRYRUN: "no" - curl -L \ - -X POST \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer $GH_TOKEN" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/NVIDIA/Megatron-LM/releases \ - -d $PAYLOAD \ No newline at end of file +publish:gh_release: + extends: [test:gh_release, .publish_common_release] + dependencies: [] + variables: + PUBLISH_DRYRUN: "no" + +publish:notify_release: + needs: [publish:pypi_push_wheel, publish:gh_release] + extends: [test:notify_release, .publish_common_release] + variables: + PUBLISH_DRYRUN: "no" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 78db8212a..796057419 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## NVIDIA Megatron Core 0.9.0 + +- Uneven pipeline parallelism + - Enable pipeline parallelism where first and last ranks have fewer transformer layers than the intermediate ranks +- Per layer CUDAGraph support for GPT training with Transformer Engine modules +- Enable different TP sizes for the vision encoder +- Enable pipeline parallelism for T5 & Llava models +- Support multi-tile multi-image input in Llava models +- MoE + - FP8 support + - Runtime upcycling support + - Dispatcher implementation optimizations + - Shared expert support with overlapping optimizations + - Qwen Model support +- Known Issues + - When using sequence parallel, during the transformer block forward pass, dropout is not using the appropriate rng context. 
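As a side note on the known issue listed above for the 0.9.0 entry (dropout under sequence parallelism not using the appropriate rng context): the sketch below is illustrative only and is not part of this patch. It shows the pattern Megatron-Core otherwise relies on, forking the tensor-parallel RNG tracker around dropout so every rank draws its mask from the tracked state. Only `get_cuda_rng_tracker` is an existing Megatron-Core helper; the wrapper function and its call site are assumptions for illustration.

```python
# Illustrative sketch (not part of this patch): the known issue above means that,
# under sequence parallelism, dropout in the transformer block forward pass does
# not run inside the tracked RNG context shown here. Only get_cuda_rng_tracker is
# an existing Megatron-Core helper; the wrapper below is a hypothetical example.
import torch
from megatron.core.tensor_parallel import get_cuda_rng_tracker


def dropout_in_tp_rng_context(hidden_states: torch.Tensor, p: float = 0.1) -> torch.Tensor:
    # Fork the tensor-parallel RNG tracker so the dropout mask is drawn from the
    # per-rank tracked state instead of the default CUDA generator.
    with get_cuda_rng_tracker().fork():
        return torch.nn.functional.dropout(hidden_states, p=p, training=True)
```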
+ + ## NVIDIA Megatron Core 0.8.0 - Multimodal diff --git a/CODEOWNERS b/CODEOWNERS index 7e7f730e3..e89c62b06 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -34,14 +34,16 @@ megatron/core/optimizer/distrib_optimizer/ [Inference] @mcore-reviewers/inference megatron/core/inference/ -[Quantization and Inference (QAT)] @mcore-reviewers/quantization-and-inference +^[Quantization and Inference (QAT)] @mcore-reviewers/quantization-and-inference megatron/core/inference/ ; [Context Parallelism] @mcore-reviewers/context-parallelism ; [CI] @mcore-reviewers/ci +.gitlab/ +.github/ .gitlab-ci.yml -Dockerfile.ci -jet-tests.yml +Dockerfile.ci.lts +Dockerfile.ci.dev tests/ diff --git a/Dockerfile.ci b/Dockerfile.ci deleted file mode 100644 index f1b693b9d..000000000 --- a/Dockerfile.ci +++ /dev/null @@ -1,63 +0,0 @@ -# syntax=docker/dockerfile:1.3-labs - -ARG FROM_IMAGE_NAME -FROM $FROM_IMAGE_NAME as build_causal_conv1d -WORKDIR /opt -RUN CAUSAL_CONV1D_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/Dao-AILab/causal-conv1d.git@v1.2.2.post1 - -FROM $FROM_IMAGE_NAME as build_grouped_gemm -WORKDIR /opt -RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 - -FROM $FROM_IMAGE_NAME as build_mamba_ssm -WORKDIR /opt -RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.0.3 - -FROM $FROM_IMAGE_NAME as main -ENV DEBIAN_FRONTEND=noninteractive - -RUN apt-get update && \ - apt-get install -y --no-install-recommends gettext python3-venv && \ - apt-get clean && \ - python -m venv /opt/jet && \ - wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ - chmod a+x /usr/local/bin/yq - -COPY --from=build_causal_conv1d /opt/causal_conv1d-1.2.2.post1-cp310-cp310-linux_x86_64.whl ./ -COPY --from=build_grouped_gemm /opt/grouped_gemm-1.1.2-cp310-cp310-linux_x86_64.whl ./ -COPY --from=build_mamba_ssm /opt/mamba_ssm-2.0.3-cp310-cp310-linux_x86_64.whl ./ - -RUN pip3 install --no-cache-dir --upgrade-strategy only-if-needed -v \ -einops \ -flask-restful \ -nltk \ -pytest \ -pytest-cov \ -pytest_mock \ -pytest-random-order \ -sentencepiece \ -tiktoken \ -wrapt \ -zarr \ -wandb \ -triton==2.1.0 \ -causal_conv1d-1.2.2.post1-cp310-cp310-linux_x86_64.whl \ -mamba_ssm-2.0.3-cp310-cp310-linux_x86_64.whl \ -grouped_gemm-1.1.2-cp310-cp310-linux_x86_64.whl \ -tensorstore==0.1.45 && \ -rm *.whl - -# Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker -COPY . 
/opt/megatron-lm -RUN pip install /opt/megatron-lm -ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" - -##### For NVIDIANS only ##### -FROM main as jet -ARG CACHEBUST=0 -RUN --mount=type=secret,id=JET_INDEX_URLS \ - JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ - pip install jet-client --upgrade $JET_INDEX_URLS && \ - /opt/jet/bin/pip install jet-api --upgrade $JET_INDEX_URLS -ENV PATH="$PATH:/opt/jet/bin" -### \ No newline at end of file diff --git a/Dockerfile.ci.dev b/Dockerfile.ci.dev index f1b693b9d..80a4e04c4 100644 --- a/Dockerfile.ci.dev +++ b/Dockerfile.ci.dev @@ -11,7 +11,7 @@ RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 FROM $FROM_IMAGE_NAME as build_mamba_ssm WORKDIR /opt -RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.0.3 +RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.2.0 FROM $FROM_IMAGE_NAME as main ENV DEBIAN_FRONTEND=noninteractive @@ -23,41 +23,56 @@ RUN apt-get update && \ wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ chmod a+x /usr/local/bin/yq -COPY --from=build_causal_conv1d /opt/causal_conv1d-1.2.2.post1-cp310-cp310-linux_x86_64.whl ./ -COPY --from=build_grouped_gemm /opt/grouped_gemm-1.1.2-cp310-cp310-linux_x86_64.whl ./ -COPY --from=build_mamba_ssm /opt/mamba_ssm-2.0.3-cp310-cp310-linux_x86_64.whl ./ - -RUN pip3 install --no-cache-dir --upgrade-strategy only-if-needed -v \ -einops \ -flask-restful \ -nltk \ -pytest \ -pytest-cov \ -pytest_mock \ -pytest-random-order \ -sentencepiece \ -tiktoken \ -wrapt \ -zarr \ -wandb \ -triton==2.1.0 \ -causal_conv1d-1.2.2.post1-cp310-cp310-linux_x86_64.whl \ -mamba_ssm-2.0.3-cp310-cp310-linux_x86_64.whl \ -grouped_gemm-1.1.2-cp310-cp310-linux_x86_64.whl \ -tensorstore==0.1.45 && \ -rm *.whl +COPY --from=build_causal_conv1d /opt/causal_conv1d-*.whl ./ +COPY --from=build_grouped_gemm /opt/grouped_gemm-*.whl ./ +COPY --from=build_mamba_ssm /opt/mamba_ssm-*.whl ./ + +RUN \ + --mount=type=bind,source=requirements,target=requirements \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=setup.py,target=setup.py \ + --mount=type=bind,source=megatron/core/package_info.py,target=megatron/core/package_info.py \ + --mount=type=bind,source=megatron/core/README.md,target=megatron/core/README.md \ + --mount=type=bind,source=megatron/core/__init__.py,target=megatron/core/__init__.py <<"EOF" bash -ex + +pip install causal_conv1d-*.whl mamba_ssm-*.whl grouped_gemm-*.whl +PY_ENV=pytorch:24.07 pip install . +EOF # Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker -COPY . 
/opt/megatron-lm -RUN pip install /opt/megatron-lm +ARG MCORE_REPO +ARG MCORE_REF +ARG MCORE_BACKWARDS_REF +RUN <<"EOF" bash -exu +# Checkout latest +cd /opt +rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm +git init +git remote add origin ${MCORE_REPO} +git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' +git fetch origin $MCORE_REF +git checkout $MCORE_REF + +# Checkout backwards-ref +cd /opt +rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy +git init +git remote add origin ${MCORE_REPO} +git fetch origin $MCORE_BACKWARDS_REF +git checkout $MCORE_BACKWARDS_REF +rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ +EOF + +RUN PY_ENV=pytorch:24.07 pip install -e /opt/megatron-lm ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" +ENV NVTE_FLASH_ATTN=0 +ENV NVTE_FUSED_ATTN=0 ##### For NVIDIANS only ##### FROM main as jet ARG CACHEBUST=0 RUN --mount=type=secret,id=JET_INDEX_URLS \ JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ - pip install jet-client --upgrade $JET_INDEX_URLS && \ - /opt/jet/bin/pip install jet-api --upgrade $JET_INDEX_URLS + pip install jet-client jet-api --upgrade $JET_INDEX_URLS ENV PATH="$PATH:/opt/jet/bin" ### \ No newline at end of file diff --git a/Dockerfile.ci.lts b/Dockerfile.ci.lts new file mode 100644 index 000000000..ea0cf31a0 --- /dev/null +++ b/Dockerfile.ci.lts @@ -0,0 +1,77 @@ +# syntax=docker/dockerfile:1.3-labs + +ARG FROM_IMAGE_NAME +FROM $FROM_IMAGE_NAME as build_causal_conv1d +WORKDIR /opt +RUN CAUSAL_CONV1D_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/Dao-AILab/causal-conv1d.git@v1.2.2.post1 + +FROM $FROM_IMAGE_NAME as build_grouped_gemm +WORKDIR /opt +RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 + +FROM $FROM_IMAGE_NAME as build_mamba_ssm +WORKDIR /opt +RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.0.3 + +ARG FROM_IMAGE_NAME +FROM $FROM_IMAGE_NAME as main +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && \ + apt-get install -y --no-install-recommends gettext python3-venv && \ + apt-get clean && \ + python -m venv /opt/jet && \ + wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ + chmod a+x /usr/local/bin/yq + +COPY --from=build_causal_conv1d /opt/causal_conv1d-*.whl ./ +COPY --from=build_grouped_gemm /opt/grouped_gemm-*.whl ./ +COPY --from=build_mamba_ssm /opt/mamba_ssm-*.whl ./ + +RUN \ + --mount=type=bind,source=requirements,target=requirements \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=setup.py,target=setup.py \ + --mount=type=bind,source=megatron/core/package_info.py,target=megatron/core/package_info.py \ + --mount=type=bind,source=megatron/core/README.md,target=megatron/core/README.md \ + --mount=type=bind,source=megatron/core/__init__.py,target=megatron/core/__init__.py <<"EOF" bash -ex + +pip install causal_conv1d-*.whl mamba_ssm-*.whl grouped_gemm-*.whl +PY_ENV=pytorch:24.07 pip install . 
+EOF + +# Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker +ARG MCORE_REPO +ARG MCORE_REF +ARG MCORE_BACKWARDS_REF +RUN <<"EOF" bash -exu +# Checkout latest +cd /opt +rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm +git init +git remote add origin ${MCORE_REPO} +git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' +git fetch origin $MCORE_REF +git checkout $MCORE_REF + +# Checkout backwards-ref +cd /opt +rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy +git init +git remote add origin ${MCORE_REPO} +git fetch origin $MCORE_BACKWARDS_REF +git checkout $MCORE_BACKWARDS_REF +rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ +EOF + +RUN PY_ENV=pytorch:24.01 pip install -e /opt/megatron-lm +ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" + +##### For NVIDIANS only ##### +FROM main as jet +ARG CACHEBUST=0 +RUN --mount=type=secret,id=JET_INDEX_URLS \ + JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ + pip install jet-api jet-client --upgrade $JET_INDEX_URLS +ENV PATH="$PATH:/opt/jet/bin" +### \ No newline at end of file diff --git a/Dockerfile.linting b/Dockerfile.linting index b0670af9d..ff1a28cef 100644 --- a/Dockerfile.linting +++ b/Dockerfile.linting @@ -7,15 +7,27 @@ ENV DEBIAN_FRONTEND=noninteractive RUN sed -i -e 's/^APT/# APT/' -e 's/^DPkg/# DPkg/' \ /etc/apt/apt.conf.d/docker-clean +RUN apt-get update && \ + apt-get install -y python3-venv && \ + apt-get clean && \ + python -m venv /opt/jet RUN pip3 install --no-cache-dir \ black==24.4.2 \ isort==5.13.2 \ flake8==7.1.0 \ - pylint==3.2.6 + pylint==3.2.6 \ + mypy COPY . /opt/megatron-lm WORKDIR /opt/megatron-lm -FROM main as jet \ No newline at end of file +##### For NVIDIANS only ##### +FROM main as jet +ARG CACHEBUST=0 +RUN --mount=type=secret,id=JET_INDEX_URLS \ + JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ + pip install jet-client jet-api --upgrade $JET_INDEX_URLS +ENV PATH="$PATH:/opt/jet/bin" +### \ No newline at end of file diff --git a/LICENSE b/LICENSE index 16814e9d1..b4193aff5 100644 --- a/LICENSE +++ b/LICENSE @@ -30,8 +30,9 @@ The following applies to all files unless otherwise noted: This repository also contains code from Hugging Face Inc., Google Research, Facebook (from their Fairseq, Dino, and ParlAI projects), Microsoft (from their -Swin-Transformer project), Philip Popien, and the Mamba project (Tri Dao and -Albert Gu). Files from these organizations have notices at the top of each file. +Swin-Transformer project), Philip Popien, the Mamba project (Tri Dao and +Albert Gu), and the Triton language and compiler project (Philippe Tillet and +OpenAI). Files from these organizations have notices at the top of each file. Below are licenses used in those files, as indicated. @@ -241,7 +242,13 @@ Below are licenses used in those files, as indicated. See the License for the specific language governing permissions and limitations under the License. -------------- LICENSE FOR Facebook, Inc. and its affiliates, Meta Platforms, Inc. and its affiliates, Microsoft Corporation, and OpenGVLab/InternVL -------------- +-------------------------------------------------------------------------------- +LICENSE FOR +Facebook, Inc. and its affiliates, +Meta Platforms, Inc. and its affiliates, +Microsoft Corporation, +OpenGVLab/InternVL, and +Triton language and compiler. 
MIT License diff --git a/MANIFEST.in b/MANIFEST.in index dbb29b0a1..dbed9c406 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ include megatron/core/requirements.txt -include megatron/core/README.md \ No newline at end of file +include megatron/core/README.md +recursive-include requirements * diff --git a/README.md b/README.md index 138944b5c..a8e553dec 100644 --- a/README.md +++ b/README.md @@ -19,41 +19,50 @@ Megatron-LM & Megatron-Core # Table of Contents - * [Megatron Overview](#megatron-overview) - * [Megatron-LM](#megatron-lm) - * [Megatron-Core](#megatron-core) - * [Training Speed and Scalability](#training-speed-and-scalability) - * [Setup](#setup) - * [Downloading Checkpoints](#downloading-checkpoints) - * [Usage](#usage) - * [Training](#training) - * [Data Preprocessing](#data-preprocessing) - * [BERT Pretraining](#bert-pretraining) - * [GPT Pretraining](#gpt-pretraining) - * [T5 Pretraining](#t5-pretraining) - * [Distributed Pretraining](#distributed-pretraining) - * [Activation Checkpointing and Recomputation](#activation-checkpointing-and-recomputation) - * [Distributed Optimizer](#distributed-optimizer) - * [FlashAttention](#flashattention) - * [GPT-3 Example](#gpt-3-example) - * [Retro and InstructRetro](#retro-and-instructretro) - * [Evaluation and Tasks](#evaluation-and-tasks) - * [GPT Text Generation](#gpt-text-generation) - * [GPT Evaluation](#gpt-evaluation) - * [WikiText Perplexity Evaluation](#wikitext-perplexity-evaluation) - * [LAMBADA Cloze Accuracy](#lambada-cloze-accuracy) - * [BERT Task Evaluation](#bert-task-evaluation) - * [RACE Evaluation](#race-evaluation) - * [MNLI Evaluation](#mnli-evaluation) - * [Llama-2 Inference and Finetuning](#llama-2-inference-and-finetuning) - * [Datasets](#datasets) - * [Collecting Wikipedia Training Data](#collecting-wikipedia-training-data) - * [Collecting GPT Webtext Data](#collecting-gpt-webtext-data) - * [Reproducibility](#reproducibility) - * [Projects using Megatron](#projects-using-megatron) +- [Megatron-LM \& Megatron-Core](#megatron-lm--megatron-core) +- [Latest News](#latest-news) +- [Table of Contents](#table-of-contents) +- [Megatron Overview](#megatron-overview) + - [Megatron-LM](#megatron-lm) + - [Megatron-Core](#megatron-core) +- [Training Speed and Scalability](#training-speed-and-scalability) +- [Setup](#setup) + - [Downloading Checkpoints](#downloading-checkpoints) +- [Usage](#usage) +- [Training](#training) + - [Data Preprocessing](#data-preprocessing) + - [BERT Pretraining](#bert-pretraining) + - [GPT Pretraining](#gpt-pretraining) + - [T5 Pretraining](#t5-pretraining) + - [Distributed Pretraining](#distributed-pretraining) + - [Activation Checkpointing and Recomputation](#activation-checkpointing-and-recomputation) + - [Distributed Optimizer](#distributed-optimizer) + - [FlashAttention](#flashattention) + - [GPT-3 Example](#gpt-3-example) + - [Retro and InstructRetro](#retro-and-instructretro) + - [Mamba-based Language Models](#mamba-based-language-models) + - [Mixture of Experts](#mixture-of-experts) + - [Key Features of MoE](#key-features-of-moe) +- [Evaluation and Tasks](#evaluation-and-tasks) + - [GPT Text Generation](#gpt-text-generation) + - [Detoxify GPT via Self-generation](#detoxify-gpt-via-self-generation) + - [GPT Evaluation](#gpt-evaluation) + - [WikiText Perplexity Evaluation](#wikitext-perplexity-evaluation) + - [LAMBADA Cloze Accuracy](#lambada-cloze-accuracy) + - [BERT Task Evaluation](#bert-task-evaluation) + - [RACE Evaluation](#race-evaluation) + - [MNLI 
Evaluation](#mnli-evaluation) + - [Llama-2 Inference and Finetuning](#llama-2-inference-and-finetuning) +- [Model Optimization and Deployment](#model-optimization-and-deployment) + - [Quantization and TensorRT-LLM Deployment](#quantization-and-tensorrt-llm-deployment) +- [Datasets](#datasets) + - [Collecting Wikipedia Training Data](#collecting-wikipedia-training-data) + - [Collecting GPT Webtext Data](#collecting-gpt-webtext-data) +- [Reproducibility](#reproducibility) + - [Projects Using Megatron](#projects-using-megatron) # Megatron Overview -This repository comprises two essential components: **Megatron-LM** and **Megatron-Core**. Megatron-LM serves as a ressearch-oriented framework leveraging Megatron-Core for large language model (LLM) training. Megatron-Core, on the other hand, is a library of GPU optimized training techniques that comes with formal product support including versioned APIs and regular releases. You can use Megatron-Core alongside Megatron-LM or [Nvidia NeMo Framework](https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/nlp/nemo_megatron/mcore_customization.html) for an end-to-end and cloud-native solution. Alternatively, you can integrate Megatron-Core's building blocks into your preferred training framework. +This repository comprises two essential components: **Megatron-LM** and **Megatron-Core**. Megatron-LM serves as a research-oriented framework leveraging Megatron-Core for large language model (LLM) training. Megatron-Core, on the other hand, is a library of GPU optimized training techniques that comes with formal product support including versioned APIs and regular releases. You can use Megatron-Core alongside Megatron-LM or [Nvidia NeMo Framework](https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/nlp/nemo_megatron/mcore_customization.html) for an end-to-end and cloud-native solution. Alternatively, you can integrate Megatron-Core's building blocks into your preferred training framework. ## Megatron-LM First introduced in 2019, Megatron ([1](https://arxiv.org/pdf/1909.08053.pdf), [2](https://arxiv.org/pdf/2104.04473.pdf), and [3](https://arxiv.org/pdf/2205.05198)) sparked a wave of innovation in the AI community, enabling researchers and developers to utilize the underpinnings of this library to further LLM advancements. Today, many of the most popular LLM developer frameworks have been inspired by and built directly leveraging the open-source Megatron-LM library, spurring a wave of foundation models and AI startups. Some of the most popular LLM frameworks built on top of Megatron-LM include [Colossal-AI](https://github.com/hpcaitech/ColossalAI), [HuggingFace Accelerate](https://github.com/huggingface/accelerate), and [NVIDIA NeMo Framework](https://www.nvidia.com/en-us/ai-data-science/generative-ai/nemo-framework/). A list of projects that have directly used Megatron can be found [here](#projects-using-megatron). @@ -362,6 +371,17 @@ python tools/create_doc_index.py \ --> +## Mixture of Experts +MoE (Mixture of Experts) is a powerful LLM architecture implemented in the Megatron-Core framework, designed to enhance the efficiency and scalability of large language models. It leverages **Expert Parallelism**, allowing multiple experts to be distributed across different workers, where each worker processes distinct batches of training samples. This method significantly increases computational throughput, enabling models to achieve high performance metrics, such as 47% MFU during BF16 training for 8x7B on H100. 
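To make the MoE description above more concrete before the key-feature list that follows, here is a minimal, hypothetical configuration sketch. It is not part of this patch; the field names mirror `megatron.core.transformer.TransformerConfig`, but the concrete values (8 experts, top-2 routing, expert-parallel size 2) are assumptions chosen to echo the 8x7B example mentioned above and should be checked against the installed Megatron-Core version.

```python
# Hypothetical sketch, not part of this patch: configuring an MoE transformer
# with expert parallelism in Megatron-Core. Values are illustrative only.
from megatron.core.transformer import TransformerConfig

moe_config = TransformerConfig(
    num_layers=32,
    hidden_size=4096,
    num_attention_heads=32,
    # MoE-specific settings: 8 experts, top-2 routing, aux-loss load balancing.
    num_moe_experts=8,
    moe_router_topk=2,
    moe_router_load_balancing_type="aux_loss",
    moe_aux_loss_coeff=1e-2,
    moe_grouped_gemm=True,
    # Distribute the 8 experts across 2 expert-parallel ranks.
    expert_model_parallel_size=2,
)
```

The same settings are exposed as command-line flags by the training entrypoints (for example `--num-experts 8 --moe-router-topk 2 --moe-router-load-balancing-type aux_loss --moe-aux-loss-coeff 1e-2`), as used by the PTQ export script added later in this patch.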
+ +Key Features of MoE: +- **Parallelism Techniques**: MoE combines various parallelism strategies, including Expert Parallelism, Data Parallelism, Tensor Parallelism, Sequence Paralleism, Pipeline Parallelism, and Context Parallelism. This combination allows for handling larger model variants effectively. +- **Router and Load Balancing**: The system employs advanced routing mechanisms like the Top-K router and utilizes load balancing algorithms to optimize token distribution among experts. +- **Performance Optimizations**: Techniques such as GroupedGEMM and FP8 training enhance the efficiency of MoE models, particularly when multiple experts are involved. +- **Token Dispatch Mechanism**: MoE supports both dropless and token drop strategies to manage token distribution effectively across experts. + +For a comprehensive overview of MoE training configurations and optimizations, please refer to the detailed README located at [megatron/core/transformer/moe/README.md](./megatron/core/transformer/moe/README.md). + # Evaluation and Tasks We provide several command line arguments, detailed in the scripts listed below, to handle various zero-shot and fine-tuned downstream tasks. However, you can also finetune your model from a pretrained checkpoint on other corpora as desired. To do so, simply add the `--finetune` flag and adjust the input files and training parameters within the original training script. The iteration count will be reset to zero, and the optimizer and internal state will be reinitialized. If the fine-tuning is interrupted for any reason, be sure to remove the `--finetune` flag before continuing, otherwise the training will start again from the beginning. @@ -540,7 +560,7 @@ python tasks/main.py \ The Llama-2 [family of models](https://ai.meta.com/llama/) are an open-source set of pretrained & finetuned (for chat) models that have achieved strong results across a wide set of benchmarks. At the time of release, Llama-2 models achieved among the best results for open-source models, and were competitive with the closed-source GPT-3.5 model (see https://arxiv.org/pdf/2307.09288.pdf). -The Llama-2 checkpoints can be loaded into Megatron for inference and finetuning. See documentation [here](docs/llama2.md). +The Llama-2 checkpoints can be loaded into Megatron for inference and finetuning. See documentation [here](docs/llama_mistral.md). # Model Optimization and Deployment Megatron-Core (MCore) `GPTModel` family supports advanced quantization algorithms and high-performance inference through TensorRT-LLM. diff --git a/docs/source/api-guide/context_parallel.rst b/docs/source/api-guide/context_parallel.rst index c381f66e8..c08defd21 100644 --- a/docs/source/api-guide/context_parallel.rst +++ b/docs/source/api-guide/context_parallel.rst @@ -25,7 +25,7 @@ Context parallelism benefits LLM encounters OOM (out of memory) issue with long context (i.e., long sequence length) because of linearly increasing memory footprint of activations. Recomputing activations in backward can avoid OOM but also introduce significant overheads (~30% with full recompute). Enlarging TP (tensor model parallelism) can fix the OOM issue as well, but it potentially makes compute (e.g., Linear) too short to overlap communication latencies. To be clear, scaling out to more GPUs with bigger TP can hit the overlapping problem no matter if OOM happens. -CP can better address the issues. With CP, each GPU only computes on a part of the sequence, which reduces both computation and communication by CP times. 
Therefore, there are no concerns about the overlapping between them. The activation memory footprint per GPU is also CP times smaller, hence no OOM issue any more. As Figure 2 shows, the combinations of TP and CP can achieve optimal performance by eliminating recompute overheads and making the best tradeoff between computation and communications. +CP can better address the issues. With CP, each GPU only computes on a part of the sequence, which reduces both computation and communication by CP times. Therefore, there are no concerns about the overlapping between them. The activation memory footprint per GPU is also CP times smaller, hence no OOM issue anymore. As Figure 2 shows, the combinations of TP and CP can achieve optimal performance by eliminating recompute overheads and making the best tradeoff between computation and communications. Enabling context parallelism ---------------------------- diff --git a/examples/export/knowledge_distillation/pretrain_gpt_modelopt.py b/examples/export/knowledge_distillation/pretrain_gpt_modelopt.py new file mode 100644 index 000000000..65d0727d8 --- /dev/null +++ b/examples/export/knowledge_distillation/pretrain_gpt_modelopt.py @@ -0,0 +1,136 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Pretrain GPT.""" +import os +import sys +from functools import partial + +# This file isn't located in project root, but to import, it should pretend to be. +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../"))) + +from megatron.core import mpu +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.gpt_dataset import GPTDataset, GPTDatasetConfig, MockGPTDataset +from megatron.core.datasets.utils import get_blend_from_list +from megatron.core.enums import ModelType +from megatron.core.models.gpt import GPTModel +from megatron.core.utils import StragglerDetector +from megatron.inference.arguments import add_modelopt_args +from megatron.inference.gpt import loss_func, model_provider +from megatron.training import get_args, get_timers, get_tokenizer, pretrain +from megatron.training.utils import ( + get_batch_on_this_cp_rank, + get_batch_on_this_tp_rank, + print_rank_0, +) + +stimer = StragglerDetector() + + +def get_batch(data_iterator): + """Generate a batch.""" + + # TODO: this is pretty hacky, find a better way + if (not mpu.is_pipeline_first_stage()) and (not mpu.is_pipeline_last_stage()): + return None, None, None, None, None + + # get batches based on the TP rank you are on + batch = get_batch_on_this_tp_rank(data_iterator) + + # slice batch along sequence dimension for context parallelism + batch = get_batch_on_this_cp_rank(batch) + + return batch.values() + + +def forward_step(data_iterator, model: GPTModel): + """Forward training step. + + Args: + data_iterator : Input data iterator + model (GPTModel): The GPT Model + """ + timers = get_timers() + + # Get the batch. 
+ timers('batch-generator', log_level=2).start() + global stimer + with stimer(bdata=True): + tokens, labels, loss_mask, attention_mask, position_ids = get_batch(data_iterator) + timers('batch-generator').stop() + + with stimer: + output_tensor = model(tokens, position_ids, attention_mask, labels=labels) + + # [ModelOpt]: model is needed to access ModelOpt distillation losses + return output_tensor, partial(loss_func, loss_mask, model) + + +def is_dataset_built_on_rank(): + return ( + mpu.is_pipeline_first_stage() or mpu.is_pipeline_last_stage() + ) and mpu.get_tensor_model_parallel_rank() == 0 + + +def core_gpt_dataset_config_from_args(args): + tokenizer = get_tokenizer() + + return GPTDatasetConfig( + random_seed=args.seed, + sequence_length=args.seq_length, + blend=get_blend_from_list(args.data_path), + blend_per_split=[ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path), + ], + split=args.split, + num_dataset_builder_threads=args.num_dataset_builder_threads, + path_to_cache=args.data_cache_path, + mmap_bin_files=args.mmap_bin_files, + tokenizer=tokenizer, + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + create_attention_mask=args.create_attention_mask_in_dataloader, + ) + + +def train_valid_test_datasets_provider(train_val_test_num_samples): + """Build the train test and validation datasets. + + Args: + train_val_test_num_samples : A list containing the number of samples in train test and validation. + """ + args = get_args() + + config = core_gpt_dataset_config_from_args(args) + + if args.mock_data: + dataset_type = MockGPTDataset + else: + dataset_type = GPTDataset + + print_rank_0("> building train, validation, and test datasets for GPT ...") + + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + dataset_type, train_val_test_num_samples, is_dataset_built_on_rank, config + ).build() + + print_rank_0("> finished creating GPT datasets ...") + + return train_ds, valid_ds, test_ds + + +if __name__ == "__main__": + # Temporary for transition to core datasets + train_valid_test_datasets_provider.is_distributed = True + + pretrain( + train_valid_test_datasets_provider, + model_provider, + ModelType.encoder_or_decoder, + forward_step, + args_defaults={"tokenizer_type": "GPT2BPETokenizer"}, + extra_args_provider=add_modelopt_args, + ) diff --git a/examples/export/ptq_and_trtllm_export/README.md b/examples/export/ptq_and_trtllm_export/README.md index c5255f7cc..abaa0d764 100644 --- a/examples/export/ptq_and_trtllm_export/README.md +++ b/examples/export/ptq_and_trtllm_export/README.md @@ -250,4 +250,46 @@ python examples/export/ptq_and_trtllm_export/trtllm_text_generation.py --tokeniz python examples/export/ptq_and_trtllm_export/trtllm_text_generation.py --tokenizer meta-llama/Meta-Llama-3.1-8B #For llama-3.1 +``` + + +### Mixtral-8x7B FP8 Quantization and TensorRT-LLM Deployment +First download the nemotron checkpoint from https://catalog.ngc.nvidia.com/orgs/nvidia/teams/nemo/models/mixtral-8x7b-v01, extract the +sharded checkpoint from the `.nemo` tarbal. + +```sh +ngc registry model download-version "nvidia/nemo/mixtral-8x7b-v01:1.0" +cd mixtral-8x7b-v01_v1.0 +tar -xvf mixtral.nemo +cd .. 
+``` + +Then log in to huggingface so that you can access to model + +> **NOTE:** You need a token generated from huggingface.co/settings/tokens and access to mistralai/Mixtral-8x7B-v0.1 on huggingface + +```sh +pip install -U "huggingface_hub[cli]" +huggingface-cli login +``` + +Now launch the PTQ + TensorRT-LLM checkpoint export script, + +```sh +bash examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh ./mixtral-8x7b-v01_v1.0/ +``` + +Then build TensorRT engine and run text generation example using the newly built TensorRT engine + +```sh +export trtllm_options=" \ + --checkpoint_dir /tmp/trtllm_ckpt \ + --output_dir /tmp/trtllm_engine \ + --max_input_len 2048 \ + --max_seq_len 512 \ + --max_batch_size 8 " + +trtllm-build ${trtllm_options} + +python examples/export/ptq_and_trtllm_export/trtllm_text_generation.py --tokenizer mistralai/Mixtral-8x7B-v0.1 ``` \ No newline at end of file diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh new file mode 100644 index 000000000..d2a4edee4 --- /dev/null +++ b/examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh @@ -0,0 +1,84 @@ +#!/bin/bash +set -e + +DEFAULT_NAME="/checkpoints/Mistral-NeMo-12B-Base" +NAME="${1:-$DEFAULT_NAME}" + +DEFAULT_QUANT_CFG="fp8" +QUANT_CFG="${2:-$DEFAULT_QUANT_CFG}" + +# NOTE: UNFUSED ATTENTION MUST BE USED TO AVOID ADDITIONAL STATE_DICT KEY MISMATCH. +export NVTE_FLASH_ATTN=0 +export NVTE_FUSED_ATTN=0 +export NVTE_UNFUSED_ATTN=1 + +# CHANGE THE FOLLOWING IF YOU MOUNT YOUR DATA AND CHECKPOINTS DIFFERENTLY IN THE CONTAINER. +TP="8" +INFERENCE_TP=${TP} +DECODER_TYPE="llama" +CHECKPOINT_LOAD_DIR="${NAME}" + +if [ "$QUANT_CFG" = "int4_awq" ]; then + INFERENCE_TP="1" +fi + +additional_options=" \ + --export-quant-cfg ${QUANT_CFG} \ + --export-legacy-megatron \ + --export-te-mcore-model \ + --calib-batch-size 8 \ + --decoder ${DECODER_TYPE} \ + --export-dir /tmp/trtllm_ckpt \ + --inference-tensor-parallel ${INFERENCE_TP} " + +# DO NOT CHANGE THE SETTING BELOW UNLESS YOU KNOW WHAT YOU ARE DOING!!! 
+export CUDA_DEVICE_MAX_CONNECTIONS=1 + +options=" \ + --untie-embeddings-and-output-weights \ + --no-masked-softmax-fusion \ + --no-position-embedding \ + --use-mcore-models \ + --disable-bias-linear \ + --rotary-percent 1.0 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --tensor-model-parallel-size ${TP} \ + --pipeline-model-parallel-size 1 \ + --num-layers 32 \ + --hidden-size 4096 \ + --ffn-hidden-size 14336 \ + --num-attention-heads 32 \ + --seq-length 4096 \ + --kv-channels 128 \ + --normalization RMSNorm \ + --swiglu \ + --num-query-groups 8 \ + --num-experts 8 \ + --moe-router-topk 2 \ + --moe-aux-loss-coeff 1e-2 \ + --moe-router-load-balancing-type aux_loss \ + --group-query-attention \ + --position-embedding-type rope \ + --no-rope-fusion \ + --max-position-embeddings 32768 \ + --micro-batch-size 1 \ + --tokenizer-type HuggingFaceTokenizer \ + --tiktoken-pattern v2 \ + --tokenizer-model mistralai/Mixtral-8x7B-Instruct-v0.1 \ + --save-interval 1000000 \ + --load ${CHECKPOINT_LOAD_DIR} \ + --bf16 \ + --rotary-base 1000000 \ + --use-dist-ckpt" + +# Precompile CUDA extentions +python -c "import modelopt.torch.quantization.extensions as ext; print(ext.cuda_ext); print(ext.cuda_ext_fp8)" + +# Acquire launch configuration where variable launch_config will be set +launch_config="--nproc_per_node=${TP}" + +# Launch multi-process with torchrun +torchrun ${launch_config} examples/export/ptq_and_trtllm_export/text_generation_ptq.py ${options} ${additional_options} + + diff --git a/examples/export/ptq_and_trtllm_export/text_generation_ptq.py b/examples/export/ptq_and_trtllm_export/text_generation_ptq.py index 340c9c90f..c915cec79 100644 --- a/examples/export/ptq_and_trtllm_export/text_generation_ptq.py +++ b/examples/export/ptq_and_trtllm_export/text_generation_ptq.py @@ -6,7 +6,7 @@ import sys from pathlib import Path -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../../"))) +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../"))) import modelopt.torch.quantization as mtq import torch @@ -120,6 +120,9 @@ def get_calib_dataloader( print_rank_0("WARNING: Forcing exit_on_missing_checkpoint to True for text generation.") args.exit_on_missing_checkpoint = True + if hasattr(args, 'moe_grouped_gemm') and args.moe_grouped_gemm == True: + print_rank_0("WARNING: Forcing moe_grouped_gemm to False for PTQ and export.") + args.moe_grouped_gemm = False # Set up model and load checkpoint # [ModelOpt]: make sure that output logits are allgathered. 
@@ -168,7 +171,7 @@ def hf_dataset_forword_loop_func(model): model, prompts=prompts, tokens_to_generate=0, - return_output_log_probs=True, + return_output_log_probs=False, temperature=1.0, ) else: @@ -216,3 +219,4 @@ def hf_dataset_forword_loop_func(model): ) print_rank_0(f"TensorRT-LLM checkpoints saved to {args.export_dir}") + torch.distributed.barrier() diff --git a/examples/multimodal/Dockerfile b/examples/multimodal/Dockerfile index 0ea6edda3..7b54091ae 100644 --- a/examples/multimodal/Dockerfile +++ b/examples/multimodal/Dockerfile @@ -10,17 +10,17 @@ RUN apt update && \ bash \ git \ vim \ + tmux \ python-is-python3 \ default-jre RUN pip install --upgrade pip -RUN pip install einops einops-exts sentencepiece braceexpand webdataset -RUN pip install transformers datasets +RUN pip install einops einops-exts sentencepiece braceexpand webdataset packaging +RUN pip install transformers datasets accelerate timm RUN pip install pytest-cov pytest_mock nltk wrapt RUN pip install zarr "tensorstore==0.1.45" -RUN pip install git+https://github.com/fanshiqing/grouped_gemm@main RUN pip install black isort click==8.0.2 -RUN pip install pycocoevalcap megatron-energon +RUN pip install pycocoevalcap megatron-energon mistral-common tiktoken RUN pip install git+https://github.com/openai/CLIP.git # Use --no-deps for the following to avoid outdated and unnecessary dependencies. -RUN pip install open-flamingo[eval] --no-deps +RUN pip install open_clip_torch open-flamingo[eval] --no-deps diff --git a/examples/multimodal/README.md b/examples/multimodal/README.md index 00be3b46b..62e47567b 100644 --- a/examples/multimodal/README.md +++ b/examples/multimodal/README.md @@ -16,14 +16,15 @@ You can build a docker container using `examples/multimodal/Dockerfile` to run t ### Language model -Follow the instructions in `megatron-lm/docs/llama_mistral.md` to download weights for Mistral-7B-Instruct-v0.3 and convert to mcore format with tensor parallel size 4 +Follow the instructions in [Mistral](../../docs/llama_mistral.md#mistral-7b) to download weights for Mistral-7B-Instruct-v0.3 (Base or Instruct) from HuggingFace and convert to mcore format with tensor parallel size 4. +Please use the tokenizer from HuggingFace. ### Vision model This example uses the OpenAI CLIP `ViT-L/14@336px` Vision model. 
To download the weights from OpenAI and convert them to a format that can be loaded in megatron, please run the following: ``` -python examples/multimodal/clip_converter.py --download-root /some/download/folder --output /some/output/folder --tensor-parallel-size 4 --use-te +python examples/multimodal/model_converter/clip_converter.py --download-root /some/download/folder --output /some/output/folder --tensor-parallel-size 4 --use-te ``` ### Combined model checkpoint @@ -31,7 +32,7 @@ python examples/multimodal/clip_converter.py --download-root /some/download/fold Update the paths to point to the mcore converted CLIP and Mistral models and run the following script to combine the Mistral and CLIP models into a single multimodal checkpoint folder: ``` -examples/multimodal/combine_mistral_clip.sh /path/to/mistral/model /path/to/clip/model /output/dir +examples/multimodal/combine_lm_vision_checkpoints.sh /path/to/mistral/model /path/to/clip/model /output/dir ``` ## Training @@ -57,7 +58,7 @@ examples/multimodal/combine_mistral_clip.sh /path/to/mistral/model /path/to/clip ``` cd /wds - energon ./ + energon prepare ./ ``` select the following values for the presented options: @@ -112,7 +113,7 @@ Run the following script: ``` examples/multimodal/text_generation_mistral_clip.sh --input-image-path /path/to/input/images --output-path /some/output/directory \ - --model-path /path/to/model.pt --tokenizer-path /path/to/tokenizer.model --gt-path /path/to/groundtruth/file --task generation-task-name + --model-path /path/to/model.pt --tokenizer-path /path/to/tokenizer/ --gt-path /path/to/groundtruth/file --task generation-task-name ``` where `--task generation-task-name` is the name of the evaluation benchmark such as `captioning` or `MMMU`. diff --git a/examples/multimodal/combine_lm_vision_checkpoints.sh b/examples/multimodal/combine_lm_vision_checkpoints.sh new file mode 100755 index 000000000..52de16ecd --- /dev/null +++ b/examples/multimodal/combine_lm_vision_checkpoints.sh @@ -0,0 +1,57 @@ +#/bin/bash +MCORE_LM=$1 # +MCORE_VISION=$2 # +OUTPUT_DIR=$3 # +MODEL_TYPE=$4 # Model type. Default: Mistral CLIP example. 
+ +if [[ $MODEL_TYPE == "nvlm" ]]; then + # NVLM TP=8 + python examples/multimodal/combine_state_dicts.py \ + --input \ + ${MCORE_LM}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_04/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_04/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_05/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_05/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_06/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_06/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_07/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_07/model_optim_rng.pt \ + --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ + --output \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_04/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_05/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_06/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_07/model_optim_rng.pt +else + # Mistral CLIP example TP=4. 
+ python examples/multimodal/combine_state_dicts.py \ + --input \ + ${MCORE_LM}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ + --output \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_03/model_optim_rng.pt +fi + +echo 1 > ${OUTPUT_DIR}/latest_checkpointed_iteration.txt diff --git a/examples/multimodal/combine_mistral_clip.sh b/examples/multimodal/combine_mistral_clip.sh deleted file mode 100755 index ff866c7f7..000000000 --- a/examples/multimodal/combine_mistral_clip.sh +++ /dev/null @@ -1,23 +0,0 @@ -#/bin/bash -MCORE_MISTRAL=$1 # -MCORE_CLIP=$2 # -OUTPUT_DIR=$3 # - -python examples/multimodal/combine_state_dicts.py \ - --input \ - ${MCORE_MISTRAL}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${MCORE_CLIP}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${MCORE_MISTRAL}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${MCORE_CLIP}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${MCORE_MISTRAL}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${MCORE_CLIP}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${MCORE_MISTRAL}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - ${MCORE_CLIP}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ - --output \ - ${OUTPUT_DIR}/mistral_instruct_clip336_tp4_combined_mcore/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${OUTPUT_DIR}/mistral_instruct_clip336_tp4_combined_mcore/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${OUTPUT_DIR}/mistral_instruct_clip336_tp4_combined_mcore/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${OUTPUT_DIR}/mistral_instruct_clip336_tp4_combined_mcore/iter_0000001/mp_rank_03/model_optim_rng.pt - -echo 1 > ${OUTPUT_DIR}/mistral_instruct_clip336_tp4_combined_mcore/latest_checkpointed_iteration.txt diff --git a/examples/multimodal/config.py b/examples/multimodal/config.py index cf48b131a..343fcd589 100644 --- a/examples/multimodal/config.py +++ b/examples/multimodal/config.py @@ -60,6 +60,35 @@ def get_language_model_config(config): config.apply_rope_fusion = False config.attention_softmax_in_fp32 = True config.ffn_hidden_size = 14336 + elif config.language_model_type == "yi-34b": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 20480 + elif config.language_model_type == "qwen2.0_72B": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.add_qkv_bias = True + 
config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 29568 + else: + raise ValueError(f"unknown language model type {config.language_model_type}") return config @@ -107,6 +136,29 @@ def get_vision_model_config(config, apply_query_key_layer_scaling): config.apply_rope_fusion = False config.qk_layernorm = False config.layernorm_epsilon = 1e-6 + elif config.vision_model_type == "internvit": + config.num_layers = 45 + config.num_attention_heads = 32 # Padded for TP=8. + config.num_query_groups = 32 # Padded for TP=8. + config.kv_channels = 128 + config.add_bias_linear = True + config.add_qkv_bias = False + config.hidden_size = 3200 + config.hidden_dropout = 0.0 + config.attention_dropout = 0.0 + config.ffn_hidden_size = 12800 + config.gated_linear_unit = False + config.activation_func = torch.nn.functional.gelu + config.layernorm_zero_centered_gamma = False + config.apply_query_key_layer_scaling = apply_query_key_layer_scaling + config.bias_activation_fusion = False + config.bias_dropout_fusion = False + config.attention_softmax_in_fp32 = True + config.normalization = 'RMSNorm' + config.layernorm_epsilon = 1e-6 + config.apply_rope_fusion = False + else: + raise ValueError(f"unknown vision model type {config.vision_model_type}") return config @@ -128,6 +180,17 @@ def get_vision_projection_config(config, hidden_size): elif config.language_model_type == "mistral_7b": config.ffn_hidden_size = 14336 config.activation_func = torch.nn.functional.gelu + config.normalization = None + elif config.language_model_type == "yi-34b": + config.ffn_hidden_size = 20480 + config.normalization = "LayerNorm" + config.activation_func = torch.nn.functional.gelu + elif config.language_model_type == "qwen2.0_72B": + config.ffn_hidden_size = 29568 + config.normalization = "LayerNorm" + config.activation_func = torch.nn.functional.gelu + else: + raise ValueError(f"unknown language model type {config.language_model_type}") return config @@ -151,5 +214,3 @@ class EvaluationConfig: num_partitions: int = 1 partition_id: int = 0 num_samples_per_partition: int = 0 - - prompt_format: str = "mistral" diff --git a/examples/multimodal/conversation.py b/examples/multimodal/conversation.py deleted file mode 100644 index 5139d2033..000000000 --- a/examples/multimodal/conversation.py +++ /dev/null @@ -1,353 +0,0 @@ -# From https://github.com/haotian-liu/LLaVA/blob/c121f0432da27facab705978f83c4ada465e46fd/llava/conversation.py - -import dataclasses -from enum import auto, Enum -from typing import List - - -class SeparatorStyle(Enum): - """Different separator style.""" - SINGLE = auto() - TWO = auto() - MPT = auto() - PLAIN = auto() - LLAMA_2 = auto() - - -@dataclasses.dataclass -class Conversation: - """A class that keeps all conversation history.""" - system: str - roles: List[str] - messages: List[List[str]] - offset: int - sep_style: SeparatorStyle = SeparatorStyle.SINGLE - sep: str = "###" - sep2: str = None - real_sep2: str = None - version: str = "Unknown" - - skip_next: bool = False - - def get_prompt(self): - messages = self.messages - if len(messages) > 0 and type(messages[0][1]) is tuple: - messages = self.messages.copy() - init_role, init_msg = messages[0].copy() - init_msg = init_msg[0].replace("", "").strip() - if 
'mmtag' in self.version: - messages[0] = (init_role, init_msg) - messages.insert(0, (self.roles[0], "")) - messages.insert(1, (self.roles[1], "Received.")) - else: - messages[0] = (init_role, "\n" + init_msg) - - if self.sep_style == SeparatorStyle.SINGLE: - ret = self.system + self.sep - for role, message in messages: - if message: - if type(message) is tuple: - message, _, _ = message - ret += role + ": " + message + self.sep - else: - ret += role + ":" - elif self.sep_style == SeparatorStyle.TWO: - seps = [self.sep, self.sep2] - ret = self.system + seps[0] - for i, (role, message) in enumerate(messages): - if message: - if type(message) is tuple: - message, _, _ = message - ret += role + ": " + message + seps[i % 2] - else: - ret += role + ":" - elif self.sep_style == SeparatorStyle.MPT: - ret = self.system + self.sep - for role, message in messages: - if message: - if type(message) is tuple: - message, _, _ = message - ret += role + message + self.sep - else: - ret += role - elif self.sep_style == SeparatorStyle.LLAMA_2: - wrap_sys = lambda msg: f"<>\n{msg}\n<>\n\n" - wrap_inst = lambda msg: f"[INST] {msg} [/INST]" - ret = "" - - for i, (role, message) in enumerate(messages): - if i == 0: - assert message, "first message should not be none" - assert role == self.roles[0], "first message should come from user" - if message: - if type(message) is tuple: - message, _, _ = message - if i == 0: message = wrap_sys(self.system) + message - if i % 2 == 0: - message = wrap_inst(message) - ret += self.sep + message - else: - ret += " " + message + " " + self.sep2 - else: - ret += "" - ret = ret.lstrip(self.sep) - elif self.sep_style == SeparatorStyle.PLAIN: - seps = [self.sep, self.sep2] - ret = self.system - for i, (role, message) in enumerate(messages): - if message: - if type(message) is tuple: - message, _, _ = message - ret += message + seps[i % 2] - else: - ret += "" - else: - raise ValueError(f"Invalid style: {self.sep_style}") - - return ret - - def append_message(self, role, message): - self.messages.append([role, message]) - - def get_images(self, return_pil=False): - images = [] - for i, (role, msg) in enumerate(self.messages[self.offset:]): - if i % 2 == 0: - if type(msg) is tuple: - import base64 - from io import BytesIO - from PIL import Image - msg, image, image_process_mode = msg - if image_process_mode == "Pad": - def expand2square(pil_img, background_color=(122, 116, 104)): - width, height = pil_img.size - if width == height: - return pil_img - elif width > height: - result = Image.new(pil_img.mode, (width, width), background_color) - result.paste(pil_img, (0, (width - height) // 2)) - return result - else: - result = Image.new(pil_img.mode, (height, height), background_color) - result.paste(pil_img, ((height - width) // 2, 0)) - return result - image = expand2square(image) - elif image_process_mode in ["Default", "Crop"]: - pass - elif image_process_mode == "Resize": - image = image.resize((336, 336)) - else: - raise ValueError(f"Invalid image_process_mode: {image_process_mode}") - max_hw, min_hw = max(image.size), min(image.size) - aspect_ratio = max_hw / min_hw - max_len, min_len = 800, 400 - shortest_edge = int(min(max_len / aspect_ratio, min_len, min_hw)) - longest_edge = int(shortest_edge * aspect_ratio) - W, H = image.size - if longest_edge != max(image.size): - if H > W: - H, W = longest_edge, shortest_edge - else: - H, W = shortest_edge, longest_edge - image = image.resize((W, H)) - if return_pil: - images.append(image) - else: - buffered = BytesIO() - 
image.save(buffered, format="PNG") - img_b64_str = base64.b64encode(buffered.getvalue()).decode() - images.append(img_b64_str) - return images - - def to_gradio_chatbot(self): - ret = [] - for i, (role, msg) in enumerate(self.messages[self.offset:]): - if i % 2 == 0: - if type(msg) is tuple: - import base64 - from io import BytesIO - msg, image, image_process_mode = msg - max_hw, min_hw = max(image.size), min(image.size) - aspect_ratio = max_hw / min_hw - max_len, min_len = 800, 400 - shortest_edge = int(min(max_len / aspect_ratio, min_len, min_hw)) - longest_edge = int(shortest_edge * aspect_ratio) - W, H = image.size - if H > W: - H, W = longest_edge, shortest_edge - else: - H, W = shortest_edge, longest_edge - image = image.resize((W, H)) - buffered = BytesIO() - image.save(buffered, format="JPEG") - img_b64_str = base64.b64encode(buffered.getvalue()).decode() - img_str = f'user upload image' - msg = img_str + msg.replace('', '').strip() - ret.append([msg, None]) - else: - ret.append([msg, None]) - else: - ret[-1][-1] = msg - return ret - - def copy(self): - return Conversation( - system=self.system, - roles=self.roles, - messages=[[x, y] for x, y in self.messages], - offset=self.offset, - sep_style=self.sep_style, - sep=self.sep, - sep2=self.sep2, - real_sep2=self.real_sep2, - version=self.version) - - def dict(self): - if len(self.get_images()) > 0: - return { - "system": self.system, - "roles": self.roles, - "messages": [[x, y[0] if type(y) is tuple else y] for x, y in self.messages], - "offset": self.offset, - "sep": self.sep, - "sep2": self.sep2, - "real_sep2": self.real_sep2 - } - return { - "system": self.system, - "roles": self.roles, - "messages": self.messages, - "offset": self.offset, - "sep": self.sep, - "sep2": self.sep2, - "real_sep2": self.real_sep2 - } - - -conv_mpt = Conversation( - system="""<|im_start|>system -A conversation between a user and an LLM-based AI assistant. The assistant gives helpful and honest answers.""", - roles=("<|im_start|>user\n", "<|im_start|>assistant\n"), - version="mpt", - messages=(), - offset=0, - sep_style=SeparatorStyle.MPT, - sep="<|im_end|>", -) - - -### Used for llava-pretraining -conv_llava_plain = Conversation( - system="", - roles=("", ""), - messages=( - ), - offset=0, - sep_style=SeparatorStyle.PLAIN, - sep="\n", -) - -conv_llava_v0 = Conversation( - system="A chat between a curious human and an artificial intelligence assistant. " - "The assistant gives helpful, detailed, and polite answers to the human's questions.", - roles=("Human", "Assistant"), - messages=( - ), - offset=0, - sep_style=SeparatorStyle.SINGLE, - sep="###", -) - -conv_llava_v0_mmtag = Conversation( - system="A chat between a curious user and an artificial intelligence assistant. " - "The assistant is able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language." - "The visual content will be provided with the following format: visual content.", - roles=("Human", "Assistant"), - messages=( - ), - offset=0, - sep_style=SeparatorStyle.SINGLE, - sep="###", - version="v0_mmtag", -) - -conv_llava_v1 = Conversation( - system="A chat between a curious human and an artificial intelligence assistant. 
" - "The assistant gives helpful, detailed, and polite answers to the human's questions.", - roles=("USER", "ASSISTANT"), - version="v1", - messages=(), - offset=0, - sep_style=SeparatorStyle.TWO, - sep=" ", - sep2="", -) - -conv_llava_v1_mmtag = Conversation( - system="A chat between a curious user and an artificial intelligence assistant. " - "The assistant is able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language." - "The visual content will be provided with the following format: visual content.", - roles=("USER", "ASSISTANT"), - messages=(), - offset=0, - sep_style=SeparatorStyle.TWO, - sep=" ", - sep2="", - version="v1_mmtag", -) - -chatqa_sft = Conversation( - system="System: This is a chat between a user and an artificial intelligence assistant. " - "The assistant gives helpful, detailed, and polite answers to the user's questions.", - roles=("User", "Assistant"), - version="chatqa", - messages=(), - offset=0, - sep_style=SeparatorStyle.TWO, - sep="\n\n", - sep2="\n\n", - real_sep2="\n\n" -) - -conv_chatml = Conversation( - system="""<|im_start|>system -Answer the questions.""", - roles=("<|im_start|>user\n", "<|im_start|>assistant\n"), - version="mpt", - messages=(), - offset=0, - sep_style=SeparatorStyle.MPT, - sep="<|im_end|>", -) - -mistral_instruct = Conversation( - system="", - roles=("user", "assistant"), - version="mpt", - messages=(), - offset=0, - sep_style=SeparatorStyle.LLAMA_2, - sep="", - sep2="", -) - -llama3_instruct = Conversation( - system="<|start_header_id|>system<|end_header_id|>\n\nAnswer the questions.", - roles=("<|start_header_id|>user<|end_header_id|>\n\n", "<|start_header_id|>assistant<|end_header_id|>\n\n"), - version="mpt", - messages=(), - offset=0, - sep_style=SeparatorStyle.MPT, - sep="<|eot_id|>", -) - -conv_templates = { - "plain": conv_llava_plain, - "v0_plain": conv_llava_plain, - "llava_v0": conv_llava_v0, - "v0_mmtag": conv_llava_v0_mmtag, - "llava_v1": conv_llava_v1, - "v1_mmtag": conv_llava_v1_mmtag, - - "mpt": conv_mpt, -} diff --git a/examples/multimodal/dataloader_provider.py b/examples/multimodal/dataloader_provider.py index 4bd1b29e5..d684c690a 100644 --- a/examples/multimodal/dataloader_provider.py +++ b/examples/multimodal/dataloader_provider.py @@ -15,23 +15,24 @@ get_val_datasets, ) from megatron.core.num_microbatches_calculator import get_num_microbatches -from megatron.core.parallel_state import get_tensor_model_parallel_rank -from megatron.training import get_args, print_rank_0 +from megatron.core.parallel_state import get_tensor_model_parallel_rank, get_pipeline_model_parallel_world_size, get_pipeline_model_parallel_rank +from megatron.training import get_args from megatron.training.checkpointing import get_checkpoint_name def datasets_provider(worker_config=None): """Create multimodal train, validation and test datasets.""" args = get_args() + dname = args.data_path[0] if type(args.data_path) is list else args.data_path train_dataset = get_train_dataset( dname, batch_size=args.micro_batch_size, task_encoder=TaskEncoder(), worker_config=worker_config, - virtual_epoch_length=1000, - max_samples_per_sequence=100, - shuffle_buffer_size=100, + max_samples_per_sequence=None, + shuffle_buffer_size=None, + packing_buffer_size=args.packing_buffer_size, handler=print_error_handler, image_decode="pil", ) @@ -43,6 +44,7 @@ def datasets_provider(worker_config=None): # limit=args.eval_iters * get_num_microbatches(), task_encoder=TaskEncoder(), 
worker_config=worker_config, + packing_buffer_size=args.packing_buffer_size, handler=print_error_handler, image_decode="pil", ) @@ -61,13 +63,45 @@ def datasets_provider(worker_config=None): return train_dataset, val_datasets_without_source_datasets, None +def is_first_or_last_stage(pp_size, encoder_pipeline_model_parallel_size): + """Check if the current pipeline parallel stage is the first or last stage.""" + if pp_size == 1: # No pipeline parallelism. + return True + + is_valid_rank = False + + if encoder_pipeline_model_parallel_size == 0: + # No separate pipeline stage for the vision model. Run the dataloader on the first and last pipeline stage. + pp_rank = get_pipeline_model_parallel_rank() + is_valid_rank = pp_rank in (0, pp_size-1) + elif encoder_pipeline_model_parallel_size == 1: + # Separate pipeline stage for the vision model. Run the dataloader on the first vision and LM stage and last LM stage. + is_valid_rank = pp_rank in (0, 1, pp_size-1) + else: + raise NotImplementedError("encoder-pipeline-model-parallel-size > 1 is not supported yet") + + return is_valid_rank + + +def is_dataloader_rank(encoder_pipeline_model_parallel_size): + """Check if we should have the dataloader on this tensor and pipeline parallel rank.""" + # Run dataloader only on the first tensor parallel rank (will be broadcasted to others). + is_first_rank = get_tensor_model_parallel_rank() == 0 + + pp_size = get_pipeline_model_parallel_world_size() + is_first_rank = is_first_rank and is_first_or_last_stage(pp_size, encoder_pipeline_model_parallel_size) + + return is_first_rank + + def train_valid_test_dataloaders_provider(train_val_test_num_samples): """Build multimodal train, validation and test dataloaders.""" - if get_tensor_model_parallel_rank() != 0: - return None, None, None - args = get_args() + # Dataloader is only on specific ranks. + if not is_dataloader_rank(args.encoder_pipeline_model_parallel_size): + return None, None, None + worker_debug_path = None worker_log_level = 0 @@ -92,15 +126,18 @@ def train_valid_test_dataloaders_provider(train_val_test_num_samples): data_save_name = get_checkpoint_name( args.dataloader_save, args.iteration, + pipeline_rank=0, # Only the first pipeline parallel rank stores the dataloader checkpoint. basename=f"train_dataloader_dprank{dp_rank:03d}.pt", ) if os.path.exists(data_save_name): try: dataset_state_dict = torch.load(data_save_name, map_location="cpu") train_dataloader.restore_state_rank(dataset_state_dict["dataloader_state_dict"]) - print_rank_0(f"restored dataset state from {data_save_name}") + print(f"restored dataset state from {data_save_name}") except Exception as e: - print_rank_0("loading dataloader checkpoint failed. Skipping. " + str(e)) + print("loading dataset state failed. Skipping. " + str(e)) + else: + print(f"dataset state {data_save_name} does not exist") valid_dataloader = [ EnergonDataloader(get_loader(valid_ds, worker_config=worker_config)) diff --git a/examples/multimodal/dataset_helpers.py b/examples/multimodal/dataset_helpers.py index 6468eef9b..de76f8e45 100644 --- a/examples/multimodal/dataset_helpers.py +++ b/examples/multimodal/dataset_helpers.py @@ -1,295 +1,239 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
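For readability, the dataloader rank selection added to dataloader_provider.py above boils down to the following. This is a condensed sketch with a hypothetical function name and the Megatron parallel-state lookups replaced by plain arguments; it is not the module's actual API.

```python
# Condensed sketch of the dataloader rank selection above; tensor- and
# pipeline-parallel ranks are passed in explicitly instead of being read from
# megatron.core.parallel_state, and the function name is hypothetical.
def should_build_dataloader(tp_rank, pp_rank, pp_size, encoder_pipeline_model_parallel_size):
    if tp_rank != 0:
        return False  # Only the first tensor-parallel rank loads data; it broadcasts to the rest.
    if pp_size == 1:
        return True   # No pipeline parallelism.
    if encoder_pipeline_model_parallel_size == 0:
        return pp_rank in (0, pp_size - 1)     # First and last LM pipeline stages.
    if encoder_pipeline_model_parallel_size == 1:
        return pp_rank in (0, 1, pp_size - 1)  # Vision stage, first LM stage, last LM stage.
    raise NotImplementedError("encoder-pipeline-model-parallel-size > 1 is not supported yet")

assert should_build_dataloader(0, 3, 4, 0)      # last pipeline stage builds the dataloader
assert not should_build_dataloader(1, 0, 4, 0)  # non-zero tensor-parallel rank does not
```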
+import bisect import dataclasses -import itertools import json -import random -import re import sys import traceback from dataclasses import dataclass -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Dict, List, Optional, Tuple, Union from image_processing import get_visual_transform -import conversation as conversation_lib import numpy as np import torch -from PIL import Image, ImageDraw -from torchvision import transforms as T -from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN_INDEX +from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN +from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings from megatron.energon import ( Batch, CaptioningSample, DefaultTaskEncoder, OCRSample, + Sample, SimilarityInterleavedSample, VQASample, + MultiChoiceVQASample ) -from megatron.energon.transforms import CustomTransform, MergeTransform -from megatron.training import get_args -from megatron.training.tokenizer import build_tokenizer +from megatron.energon.task_encoder.base import stateless +from megatron.training import get_args, get_tokenizer -class RandomResize(CustomTransform): - """Resizes the image by a random scale factor in the given interval, but at most max_size""" +@dataclass +class ImageTaskSample(Sample): + __key__: str + __restore_key__: Tuple[Union[str, int, tuple], ...] + __subflavor__: Dict + __subflavors__: Dict + # (c, h, w) + imgs: List[torch.Tensor] + num_tiles: List[int] + tokens: torch.Tensor + total_len: int # Total token count in the sample, including text and image tokens + labels: torch.Tensor = None - def __init__(self, min_scale: float, max_scale: float, max_size: int): - self._min_scale = min_scale - self._max_scale = max_scale - self._max_size = max_size - def apply_transform(self, matrix: np.ndarray, dst_size: np.ndarray) -> Tuple[Any, Any, Any]: - scale = random.uniform(self._min_scale, self._max_scale) - new_size = tuple(int(x * scale) for x in dst_size) +@dataclass +class ImageTaskSamplePacked(Sample): + """Dataclass to store a single packed sample (not a batch). + + P = Number of sub-samples in the packed sample + seq_len = Total sequence length + num_imgs = Number of images across all samples in the packed sample + """ + + __key__: str # Sample name + __restore_key__: Tuple[Union[str, int, tuple], ...] + __subflavor__: Dict # Sample metadata. Deprecated. + __subflavors__: Dict # Sample metadata. + tokens: torch.Tensor # Input tokens packed into a single tensor (seq_len,) + labels: torch.Tensor # Target tokens packed into a single tensor (seq_len,) + imgs: List[torch.Tensor] # Input images + num_tiles: List[int] # Number of tiles for each image of each sample (num_imgs) + max_length: int # Maximum length across sub-samples. + cu_lengths: List[int] # Cumulative length of each sub-sample in this packed sample incl. text and image tokens (P,) - if max(new_size) > self._max_size: - scale = self._max_size / max(new_size) - new_size = tuple(int(x * scale) for x in dst_size) - matrix = self.scale(scale, scale) @ matrix - dst_size = np.array(new_size, dtype=dst_size.dtype) +# Typing for the resulting batch data after encode_batch() +@dataclass +class ImageTaskBatchPacked(Batch): + """Dataclass to store a batch of packed samples. 
- return matrix, dst_size, (self.__class__.__name__, scale) + N = Batch size + P = Number of samples in the packed sample + seq_len = Maximum sequence length + num_imgs = Number of images across all samples in the packed sample + """ + __key__: List[str] # Sample names + __restore_key__: Tuple[Union[str, int, tuple], ...] + __subflavor__: Dict # Sample metadata. Deprecated. + __subflavors__: List[Dict] # Sample metadatas. + tokens: torch.Tensor # Input tokens packed and padded (N, seq_len) + labels: torch.Tensor # Target tokens packed and padded (N, seq_len) + imgs: torch.Tensor # All image tiles stacked into a single tensor (num_tiles, C, H, W) + num_tiles: List[List[int]] # Number of tiles per image (N, num_imgs) + max_lengths: List[int] # Maximum length across sub-samples (N,) + cu_lengths: List[List[int]] # Cumulative length of each sub-sample in each packed sample of the batch (N, P) -class RandomResizeLongEdge(CustomTransform): - """Resizes the image's longer edge to a random length between min_size and max_size pixels.""" - def __init__(self, min_size: int, max_size: int): - self._min_size = min_size - self._max_size = max_size +# Based on https://github.com/hiyouga/LLaMA-Factory/blob/641d0dab08d96a93c34657742213d8994d9ed476/src/llamafactory/data/processors/processor_utils.py#L19 +# Copyright (c) 2024 LLaMA-Factory. Apache license 2.0. +def search_for_fit(numbers: List[int], capacity: int) -> int: + """Finds the index of largest number that fits into the knapsack with the given capacity.""" + index = bisect.bisect(numbers, capacity) + return -1 if index == 0 else (index - 1) - def apply_transform(self, matrix: np.ndarray, dst_size: np.ndarray) -> Tuple[Any, Any, Any]: - new_long = random.randint(self._min_size, self._max_size) - if dst_size[0] > dst_size[1]: # h > w - new_w, new_h = int(new_long * dst_size[1] / dst_size[0]), new_long - else: # w > h - new_w, new_h = new_long, int(new_long * dst_size[0] / dst_size[1]) - new_size = (new_h, new_w) - matrix = self.scale(new_w / dst_size[1], new_h / dst_size[0]) @ matrix - dst_size = np.array(new_size, dtype=dst_size.dtype) +# Based on https://github.com/hiyouga/LLaMA-Factory/blob/641d0dab08d96a93c34657742213d8994d9ed476/src/llamafactory/data/processors/processor_utils.py#L27 +# Copyright (c) 2024 LLaMA-Factory. Apache license 2.0. +def greedy_knapsack(item_sizes: List[int], samples: List, max_capacity: int) -> List: + """Greedy algorithm with binary search for the knapsack problem. - return matrix, dst_size, (self.__class__.__name__, new_size) + Pack as many samples as possible given a maximum capacity and capacities of individual samples. + Used if sequence packing is enabled. + """ + assert len(item_sizes) == len(samples), "sample lengths and samples must have the same length." + knapsacks = [] -class RandomPad(CustomTransform): - """Pads the image to the given size, randomly choosing the position of the image within the new larger image. - If the image is already larger than the given size, it will not be padded in that direction(s).""" + if len(item_sizes) == 0: + return knapsacks - def __init__(self, size: Tuple[int, int]): - self._new_size = size # h, w + # Sort sample lengths and samples together. 
+ sorted_item_sizes, sorted_samples = zip(*sorted(zip(item_sizes, samples), key=lambda x: x[0])) + sorted_item_sizes = list(sorted_item_sizes) + sorted_samples = list(sorted_samples) - def apply_transform(self, matrix: np.ndarray, dst_size: np.ndarray) -> Tuple[Any, Any, Any]: - h_pad = max(self._new_size[0] - dst_size[0], 0) - w_pad = max(self._new_size[1] - dst_size[1], 0) + # Check if all samples fit in the knapsack capacity. + if sorted_item_sizes[-1] > max_capacity: + raise ValueError(f"knapsack: A sample is larger {sorted_item_sizes[-1]} than the max_sequence_length {max_capacity}.") - if h_pad == 0 and w_pad == 0: - return matrix, dst_size, (self.__class__.__name__, None) - else: - # TODO: fix me - # top = random.randint(0, h_pad) - # left = random.randint(0, w_pad) - top = 0 - left = 0 - - matrix = self.translate(left, top) @ matrix - dst_size = np.array(self._new_size, dtype=dst_size.dtype) - return matrix, dst_size, (self.__class__.__name__, (top, left)) - - -def _get_ocr_document_visual_transform(IMG_H=1024, IMG_W=1024): - document_visual_transform = T.Compose( - [ - MergeTransform( - [ - # T.RandomResizedCrop(size=FINAL_SIZE, scale=(0.5, 1.0), ratio=(0.8, 1.2)), - RandomResizeLongEdge(960, 1008), # Note: 1008 comes from list(range(960, 1024, 16))[-1] - T.RandomRotation(5, interpolation=T.InterpolationMode.BILINEAR), - T.RandomPerspective(distortion_scale=0.1, p=0.1), - RandomPad((IMG_H, IMG_W)), - ] - ), - T.ColorJitter(brightness=(0.8, 1.2), contrast=(0.7, 1.0)), - T.RandomGrayscale(p=0.5), - T.RandomInvert(p=0.5), - T.RandomAdjustSharpness(sharpness_factor=0.0, p=0.5), - T.RandomAdjustSharpness(sharpness_factor=2.0, p=0.5), - # LogImage(), - # T.ToTensor(), - # T.Normalize(IMAGE_MEAN, IMAGE_STD), - ] - ) - return document_visual_transform - -def _get_ocr_document_identity_transform(IMG_H=1024, IMG_W=1024): - long_edge = max(IMG_H, IMG_W) - document_identity_transform = T.Compose( - [ - MergeTransform( - [ - RandomResizeLongEdge(long_edge, long_edge), - RandomPad((long_edge, long_edge)), - ] - ) - ] - ) - return document_identity_transform - -def _get_ocr_paragraph_visual_transform(IMG_H=1024, IMG_W=1024): - paragraph_visual_transform = T.Compose( - [ - MergeTransform( - [ - # T.RandomResizedCrop(size=FINAL_SIZE, scale=(0.5, 1.0), ratio=(0.8, 1.2)), - RandomResize(0.5, 2.0, min(IMG_H, IMG_W)), #FINAL_SIZE), - T.RandomRotation(1, interpolation=T.InterpolationMode.BILINEAR), - T.RandomPerspective(distortion_scale=0.1, p=0.1), - RandomPad((IMG_H, IMG_W)), - ] - ), - T.ColorJitter(brightness=(0.8, 1.2), contrast=(0.7, 1.0)), - T.RandomGrayscale(p=0.5), - T.RandomInvert(p=0.5), - # T.RandomAdjustSharpness(sharpness_factor=0.0, p=0.5), - # T.RandomAdjustSharpness(sharpness_factor=2.0, p=0.5), - # LogImage(), - # T.ToTensor(), - # T.Normalize(IMAGE_MEAN, IMAGE_STD), - ] - ) - return paragraph_visual_transform + while sorted_item_sizes: + current_knapsack = [] + remaining_capacity = max_capacity -# Type for intermediate batch, after batch() -@dataclass -class ImageTaskSample: - __key__: str - __subflavors__: Dict - # (c, h, w) - imgs: List[torch.Tensor] - num_tiles: List[int] - text: np.ndarray - prompt_len: np.int64 - target: torch.Tensor = None + while True: + idx = search_for_fit(sorted_item_sizes, remaining_capacity) + if idx == -1: + break # Can't fit more samples. 
+ remaining_capacity -= sorted_item_sizes[idx] -# Typing for the resulting batch data after encode_batch() -@dataclass -class ImageTaskBatch(Batch): - __keys__: List[str] - __subflavors__: List[Dict] - # (num_tiles, c, h, w) - imgs: torch.Tensor - num_tiles: List[int] - # (n, seq_len) - text: torch.Tensor - # (n, 1) - prompt_len: torch.Tensor - # (n, seq_len) - target: torch.Tensor - -class IdentitySplitter(object): - def tokenize(self, *text): - return text - -class Tokenizer: - def __init__(self): - - args = get_args() - self.args = args - - self.initializer() - - def initializer(self): - # Use Encoder class as a container for global data - Tokenizer.tokenizer = build_tokenizer(self.args) - if hasattr(Tokenizer.tokenizer, 'eod'): - self.eod_token = Tokenizer.tokenizer.eod - elif hasattr(Tokenizer.tokenizer, 'eos_id'): - self.eod_token = Tokenizer.tokenizer.eos_id - else: - raise AttributeError('No eod token found in Tokenizer') - self.split_token = 313131 - - if ( - hasattr(self.args, "split_sentences") and self.args.split_sentences - ): # default false - if not nltk_available: - print("NLTK is not available to split sentences.") - exit() - library = "tokenizers/punkt/{}.pickle".format("english") - # print("loading: " + library) - splitter = nltk.load(library) - if self.args.keep_newlines: - # this prevents punkt from eating newlines after sentences - Tokenizer.splitter = nltk.tokenize.punkt.PunktSentenceTokenizer( - train_text=splitter._params, lang_vars=CustomLanguageVars() - ) - else: - Tokenizer.splitter = splitter - else: - Tokenizer.splitter = IdentitySplitter() + sorted_item_sizes.pop(idx) + sample = sorted_samples.pop(idx) + current_knapsack.append(sample) + + knapsacks.append(current_knapsack) - def __call__(self, text: str, padded: bool = True): # -> torch.Tensor: - sentence = Tokenizer.splitter.tokenize(text)[0] - sentence = Tokenizer.tokenizer.tokenize(sentence) - return sentence + return knapsacks -class TaskEncoder(DefaultTaskEncoder[OCRSample, OCRSample, ImageTaskBatch, dict]): - """A simple task encoder for captioning.""" +class TaskEncoder(DefaultTaskEncoder[OCRSample, OCRSample, ImageTaskBatchPacked, dict]): + """A simple task encoder for VLMs.""" def __init__( self ): - # Specify the batch_type for default batching (batching is performed here "manually" by - # overwriting the `batch` method) super().__init__() self.args = get_args() - self.tokenizer = Tokenizer() - self.manual_prompts = json.load(open(self.args.prompt_path)) - self.seq_len = self.args.dataloader_seq_length + self.tokenizer = get_tokenizer() + with open(self.args.prompt_path, "r") as f: + self.manual_prompts = json.load(f) + self.dataloader_seq_length = self.args.dataloader_seq_length # Always return samples of this length. + self.packing_seq_length = self.args.packing_seq_length # Packing sequence length, if packing is enabled. 
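As a quick illustration of the packing helpers above (assuming search_for_fit and greedy_knapsack are importable as defined), the greedy knapsack groups sample lengths into bins that never exceed the packing budget:

```python
# Usage sketch for greedy_knapsack() defined above; plain strings stand in for
# ImageTaskSample payloads.
lengths = [300, 1200, 700, 900, 100]
samples = ["a", "b", "c", "d", "e"]

bins = greedy_knapsack(lengths, samples, max_capacity=2048)
# [['b', 'c', 'e'], ['d', 'a']] -- bin totals are 1200+700+100=2000 and
# 900+300=1200, both within the 2048-token packing budget.
```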
+ self.is_packing_enabled = self.args.packing_buffer_size is not None and self.args.packing_buffer_size > 0 + + if self.dataloader_seq_length and self.packing_seq_length: + assert self.dataloader_seq_length >= self.packing_seq_length, "dataloader sequence length must be greater than or equal to the packing sequence length" + + if self.is_packing_enabled: + assert self.packing_seq_length > 0, "packing sequence length must be set" + + self.num_image_embeddings_per_tile = get_num_image_embeddings( + self.args.img_h, + self.args.img_w, + self.args.patch_dim, + self.args.vision_model_type, + self.args.disable_vision_class_token, + 1, + self.args.pixel_shuffle, + self.args.use_tile_tags, + ) self.txt_to_token_dict = {} self.img_h, self.img_w = self.args.img_h, self.args.img_w - self.ocr_document_visual_transform = _get_ocr_document_visual_transform(self.img_h, self.img_w) - self.ocr_document_identity_transform = _get_ocr_document_identity_transform(self.img_h, self.img_w) - self.ocr_paragraph_visual_transform = _get_ocr_paragraph_visual_transform(self.img_h, self.img_w) + def _get_total_seq_length(self, input_ids, num_tiles): + """Calculate expected sequence length given text tokens length and number of tiles.""" + total_num_images = len(num_tiles) + total_num_tiles = sum(num_tiles) + total_len = len(input_ids) + total_num_tiles * self.num_image_embeddings_per_tile - total_num_images + return total_len + + def _truncate_for_packing(self, input_ids, target, num_tiles): + """Truncate tokens and labels if they exceed packing sequence length.""" + total_num_images = len(num_tiles) + total_num_tiles = sum(num_tiles) + total_img_embeddings_len = total_num_tiles * self.num_image_embeddings_per_tile + max_text_tokens = self.packing_seq_length - total_img_embeddings_len + total_num_images + + input_ids = input_ids[:max_text_tokens] + target = target[:max_text_tokens] + + # If truncate causes all labels to be ignored, then skip the sample + if (target == IGNORE_INDEX).all(): + raise ValueError(f"all targets will be ignored after truncation: {input_ids}") + + return input_ids, target + @stateless(restore_seeds=True) def encode_sample(self, sample: Union[CaptioningSample, OCRSample, VQASample, SimilarityInterleavedSample]): if isinstance(sample, OCRSample): - yield self.encode_ocr(sample) + if "pdfa" in sample.__key__: + yield self.combined_ocr_encoder(sample, task_type='encode_pdf') + elif "multi" in sample.__key__: + yield self.combined_ocr_encoder(sample, task_type='_encode_ocr') + else: + yield self.combined_ocr_encoder(sample, task_type='encode_ocr_ref') elif isinstance(sample, CaptioningSample): yield self.encode_captioning(sample) elif isinstance(sample, VQASample): - is_llava_training = sample.__subflavors__['is_llava_training'] if 'is_llava_training' in sample.__subflavors__ else False + is_llava_training = sample.__subflavors__["is_llava_training"] if "is_llava_training" in sample.__subflavors__ else False if "llava" in sample.__key__ or is_llava_training: yield self.encode_llava_pretrain(sample) else: - yield self.encode_vqa(sample) + yield self.encode_any_single_turn_vqa(sample) elif isinstance(sample, SimilarityInterleavedSample): - if "llava" or "video" in sample.__key__: - yield self.encode_llava_sft(sample) - else: - raise NotImplementedError('Sample format not supported') + yield self.encode_llava_sft(sample) + elif isinstance(sample, MultiChoiceVQASample): + yield self.encode_any_single_turn_vqa(sample) else: - raise NotImplementedError('Sample format not supported') + raise 
NotImplementedError("Sample format not supported", sample) def encode_captioning(self, sample: CaptioningSample): + """Encode CaptioningSample.""" augment = sample.__subflavors__.get("augmentation") - conv_format = sample.__subflavors__['conv_format'] if 'conv_format' in sample.__subflavors__ else 'mistral' imgs = get_visual_transform( sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, + self.args.vision_model_type, ) num_tiles = [len(imgs)] - prompt_list = self.manual_prompts["CaptioningPretraining"]["llava"] + prompt_list = self.manual_prompts["CaptioningPretraining"]["raw"] prompt_idx = np.random.randint(len(prompt_list)) cur_prompt = prompt_list[prompt_idx] @@ -302,89 +246,71 @@ def encode_captioning(self, sample: CaptioningSample): caption_list = caption.split('\n') caption = np.random.choice(caption_list) - if conv_format == 'llama3_sft': - conv = conversation_lib.llama3_instruct.copy() - sep = conv.sep - elif conv_format == "mistral": - conv = conversation_lib.mistral_instruct.copy() - conv = conv.sep2 - - conversation = cur_prompt + caption + sep + conv = [ + # Note: no system message. + {"role": "user", "content": cur_prompt}, + {"role": "assistant", "content": caption}, + ] - input_ids = np.array(tokenizer_image_token(self.args, conversation, self.tokenizer, has_image=True)) - target = input_ids.copy() + input_ids, target = self.tokenizer.tokenize_conversation(conv, True, False) - prompt_len = len(tokenizer_image_token(self.args, cur_prompt, self.tokenizer)) - target[:prompt_len] = IGNORE_INDEX + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) return ImageTaskSample( __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, __subflavors__=sample.__subflavors__, imgs=imgs, num_tiles=num_tiles, - text=input_ids, - prompt_len=prompt_len, - target=target, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), ) def encode_llava_pretrain(self, sample: VQASample): - augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False - use_chat_format = sample.__subflavors__['use_chat_format'] if 'use_chat_format' in sample.__subflavors__ else False - conv_format = sample.__subflavors__['conv_format'] if 'conv_format' in sample.__subflavors__ else "mistral" + """Encode pretrain sample in LLAVA style.""" + augment = sample.__subflavors__.get("augmentation", False) imgs = get_visual_transform( sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, + self.args.vision_model_type, ) num_tiles = [len(imgs)] - assert "" in sample.context - has_image = True - - if use_chat_format: - prompt_idx = np.random.randint(len(self.manual_prompts["Captioning"]["raw"])) - prompt = self.manual_prompts["Captioning"]["raw"][prompt_idx] - - sample.context = "User: " + "\n" + prompt + " Assistant: " - conversation = sample.context + sample.answers + conversation_lib.mistral_instruct.sep - else: - # LLAVA training: override text-prompt with just IMAGE_TOKEN_INDEX - sample.context = "" + "\n" - if conv_format == 'llama3_sft': - conversation = sample.context + sample.answers + conversation_lib.llama3_instruct.sep - elif conv_format == "mistral": - conversation = sample.context + sample.answers + conversation_lib.mistral_instruct.sep2 + # LLAVA training: override text-prompt with just the 
image. + conv = [ + # Note: no system message. + {"role": "user", "content": "\n"}, + {"role": "assistant", "content": sample.answers}, + ] - input_ids = np.array(tokenizer_image_token(self.args, conversation, self.tokenizer, has_image=has_image)) - target = input_ids.copy() + input_ids, target = self.tokenizer.tokenize_conversation(conv, True, False) - prompt_len = len(tokenizer_image_token(self.args, sample.context, self.tokenizer, has_image=has_image)) - target[:prompt_len] = IGNORE_INDEX + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) return ImageTaskSample( __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, __subflavors__=sample.__subflavors__, imgs=imgs, num_tiles=num_tiles, - text=input_ids, - prompt_len=prompt_len, - target=target, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), ) - # Based on https://github.com/haotian-liu/LLaVA/blob/c121f0432da27facab705978f83c4ada465e46fd/llava/train/train.py#L500 def encode_llava_sft(self, sample: SimilarityInterleavedSample): + """Encode SFT sample.""" augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False - use_chat_format = sample.__subflavors__['use_chat_format'] if 'use_chat_format' in sample.__subflavors__ else False - has_image = sample.__subflavors__['has_image'] if 'has_image' in sample.__subflavors__ else False has_video = sample.__subflavors__['has_video'] if 'has_video' in sample.__subflavors__ else False - has_visual_data = has_image or has_video - conv_format = sample.__subflavors__['conv_format'] if 'conv_format' in sample.__subflavors__ else "mistral" + has_image = sample.__subflavors__['has_image'] if 'has_image' in sample.__subflavors__ else False + has_image = has_image or (hasattr(sample, "images") and len(sample.images) > 0) - if has_image: - imgs = get_visual_transform( - sample.images[0], self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, - ) - num_tiles = [len(imgs)] - elif has_video: + if has_video: # Grab the selected frames of the video as a tensor with shape # fhwc: (num_frames, height, width, num_channels). 
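To make the sequence-length bookkeeping in _get_total_seq_length above concrete: every tile contributes num_image_embeddings_per_tile embeddings, while the single per-image placeholder token in the text is replaced, hence the subtraction of the image count. The numbers below are purely illustrative.

```python
# Illustrative arithmetic for _get_total_seq_length(); 576 embeddings per tile
# is only an example value (e.g. a 336x336 tile with 14x14 patches).
num_image_embeddings_per_tile = 576
input_ids = list(range(120))  # 120 text tokens, one of which is the image placeholder
num_tiles = [3]               # one image split into three tiles

total_len = len(input_ids) + sum(num_tiles) * num_image_embeddings_per_tile - len(num_tiles)
assert total_len == 120 + 3 * 576 - 1 == 1847
```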
video_fhwc = sample.images[0].permute(0, 2, 3, 1) @@ -396,132 +322,65 @@ def encode_llava_sft(self, sample: SimilarityInterleavedSample): imgs += get_visual_transform( video_frame_hwc, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, - self.args.use_thumbnail, augment=False) + self.args.use_thumbnail, augment, self.args.vision_model_type) + num_tiles = [len(imgs)] + elif has_image: + imgs = get_visual_transform( + sample.images[0], self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, + self.args.vision_model_type, + ) num_tiles = [len(imgs)] else: imgs = num_tiles = [] sample.__key__ = "{}-{}".format("no-image", sample.__key__) - if conv_format == 'llama3_sft': - conv = conversation_lib.llama3_instruct.copy() - elif conv_format == "mistral": - conv = conversation_lib.mistral_instruct.copy() - - roles = {"human": conv.roles[0], "gpt": conv.roles[1]} - - if use_chat_format: - source = sample.texts - if roles[source[0]["from"]] != conv.roles[0]: - # Skip the first one if it is not from human - source = source[1:] - - conv.messages = [] - for j, sentence in enumerate(source): - role = roles[sentence["from"]] - assert role == conv.roles[j % 2], sentence - conv.append_message(role, sentence["value"]) - conversation = conv.get_prompt() - - ### Tokenize conversations - input_ids = tokenizer_image_token(self.args, conversation, self.tokenizer, has_visual_data) - - input_ids = torch.LongTensor(input_ids) - target = input_ids.clone() - - if conv.sep_style == conversation_lib.SeparatorStyle.MPT: - # Mask targets - sep = conv.sep + conv.roles[1] - - total_len = int((target != self.tokenizer.eod_token).sum()) - - rounds = conversation.split(conv.sep) - re_rounds = [conv.sep.join(rounds[:3])] # system + user + gpt - for conv_idx in range(3, len(rounds), 2): - re_rounds.append(conv.sep.join(rounds[conv_idx:conv_idx+2])) # user + gpt - - cur_len = 0 - target[:cur_len] = IGNORE_INDEX - - for i, rou in enumerate(re_rounds): - if rou == "": - break - - rou += conv.sep - - parts = rou.split(sep) - - if len(parts) != 2: - break - parts[0] += sep - - round_len = len(tokenizer_image_token(self.args, rou, self.tokenizer, has_visual_data)) - instruction_len = len(tokenizer_image_token(self.args, parts[0], self.tokenizer, has_visual_data)) - - if conv_format == 'llama3_sft' and i > 0: - round_len -= 1 - instruction_len -= 1 - - target[cur_len : cur_len + instruction_len] = IGNORE_INDEX - - cur_len += round_len - - target[cur_len:] = IGNORE_INDEX - - elif conv.sep_style == conversation_lib.SeparatorStyle.TWO: - ### Mask targets - sep = conv.sep + conv.roles[1] + ": " - - total_len = int((target != self.tokenizer.eod_token).sum()) - - rounds = conversation.split(conv.sep2) - - cur_len = 0 - - for i, rou in enumerate(rounds): - if rou == "": - break - - rou += conv.sep2 # put back conv.sep2 since we will lose it while we conversation.split above with conv.sep2 - - parts = rou.split(sep) + conversation = [] + # Note: Some tokenizers may ignore the system prompt. 
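The conversation assembled here in encode_llava_sft is just a list of role/content dicts; for a typical SFT sample whose turns never mention the image, it ends up as the sketch below. The IMAGE_TOKEN value "<image>" is an assumption in this sketch.

```python
# Hedged illustration of the conversation list built for an SFT sample; the
# IMAGE_TOKEN value "<image>" is an assumption here.
texts = [{"from": "human", "value": "What is shown here?"},
         {"from": "gpt", "value": "A cat sleeping on a sofa."}]

conversation = [
    {"role": "system", "content": "Answer the questions."},
    {"role": "user", "content": "<image>\nWhat is shown here?"},   # token prepended to the first user turn
    {"role": "assistant", "content": "A cat sleeping on a sofa."},
]
# This list goes straight to tokenizer.tokenize_conversation(...) rather than
# through the string templates of the deleted conversation.py.
```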
+ conversation.append({"role": "system", "content": "Answer the questions."}) - if len(parts) != 2: - break - parts[0] += sep + has_image_token = False - round_len = len(tokenizer_image_token(self.args, rou, self.tokenizer, has_visual_data)) - instruction_len = len(tokenizer_image_token(self.args, parts[0], self.tokenizer, has_visual_data)) - 2 + for text in sample.texts: + if IMAGE_TOKEN in text["value"]: + has_image_token = True - target[cur_len : cur_len + instruction_len] = IGNORE_INDEX - - cur_len += round_len + if text["from"] == "human": + role = "user" + elif text["from"] == "gpt": + role = "assistant" + else: + raise RuntimeError(f"unexpected role {text['from']} in {sample.texts}") - target[cur_len:] = IGNORE_INDEX + turn = {"role": role, "content": text["value"]} + conversation.append(turn) - elif conv.sep_style == conversation_lib.SeparatorStyle.LLAMA_2: - raise NotImplementedError("this tokenizer is not supported yet with this data type") + # If the sample contains an image but none of the user messages has an image token, + # then add it to the first user message. + if len(imgs) > 0 and not has_image_token: + for turn in conversation: + if turn["role"] == "user": + turn["content"] = f"{IMAGE_TOKEN}\n" + turn["content"] + break - if cur_len != total_len: - target[:] = IGNORE_INDEX + input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) - raise Exception( - f"WARNING: tokenization mismatch: {cur_len} vs. {total_len}. Something is wrong, please fix!" - ) - - else: - return NotImplementedError + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) return ImageTaskSample( __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, __subflavors__=sample.__subflavors__, imgs=imgs, num_tiles=num_tiles, - text=input_ids, - prompt_len=instruction_len, - target=target, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), ) - def encode_vqa(self, sample: VQASample): + def encode_any_single_turn_vqa(self, sample): + """Encode MultiChoiceVQA or VQA sample.""" augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False has_video = sample.__subflavors__['has_video'] if 'has_video' in sample.__subflavors__ else False @@ -537,104 +396,199 @@ def encode_vqa(self, sample: VQASample): imgs += get_visual_transform( video_frame_hwc, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, - self.args.use_thumbnail, augment=False) + self.args.use_thumbnail, augment, self.args.vision_model_type) else: imgs = get_visual_transform( - sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, + sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, + self.args.use_thumbnail, augment, self.args.vision_model_type, ) + num_tiles = [len(imgs)] - has_image = True - if "" not in sample.context: - sample.context = "" + sample.context + if isinstance(sample, MultiChoiceVQASample): + cur_prompt = format_multichoice_question(sample.context, sample.choices) + if "" not in cur_prompt: + cur_prompt = "\n" + cur_prompt + cur_answer = format_multichoice_answer(sample.correct_choice_idx) + elif isinstance(sample, VQASample): + if 'docvqa' in sample.__key__: + prompt_list = self.manual_prompts["VQASFT"]["docvqa"] + elif sample.__subflavors__.get("VQASFT"): + prompt_list = 
self.manual_prompts["VQASFT"]["raw"] + else: + prompt_list = ["{}"] + + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] - if sample.context[-1:] != "\n": - sample.context = sample.context + "\n" + cur_prompt = cur_prompt.format(sample.context) - if isinstance(sample.answers, list): - answer_list = sample.answers - weight_list = np.array(sample.answer_weights).astype(np.float32) - weight_list = weight_list / np.sum(weight_list) - answer_idx = np.random.choice(weight_list.shape[0], 1, p=weight_list)[0] - answer = answer_list[answer_idx] + if "" not in cur_prompt: + cur_prompt = "\n" + cur_prompt + + if isinstance(sample.answers, list): + answer_list = sample.answers + weight_list = np.array(sample.answer_weights).astype(np.float32) + weight_list = weight_list / np.sum(weight_list) + answer_idx = np.random.choice(weight_list.shape[0], 1, p=weight_list)[0] + cur_answer = answer_list[answer_idx] + else: + cur_answer = sample.answers else: - answer = sample.answers + raise NotImplementedError("Unsupported data type provided", sample) - conversation = sample.context + answer - text = np.array(tokenizer_image_token(self.args, conversation, self.tokenizer, has_image=has_image)) + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": cur_prompt}, + {"role": "assistant", "content": str(cur_answer)}, + ] - prompt_len = len(tokenizer_image_token(self.args, sample.context, self.tokenizer, has_image=has_image)) + input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) - target = text.copy() - target[:prompt_len] = IGNORE_INDEX + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) return ImageTaskSample( __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, __subflavors__=sample.__subflavors__, imgs=imgs, num_tiles=num_tiles, - text=text, - prompt_len=prompt_len, - target=target, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), ) - def encode_ocr(self, sample: OCRSample) -> ImageTaskSample: - if sample.__subflavors__["type"] == "document": - visual_transform = self.ocr_document_visual_transform - elif sample.__subflavors__["type"] == "paragraph": - visual_transform = self.ocr_paragraph_visual_transform - elif sample.__subflavors__["augmentation"] == False: - visual_transform = self.ocr_document_identity_transform - else: - raise ValueError(f"Unknown subflavor {sample.__subflavors__}") - - if sample.words_boxes is not None and sample.words_boxes.shape[1] >= 5: - # Boxes with conf below 0.9 are skipped - filter_words_mask = sample.words_boxes[:, 4] < 0.9 - filter_boxes = sample.words_boxes[filter_words_mask, :4] - for x, y, x2, y2 in filter_boxes: - if isinstance(sample.image, Image.Image): - draw = ImageDraw.Draw(sample.image) - draw.rectangle([int(x), int(y), (int(x2), int(y2))], fill=0) - else: - sample.image[:, int(y) : int(y2) + 1, int(x) : int(x2) + 1] = 0 - - text = " ".join( - text for skip, text in zip(filter_words_mask, sample.words_text) if not skip - ) - else: - text = " ".join(sample.text.splitlines()) + def combined_ocr_encoder(self, sample, task_type): + """Encode OCR samples.""" + augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False - match = re.search(r'"text_sequence": "(.*?)"', text) - if match: - text = match.group(1) + if task_type == "encode_pdf": + sample, 
cur_prompt, cur_answer = self.encode_pdf_prompt(sample) + elif task_type == "encode_ocr_ref": + sample, cur_prompt, cur_answer = self.encode_ocr_ref_prompt(sample) + elif task_type == "_encode_ocr": + sample, cur_prompt, cur_answer = self.encode_ocr_prompt(sample) - img = visual_transform(sample.image) - img = (torch.Tensor(np.array(img)).permute(2, 0, 1) - self.pixel_mean) / self.pixel_std - img = torch.nn.functional.pad(img, (0, self.img_w - img.shape[2], 0, self.img_h - img.shape[1])) + imgs = get_visual_transform( + sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, + self.args.use_thumbnail, augment, self.args.vision_model_type, + ) + num_tiles = [len(imgs)] - # randomly select a prompt - prompt_idx = np.random.randint(len(self.manual_prompts["OCR"]["raw"])) - cur_prompt = self.manual_prompts["OCR"]["raw"][prompt_idx] + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": cur_prompt}, + {"role": "assistant", "content": str(cur_answer)}, + ] - if cur_prompt not in self.txt_to_token_dict: - self.txt_to_token_dict[cur_prompt] = self.tokenizer(cur_prompt) - cur_prompt = self.txt_to_token_dict[cur_prompt] + input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) - text_sample = self.tokenizer(text) - prompt_len = len(cur_prompt) - text_sample = np.concatenate([cur_prompt, text_sample]) + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) return ImageTaskSample( __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, __subflavors__=sample.__subflavors__, - imgs=[img], - num_tiles=[1], - text=text_sample, - prompt_len=prompt_len + imgs=imgs, + num_tiles=num_tiles, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), ) - def batch(self, samples: List[ImageTaskSample]) -> ImageTaskBatch: + def encode_pdf_prompt(self, sample: OCRSample) -> ImageTaskSample: + """Encode OCR sample.""" + prompt_list = self.manual_prompts["DocPretraining"]["raw"] + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + if "" not in cur_prompt: + cur_prompt = "\n" + cur_prompt + + # Make sure there is no extra tag. 
+ sample.text = sample.text.replace("", "") + + caption = sample.text.strip() + + split_by_line_flag = sample.__subflavors__.get("SplitByLine") + if split_by_line_flag: + caption_list = caption.split('\n') + caption = np.random.choice(caption_list) + cur_answer = caption + + return sample, cur_prompt, cur_answer + + def encode_ocr_ref_prompt(self, sample: OCRSample) -> ImageTaskSample: + """Encode OCR sample.""" + ref = sample.text + region = sample.words_boxes + + # Make sure there is no extra tag + ref = ref.replace("", "") + + if len(region) == 4: + region = f"({region[0]},{region[1]}),({region[2]},{region[3]})" + else: + region = f"({region[0]},{region[1]}),({region[2]},{region[3]}),({region[4]},{region[5]}),({region[6]},{region[7]})" + + # Randomly choose between two tasks + task_idx = np.random.randint(2) + if task_idx == 0: + # Referring Grounding + prompt_list = self.manual_prompts["DocPretraining"]["referring_grounding"] + prompt_content = ref + answer = region + else: + # Grounded OCR + prompt_list = self.manual_prompts["DocPretraining"]["grounded_ocr"] + prompt_content = region + answer = ref + + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + cur_prompt = cur_prompt.format(prompt_content) + if "" not in cur_prompt: + cur_prompt = "\n" + cur_prompt + + return sample, cur_prompt, answer + + def bbox_coord_to_label(self, text, bbox): + """Format bbox coordinates as text.""" + assert len(bbox) == 4 or len(bbox) == 8 + + # Make sure there is no extra tag + text = text.replace("", "") + + if len(bbox) == 4: + label_str = f"{text}({bbox[0]},{bbox[1]}),({bbox[2]},{bbox[3]})" + else: + label_str = f"{text}({bbox[0]},{bbox[1]}),({bbox[2]},{bbox[3]}),({bbox[4]},{bbox[5]}),({bbox[6]},{bbox[7]})" + + return label_str + + def encode_ocr_prompt(self, sample: OCRSample) -> ImageTaskSample: + """Encode OCR sample.""" + if isinstance(sample.words_boxes[0], int): + answer = self.bbox_coord_to_label(sample.text, sample.words_boxes) + elif isinstance(sample.words_boxes[0], list): + answer = "" + for i, bbox in enumerate(sample.words_boxes): + answer += self.bbox_coord_to_label(sample.words_text[i], bbox) + + prompt_list = self.manual_prompts["DocPretraining"]["ocr_multi"] + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + + if "" not in cur_prompt: + cur_prompt = "\n" + cur_prompt + cur_answer = answer + + return sample, cur_prompt, cur_answer + + def batch(self, samples: List[Union[ImageTaskSample, ImageTaskSamplePacked]]) -> ImageTaskBatchPacked: # Stack images to [num_tiles, c, h, w]. If there are no images (text-only), then use a dummy image. imgs = [img for s in samples for img in s.imgs] if len(imgs) > 0: @@ -642,45 +596,128 @@ def batch(self, samples: List[ImageTaskSample]) -> ImageTaskBatch: else: imgs = torch.tensor([[0]], dtype=torch.float32) - # Put tile counts to a single tensor. If there are no images (text-only), then use a dummy tensor. - num_tiles = torch.tensor([n for s in samples for n in s.num_tiles], dtype=torch.int) - if len(num_tiles) == 0: - num_tiles = torch.tensor([[0]], dtype=torch.int) - - # If the user hasn't defined a target sequence length, then use the max along the sample lengths. - max_seq_len = self.seq_len + # If the user hasn't defined a target dataloader sequence length, then use the max along the sample lengths. 
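One detail from encode_any_single_turn_vqa above worth spelling out with numbers: when a VQA sample carries several weighted ground-truth answers, one answer is drawn at random using the normalized weights.

```python
import numpy as np

# Weighted ground-truth sampling as in encode_any_single_turn_vqa() above.
answer_list = ["two", "2", "a pair"]
weight_list = np.array([3.0, 6.0, 1.0], dtype=np.float32)
weight_list = weight_list / np.sum(weight_list)           # [0.3, 0.6, 0.1]

answer_idx = np.random.choice(weight_list.shape[0], 1, p=weight_list)[0]
cur_answer = answer_list[answer_idx]                      # "2" is drawn 60% of the time
```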
+ max_seq_len = self.dataloader_seq_length if not max_seq_len: - max_seq_len = max(len(s.text) for s in samples) + max_seq_len = max(len(s.tokens) for s in samples) - text_mat = np.full((len(samples), max_seq_len), self.tokenizer.eod_token, dtype=np.int64) + tokens = np.full((len(samples), max_seq_len), self.tokenizer.pad, dtype=np.int64) # +1 to accommodate shift to left by one later. - target_mat = np.full((len(samples), max_seq_len + 1), self.tokenizer.eod_token, dtype=np.int64) + labels = np.full((len(samples), max_seq_len + 1), self.tokenizer.pad, dtype=np.int64) for i, s in enumerate(samples): # If the sample/target length exceeds the target sequence length, then truncate. - text_len = min(max_seq_len, len(s.text)) - target_len = min(max_seq_len+1, len(s.target)) + text_len = min(max_seq_len, len(s.tokens)) + target_len = min(max_seq_len+1, len(s.labels)) - text_mat[i, :text_len] = np.array(s.text)[:text_len] - target_mat[i, :target_len] = np.array(s.target)[:target_len] + tokens[i, :text_len] = s.tokens[:text_len] + labels[i, :target_len] = s.labels[:target_len] - batch = ImageTaskBatch( - __keys__=[s.__key__ for s in samples], - __subflavors__=[s.__subflavors__ for s in samples], + num_tiles = torch.tensor([n for s in samples for n in s.num_tiles], dtype=torch.int32) + if len(num_tiles) == 0: + num_tiles = torch.tensor([[0]], dtype=torch.int32) + + # Cumulative sample lengths are needed for packing, otherwise use dummy values. + cu_lengths = torch.tensor([[0]], dtype=torch.int32) + max_lengths = torch.tensor([[0]], dtype=torch.int32) + + if self.is_packing_enabled: + cu_lengths = torch.stack([s.cu_lengths for s in samples]) + max_lengths = torch.tensor([s.max_length for s in samples], dtype=torch.int32) + + return ImageTaskBatchPacked( + __key__=[s.__key__ for s in samples], + __restore_key__=[s.__restore_key__ for s in samples], + __subflavor__=None, + __subflavors__=samples[0].__subflavors__, + tokens=tokens, + labels=labels, imgs=imgs, num_tiles=num_tiles, - text=torch.from_numpy(text_mat), - prompt_len=torch.from_numpy(np.array([s.prompt_len for s in samples], dtype=np.int64)), - target=torch.from_numpy(target_mat), + cu_lengths=cu_lengths, + max_lengths=max_lengths, ) - return batch - - def encode_batch(self, batch: ImageTaskBatch) -> dict: + def encode_batch(self, batch: ImageTaskBatchPacked) -> dict: raw = dataclasses.asdict(batch) del raw["__subflavors__"] return raw + def select_samples_to_pack(self, samples: List[ImageTaskSample]) -> List[List[ImageTaskSample]]: + """Selects which samples will be packed together. + + NOTE: Energon dataloader calls this method internally if packing is used. + Please see https://nvidia.github.io/Megatron-Energon/packing.html + """ + lengths = [sample.total_len for sample in samples] + + packed_samples = greedy_knapsack(lengths, samples, self.packing_seq_length) + + return packed_samples + + @stateless + def pack_selected_samples(self, samples: List[ImageTaskSample]) -> List[ImageTaskSamplePacked]: + """ + Function to pack a list of ImageTaskSample into a single ImageTaskSamplePacked. + + NOTE: Energon dataloader calls this method internally if packing is used. + Please see https://nvidia.github.io/Megatron-Energon/packing.html + + Args: + samples: List of ImageTaskSample instances to pack into one sample. + + Returns: + ImageTaskSamplePacked instance. 
+ """ + packing_seq_len = self.packing_seq_length + + packed_tokens = [] + packed_labels = [] + packed_imgs = [] + + current_length = 0 + max_length = 0 + cu_lengths = [0] + + # Process each sample and build lists that we will concatenate to create the packed sample. + for _, sample in enumerate(samples): + sample_len = sample.total_len + + if sample_len > max_length: + max_length = sample_len + + # If adding this sample exceeds the max length, stop. + # This should not happen. The select_samples_to_pack method should have already ensured that the samples fit. + if current_length + sample_len > packing_seq_len: + raise ValueError(f"Packed sample exceeds the maximum sequence length of {packing_seq_len}: {samples}") + + # Add the sample's tokens and labels + packed_tokens.append(sample.tokens) + packed_labels.append(sample.labels) + + # Add the images + packed_imgs += sample.imgs + + current_length += sample_len + cu_lengths.append(current_length) + + # Concatenate packed tokens and labels. + packed_tokens = torch.cat(packed_tokens, dim=0) + packed_labels = torch.cat(packed_labels, dim=0) + + return ImageTaskSamplePacked( + __key__=",".join([s.__key__ for s in samples]), + __restore_key__=(), # Will be set by energon based on `samples` + __subflavor__=None, + __subflavors__=samples[0].__subflavors__, + tokens=packed_tokens, + labels=packed_labels, + imgs=packed_imgs, + cu_lengths=torch.tensor(cu_lengths, dtype=torch.int32), + max_length=max_length, + num_tiles=[n for s in samples for n in s.num_tiles], + ) + def print_error_handler(exc: Exception, key: Optional[str]): print( @@ -689,35 +726,18 @@ def print_error_handler(exc: Exception, key: Optional[str]): ) traceback.print_exc() -# From https://github.com/haotian-liu/LLaVA/blob/c121f0432da27facab705978f83c4ada465e46fd/llava/mm_utils.py#L185 -def tokenizer_image_token(args, prompt, tokenizer, has_image=True, image_token_index=IMAGE_TOKEN_INDEX, return_tensors=None): - - if not has_image: - input_ids = tokenizer(prompt) - - else: - prompt_chunks = [tokenizer(chunk) for chunk in prompt.split('')] - - def insert_separator(X, sep): - return [ele for sublist in zip(X, [sep]*len(X)) for ele in sublist][:-1] - - input_ids = [] - offset = 0 - if args.tokenizer_type in ['Llama2Tokenizer', 'Llama3Tokenizer'] and len(prompt_chunks) > 0 and len(prompt_chunks[0]) > 0: - offset = 1 - input_ids.append(prompt_chunks[0][0]) +def format_multichoice_question(question, multichoice_options): + """Format multi-choice question.""" + options_text = ["{}. {}\n".format(chr(ord('A') + i), option) for i, option in + zip(range(len(multichoice_options)), multichoice_options)] + options_text = "".join(options_text) - for x in insert_separator(prompt_chunks, [image_token_index] * (offset + 1)): - input_ids.extend(x[offset:]) + options_text = f"{options_text}Answer with the option's letter from the given choices directly." 
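For orientation, the packing metadata produced by pack_selected_samples above is just a list of cumulative offsets; with made-up sub-sample lengths it looks like this:

```python
# Illustrative cu_lengths / max_length for a packed sample; the lengths and the
# 4096 packing budget are made-up numbers.
sample_lens = [1847, 950, 1200]   # total_len of each sub-sample (text + image embeddings)
packing_seq_length = 4096         # 1847 + 950 + 1200 = 3997 <= 4096, so they fit in one pack

cu_lengths = [0]
for n in sample_lens:
    cu_lengths.append(cu_lengths[-1] + n)

assert cu_lengths == [0, 1847, 2797, 3997]
assert max(sample_lens) == 1847   # stored as max_length
```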
- if return_tensors is not None: - if return_tensors == 'pt': - return torch.tensor(input_ids, dtype=torch.long) - raise ValueError(f'Unsupported tensor type: {return_tensors}') + return "{}\n{}".format(question, options_text) - # # remove BOS token - # if args.tokenizer_type in ['Llama2Tokenizer', 'Llama3Tokenizer']: - # return input_ids[1:] - return input_ids +def format_multichoice_answer(idx): + """Format multi-choice answer.""" + return chr(ord('A') + idx) diff --git a/examples/multimodal/evaluate_ai2d.py b/examples/multimodal/evaluate_ai2d.py new file mode 100644 index 000000000..2d5db67b6 --- /dev/null +++ b/examples/multimodal/evaluate_ai2d.py @@ -0,0 +1,46 @@ +import argparse +import json + +from evaluate_mmmu import get_input_output_paths +from evaluate_vqav2 import compute_vqa_accuracy + + +def merge_input_files(input_path): + """Merge input files to a format compatible with the evaluator.""" + input_file_paths, output_file_path = get_input_output_paths(input_path, task="AI2D") + + results = [] + + for input_file_path in input_file_paths: + with open(input_file_path, "r") as input_file: + for line in input_file: + res = json.loads(line) + results.append( + { + "question_id": res["sample_id"], + "answer": res["answer"], + "gt_answer": res["gt_answer"], + } + ) + + with open(output_file_path, "w") as output_file: + json.dump(results, output_file) + + return output_file_path + + +def ai2d_eval(input_path): + """Run AI2D evaluation.""" + result_file_path = merge_input_files(input_path) + avg_acc = compute_vqa_accuracy(result_file_path, task="AI2D") + return avg_acc + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input-path', type=str, help="Path to input file(s)") + args = parser.parse_args() + + avg_acc = ai2d_eval(args.input_path) + + print(f"===== AI2D Accuracy {avg_acc:.2f}% =====") diff --git a/examples/multimodal/evaluate_chartqa.py b/examples/multimodal/evaluate_chartqa.py index 8ec346d0d..e9238069d 100644 --- a/examples/multimodal/evaluate_chartqa.py +++ b/examples/multimodal/evaluate_chartqa.py @@ -28,7 +28,7 @@ def merge_input_files(input_path): def chartqa_eval(input_path): """Run ChartQA evaluation.""" result_file_path = merge_input_files(input_path) - return compute_vqa_accuracy(result_file_path, use_chartqa_metric=True) + return compute_vqa_accuracy(result_file_path, task="ChartQA") if __name__ == "__main__": diff --git a/examples/multimodal/evaluate_mathvista.py b/examples/multimodal/evaluate_mathvista.py new file mode 100644 index 000000000..3474c5f25 --- /dev/null +++ b/examples/multimodal/evaluate_mathvista.py @@ -0,0 +1,114 @@ +import argparse +import json +import re + +from evaluate_mmmu import get_input_output_paths +from MMMU.mmmu.utils.eval_utils import parse_multi_choice_response +from open_flamingo.eval.vqa_metric import VQAEval + + +def merge_input_files(input_path): + """Merge input files to a format compatible with the evaluator.""" + input_file_paths, output_file_path = get_input_output_paths(input_path, task="MathVista") + + results = [] + + for input_file_path in input_file_paths: + with open(input_file_path, "r") as input_file: + for line in input_file: + res = json.loads(line) + results.append(res) + + with open(output_file_path, "w") as output_file: + json.dump(results, output_file) + + return output_file_path + + +def extra_processing(text): + """Extra processing.""" + # Max decimal point capped to 2 decimal point + regex = re.compile(r'^\d+\.\d+$') + decimal = regex.findall(text) + + if len(decimal) > 
0: + non_decimal = len(decimal[0].split(".")[0]) + + # if decimal values are all 0, trim them + decimal_digits = [int(d) for d in decimal[0].split(".")[1]] + if sum(decimal_digits) == 0: + text = decimal[0][:non_decimal] + else: + text = decimal[0][: non_decimal + 3] + + # remove % and trailing . + text = text.replace("%", "") + if text[-1] == ".": + text = text[:-1] + + return text + + +def extract_answer(text): + """Extract answer.""" + alphabet = re.findall(r'[a-zA-Z]+', text) + if len(alphabet) > 0 and "e+" not in text: + template = re.findall(r'answer is -*\d+\.*\d*', text) + if len(template) > 0: + text = template[0] + + numbers = re.findall(r'-*\d+\.*\d*', text) + text = numbers[0] if len(numbers) > 0 else text + + return text + + +def compute_mathvista_accuracy(result_file): + """Compute MathVista accuracy.""" + merged_results = json.load(open(result_file)) + + vqa = VQAEval(vqa=None, vqaRes=None) + acc = 0 + for res in merged_results: + pred_ans = res["answer"] + if res["question_type"] == "multi_choice": + pred_ans = parse_multi_choice_response(pred_ans, res["all_choices"], res["index2ans"]) + else: + pred_ans = vqa.processPunctuation(pred_ans) + pred_ans = vqa.processDigitArticle(pred_ans) + # Extra processing and extraction. + pred_ans = extra_processing(pred_ans) + pred_ans = extract_answer(pred_ans) + + gt_ans = res["gt_answer"] + if isinstance(gt_ans, list): + assert len(gt_ans) == 1, f"Expected 1 groundtruth, got {gt_ans}" + gt_ans = gt_ans[0] + + if res["question_type"] != "multi_choice": + gt_ans = vqa.processPunctuation(gt_ans) + gt_ans = vqa.processDigitArticle(gt_ans) + + gt_ans = extra_processing(gt_ans) + + if pred_ans == gt_ans: + acc += 1 + acc = acc / len(merged_results) * 100 + return acc + + +def mathvista_eval(input_path): + """Run MathVista evaluation.""" + result_file_path = merge_input_files(input_path) + acc = compute_mathvista_accuracy(result_file_path) + return acc + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input-path', type=str, help="Path to input file(s)") + args = parser.parse_args() + + acc = mathvista_eval(args.input_path) + + print(f"===== MathVista accuracy: {acc} =====") diff --git a/examples/multimodal/evaluate_mmmu.py b/examples/multimodal/evaluate_mmmu.py index 955be9584..66118fa90 100644 --- a/examples/multimodal/evaluate_mmmu.py +++ b/examples/multimodal/evaluate_mmmu.py @@ -40,6 +40,14 @@ def convert_to_mmmu_format(input_path): sample_id = res["sample_id"] prediction = res["prediction"] + if res["question_type"] == "multiple-choice": + from MMMU.mmmu.utils.eval_utils import parse_multi_choice_response + + prediction = parse_multi_choice_response( + prediction, res["all_choices"], res["index2ans"] + ) + + # MMMU eval script expects just a sample_id to prediction mapping. 
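For reference, a minimal sketch of how the MathVista answer normalization above behaves, assuming `evaluate_mathvista.py` (added in this patch) is importable from the working directory; the example strings are illustrative, not taken from the benchmark:

```python
# Illustrative only: exercises extra_processing/extract_answer from evaluate_mathvista.py.
from evaluate_mathvista import extra_processing, extract_answer

examples = {
    "The answer is 3.50": "3.50",  # free-form answer: keep the number after "answer is"
    "45%": "45",                   # percent sign is stripped
    "60.00": "60",                 # all-zero decimals are trimmed
}

for raw, expected in examples.items():
    normalized = extract_answer(extra_processing(raw))
    assert normalized == expected, (raw, normalized)
```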
output[sample_id] = prediction with open(output_file_path, "w") as output_file: @@ -69,7 +77,7 @@ def mmmu_eval(input_path, groundtruth_path): print(output.stderr) print(output.stdout) - m = re.search("'Overall': {'num': \d, 'acc': (\d.\d+)}", output.stdout) + m = re.search("'Overall': {'num': \d+, 'acc': (\d.\d+)}", output.stdout) return float(m.group(1)) * 100.0 diff --git a/examples/multimodal/evaluate_ocrbench.py b/examples/multimodal/evaluate_ocrbench.py new file mode 100644 index 000000000..bc2b90106 --- /dev/null +++ b/examples/multimodal/evaluate_ocrbench.py @@ -0,0 +1,129 @@ +import argparse +import json + +from evaluate_mmmu import get_input_output_paths + + +def merge_input_files(input_path): + """Merge input files to a format compatible with the evaluator.""" + input_file_paths, output_file_path = get_input_output_paths(input_path, task="OCRBench") + + results = [] + + for input_file_path in input_file_paths: + with open(input_file_path, "r") as input_file: + for line in input_file: + res = json.loads(line) + results.append(res) + + with open(output_file_path, "w") as output_file: + json.dump(results, output_file) + + return output_file_path + + +def compute_ocrbench_score(result_file): + """Compute OCRBench score.""" + merged_results = json.load(open(result_file)) + + # OCRBench score calculation is adopted from https://github.com/Yuliang-Liu/MultimodalOCR/blob/1b7713f44c91f30f64efb6d3e494c416861ef15f/example.py#L1 + # MIT License. Copyright (c) 2023 Yuliang Liu + score = { + "Regular Text Recognition": 0, + "Irregular Text Recognition": 0, + "Artistic Text Recognition": 0, + "Handwriting Recognition": 0, + "Digit String Recognition": 0, + "Non-Semantic Text Recognition": 0, + "Scene Text-centric VQA": 0, + "Doc-oriented VQA": 0, + "Doc-oriented VQA": 0, + "Key Information Extraction": 0, + "Handwritten Mathematical Expression Recognition": 0, + } + + for res in merged_results: + predict = res["answer"] + answers = res["gt_answer"] + + dataset_name = res["dataset_name"] + ocr_type = res["data_type"] + + if dataset_name == "HME100k": + if isinstance(answers, list): + for j in range(len(answers)): + answer = answers[j].strip().replace("\n", " ").replace(" ", "") + predict = predict.strip().replace("\n", " ").replace(" ", "") + if answer in predict: + score[ocr_type] += 1 + else: + answers = answers.strip().replace("\n", " ").replace(" ", "") + predict = predict.strip().replace("\n", " ").replace(" ", "") + if answers in predict: + score[ocr_type] += 1 + else: + if isinstance(answers, list): + for j in range(len(answers)): + answer = answers[j].lower().strip().replace("\n", " ") + predict = predict.lower().strip().replace("\n", " ") + if answer in predict: + score[ocr_type] += 1 + else: + answers = answers.lower().strip().replace("\n", " ") + predict = predict.lower().strip().replace("\n", " ") + if answers in predict: + score[ocr_type] += 1 + + recognition_score = ( + score['Regular Text Recognition'] + + score['Irregular Text Recognition'] + + score['Artistic Text Recognition'] + + score['Handwriting Recognition'] + + score['Digit String Recognition'] + + score['Non-Semantic Text Recognition'] + ) + final_score = ( + recognition_score + + score['Scene Text-centric VQA'] + + score['Doc-oriented VQA'] + + score['Key Information Extraction'] + + score['Handwritten Mathematical Expression Recognition'] + ) + result_log = f"""###########################OCRBench############################## +Text Recognition(Total 300): {recognition_score} +------------------Details of Recognition 
Score------------------- +Regular Text Recognition(Total 50): {score['Regular Text Recognition']} +Irregular Text Recognition(Total 50): {score['Irregular Text Recognition']} +Artistic Text Recognition(Total 50): {score['Artistic Text Recognition']} +Handwriting Recognition(Total 50): {score['Handwriting Recognition']} +Digit String Recognition(Total 50): {score['Digit String Recognition']} +Non-Semantic Text Recognition(Total 50): {score['Non-Semantic Text Recognition']} +---------------------------------------------------------------- +Scene Text-centric VQA(Total 200): {score['Scene Text-centric VQA']} +---------------------------------------------------------------- +Doc-oriented VQA(Total 200): {score['Doc-oriented VQA']} +---------------------------------------------------------------- +Key Information Extraction(Total 200): {score['Key Information Extraction']} +---------------------------------------------------------------- +Handwritten Mathematical Expression Recognition(Total 100): {score['Handwritten Mathematical Expression Recognition']} +----------------------Final Score------------------------------- +Final Score(Total 1000): {final_score}""" + + return result_log, final_score + + +def ocrbench_eval(input_path): + """Run OCRBench evaluation.""" + result_file_path = merge_input_files(input_path) + result_log, score = compute_ocrbench_score(result_file_path) + return result_log, score + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input-path', type=str, help="Path to input file(s)") + args = parser.parse_args() + + result_log, _ = ocrbench_eval(args.input_path) + + print(result_log) diff --git a/examples/multimodal/evaluate_textvqa.py b/examples/multimodal/evaluate_textvqa.py index e231b8e2c..c9bba7134 100644 --- a/examples/multimodal/evaluate_textvqa.py +++ b/examples/multimodal/evaluate_textvqa.py @@ -35,7 +35,7 @@ def merge_input_files(input_path): def textvqa_eval(input_path): """Run TextVQA evaluation.""" result_file_path = merge_input_files(input_path) - avg_acc = compute_vqa_accuracy(result_file_path) + avg_acc = compute_vqa_accuracy(result_file_path, task="TextVQA") return avg_acc diff --git a/examples/multimodal/evaluate_vqav2.py b/examples/multimodal/evaluate_vqav2.py index 9e3b72750..0b1b9209b 100644 --- a/examples/multimodal/evaluate_vqav2.py +++ b/examples/multimodal/evaluate_vqav2.py @@ -34,7 +34,7 @@ def is_number(n: str): return False -def compute_vqa_accuracy(result_file, use_chartqa_metric=False): +def compute_vqa_accuracy(result_file, task): """Compute VQA accuracy.""" merged_results = json.load(open(result_file)) @@ -51,8 +51,8 @@ def compute_vqa_accuracy(result_file, use_chartqa_metric=False): # ChartQA uses relaxed accuracy: # "We consider an answer to be correct if it is within 5% of the gold answer. - # For non-numeric answers, we still need an exact match to consider an answer to be correct." - if use_chartqa_metric: + # For non-numeric answers, we still need an exact match to consider an answer to be correct." + if task == "ChartQA": acc = 0.0 assert len(gt) == 1, "expected exactly one groundtruth answer." 
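As a quick illustration of the substring check that `compute_ocrbench_score` above applies to non-HME100k samples (a sketch only; the strings below are made up):

```python
# Illustrative only: mirrors the lowercase/strip/substring logic used per OCRBench sample.
def matches(predict, answers):
    """True if any ground-truth answer appears inside the prediction."""
    if not isinstance(answers, list):
        answers = [answers]
    predict = predict.lower().strip().replace("\n", " ")
    return any(ans.lower().strip().replace("\n", " ") in predict for ans in answers)

assert matches("The sign reads: OPEN 24 HOURS", ["open 24 hours"])
assert not matches("illegible text", ["open 24 hours"])
```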
gt = gt[0] @@ -66,10 +66,16 @@ def compute_vqa_accuracy(result_file, use_chartqa_metric=False): acc = 1.0 all_acc.append(acc) - else: + elif task in ("VQAv2", "TextVQA"): num_match = sum([pred == ans for ans in gt]) acc = min(1.0, num_match / 3.0) all_acc.append(acc) + elif task == "AI2D": + assert len(gt) == 1, f"Expected exactly 1 GT, got {gt}" + acc = pred == gt[0] + all_acc.append(acc) + else: + raise NotImplementedError(f"unknown task {task}") acc_avg = sum(all_acc) / len(all_acc) * 100 @@ -79,7 +85,7 @@ def compute_vqa_accuracy(result_file, use_chartqa_metric=False): def vqav2_eval(input_path): """Run VQAv2 evaluation.""" result_file = merge_input_files(input_path) - avg_acc = compute_vqa_accuracy(result_file) + avg_acc = compute_vqa_accuracy(result_file, task="VQAv2") return avg_acc diff --git a/examples/multimodal/evaluation_datasets.py b/examples/multimodal/evaluation_datasets.py new file mode 100644 index 000000000..97f9ba926 --- /dev/null +++ b/examples/multimodal/evaluation_datasets.py @@ -0,0 +1,858 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +"""Evaluation datasets.""" +import glob +import itertools +import json +import os +import re +from collections import defaultdict + +import numpy as np +import torch +from image_processing import get_visual_transform +from PIL import Image + +from megatron.training import print_rank_0 + + +def _get_partition_bounds( + total_num_samples, num_samples_per_partition, num_partitions, partition_id +): + if num_samples_per_partition == 0: + samples_per_partition = [ + int(x) for x in np.linspace(0, total_num_samples, num_partitions + 1) + ] + return samples_per_partition[partition_id], samples_per_partition[partition_id + 1] + return num_samples_per_partition * partition_id, num_samples_per_partition * (partition_id + 1) + + +class VQADataset(torch.utils.data.Dataset): + """VQA evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + samples = json.load(open(gt_path, encoding='utf-8')) + if "data" in samples: + samples = samples["data"] + + # Optionally, process only a subset of the input files. + if num_partitions > 0: + lb, ub = _get_partition_bounds( + len(samples), num_samples_per_partition, num_partitions, partition_id + ) + samples = samples[lb:ub] + + self._keys = keys + self._samples = samples + self._input_image_path = input_image_path + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._samples) + + def __getitem__(self, idx): + sample = self._samples[idx] + + img_file = "{}/{}".format(self._input_image_path, sample[self._keys["image_id"]]) + if not os.path.exists(img_file): + img_file += ".jpg" + + if not os.path.exists(img_file): + img_file = img_file.replace('.jpg', '.png') + + img = Image.open(img_file) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + sample_id = idx + if "sample_id" in self._keys: + sample_id = sample[self._keys["sample_id"]] + + metadata = "" # Not used. 
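A small sketch of how `_get_partition_bounds` above splits samples when `num_samples_per_partition == 0` (an even split via `np.linspace`); the numbers are illustrative:

```python
import numpy as np

def even_bounds(total_num_samples, num_partitions, partition_id):
    # Mirrors the num_samples_per_partition == 0 branch of _get_partition_bounds.
    edges = [int(x) for x in np.linspace(0, total_num_samples, num_partitions + 1)]
    return edges[partition_id], edges[partition_id + 1]

# 10 samples over 4 partitions -> edges [0, 2, 5, 7, 10].
assert [even_bounds(10, 4, i) for i in range(4)] == [(0, 2), (2, 5), (5, 7), (7, 10)]
```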
+ + return ( + torch.stack(imgs), + tile_count, + sample_id, + sample[self._keys["question"]], + sample[self._keys["answer"]], + metadata, + ) + + +class CaptioningDataset(torch.utils.data.Dataset): + """Captioning evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + image_files = sorted(glob.glob(input_image_path + "/*")) + + # Optionally, process only a subset of the input files. + if num_partitions > 0: + lb, ub = _get_partition_bounds( + len(image_files), num_samples_per_partition, num_partitions, partition_id + ) + image_files = image_files[lb:ub] + + gts = json.load(open(gt_path)) + answers = defaultdict(list) + for gt in gts["annotations"]: + answers[gt["image_id"]].append(gt['caption']) + + self._image_files = image_files + self._answers = answers + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._image_files) + + def __getitem__(self, idx): + img_file = self._image_files[idx] + image_id = int(img_file.split("_")[-1].split(".")[0]) + + img = Image.open(img_file) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + question = "" # Fixed for all samples. + metadata = "" # Not used. + + return torch.stack(imgs), tile_count, image_id, question, self._answers[image_id], metadata + + +class MMMUDataset(torch.utils.data.Dataset): + """MMMU evaluation dataset.""" + + def __init__( + self, + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + single_image, + vision_model_type, + ): + import datasets + from MMMU.mmmu.utils.data_utils import CAT_SHORT2LONG, load_yaml + + # The following downloads the MMMU dataset from HuggingFace and uses the API from the MMMU github repo to run MMMU evaluation. + all_mmmu_datasets = [] + + hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] + assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." + + for subject in CAT_SHORT2LONG.values(): + # Use a local copy of the dataset if exists (can be faster) or the HF one. + if os.path.exists(input_image_path): + subject_dataset = datasets.load_dataset( + os.path.join(input_image_path, subject), + split=datasets.Split.VALIDATION, + cache_dir=hf_datasets_cache, + verification_mode="no_checks", + ) + else: + subject_dataset = datasets.load_dataset( + "MMMU/MMMU", + subject, + split=datasets.Split.VALIDATION, + cache_dir=hf_datasets_cache, + ) + + all_mmmu_datasets.append(subject_dataset) + + dataset = datasets.concatenate_datasets(all_mmmu_datasets) + + dataset = [s for s in dataset if s['id'].startswith("val")] + + # Optionally, process only a subset of the input files. + if num_partitions > 0: + lb, ub = _get_partition_bounds( + len(dataset), num_samples_per_partition, num_partitions, partition_id + ) + dataset = dataset[lb:ub] + + # Using the LLaVA config from the MMMU repo. 
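A short note on how `CaptioningDataset` above recovers the numeric image id, assuming COCO-style file names (the path below is hypothetical):

```python
# Illustrative only: image_id parsing used in CaptioningDataset.__getitem__.
img_file = "/data/coco/val2014/COCO_val2014_000000391895.jpg"  # hypothetical path
image_id = int(img_file.split("_")[-1].split(".")[0])
assert image_id == 391895
```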
+ config = load_yaml("examples/multimodal/MMMU/mmmu/configs/llava1.5.yaml") + for k, v in config.items(): + if isinstance(v, list): + assert len(v) == 1, "only one value supported." + config[k] = v[0] + + self._config = config + + self._dataset = dataset + + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._single_image = single_image + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._dataset) + + def __getitem__(self, idx): + from MMMU.mmmu.utils.data_utils import construct_prompt, process_single_sample + + sample = self._dataset[idx] + + # Use the single image approach from the MMMU repo. + if self._single_image: + sample = process_single_sample(sample) + sample = construct_prompt(sample, self._config) + + img = sample["image"] + sample_imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + sample_num_tiles = [len(sample_imgs)] + else: + sample = construct_prompt(sample, self._config) + + sample_imgs = [] + sample_num_tiles = [] + + img_indices = re.findall(r"" + + img = sample[img_key] + assert img is not None, f"{img_str} is in prompt but not in sample images" + + # Note: Only replace the current image tag. + sample["final_input_prompt"] = sample["final_input_prompt"].replace( + img_str, "", 1 + ) + + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + adjusted_max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) # List of tiles. + + sample_imgs.extend(imgs) + sample_num_tiles.append(len(imgs)) + + # Sanity check. + for i in range(1, 8): + assert ( + f"" not in sample["final_input_prompt"] + ), "prompt contains unhandled image tags" + + # MMMU specific metadata. + metadata = {"question_type": sample["question_type"]} + if sample["question_type"] == "multiple-choice": + metadata["index2ans"] = sample["index2ans"] + metadata["all_choices"] = sample["all_choices"] + + prompt = sample['final_input_prompt'] + if self._single_image: + for i in range(8): + prompt = prompt.replace(f"", "") + prompt = f"\n{prompt}" + + tile_count = torch.tensor(sample_num_tiles, dtype=torch.int) + + return ( + torch.stack(sample_imgs), + tile_count, + sample["id"], + prompt, + sample["answer"], + metadata, + ) + + +class VideoMMMEDataset(torch.utils.data.Dataset): + "Video MME evaluation dataset." 
+
+    def __init__(
+        self,
+        input_image_path,
+        gt_path,
+        num_samples_per_partition,
+        num_partitions,
+        partition_id,
+        img_h,
+        img_w,
+        use_tiling,
+        max_num_tiles,
+        use_thumbnail,
+        num_frames,
+        vision_model_type,
+    ):
+        ground_truth_original = json.load(open(gt_path))
+        ground_truth = []
+        for gt in ground_truth_original:
+            video_path = gt["url"]
+            video_path = video_path.replace("https://www.youtube.com/watch?v=", "")
+            video_path = video_path.replace("https://m.youtube.com/watch?v=", "")
+            video_path = os.path.join(input_image_path, video_path + ".mp4")
+            if not os.path.exists(video_path):
+                continue
+            gt["video_path"] = video_path
+            ground_truth.append(gt)
+
+        ground_truth = sorted(ground_truth, key=lambda gt: gt["video_path"])
+        print_rank_0(f"Found {len(ground_truth)} videos to process.")
+
+        if num_partitions > 0:
+            start_idx, end_idx = _get_partition_bounds(
+                len(ground_truth), num_samples_per_partition, num_partitions, partition_id
+            )
+            ground_truth = ground_truth[start_idx:end_idx]
+
+        self._ground_truth = ground_truth
+        self._img_h = img_h
+        self._img_w = img_w
+        self._use_tiling = use_tiling
+        self._max_num_tiles = max_num_tiles
+        self._use_thumbnail = use_thumbnail
+        self._num_frames = num_frames
+        self._vision_model_type = vision_model_type
+
+    def __len__(self):
+        return len(self._ground_truth)
+
+    def __getitem__(self, idx):
+        from torchvision.io import read_video
+
+        gt = self._ground_truth[idx]
+
+        video, _, _ = read_video(gt["video_path"], start_pts=0, end_pts=None, pts_unit='sec')
+        video = video.numpy()
+        selected_frames = torch.linspace(0, video.shape[0] - 1, self._num_frames).long()
+        video_frames = video[selected_frames]
+        if self._num_frames == 1:
+            video_frames = video_frames[None]
+
+        imgs = list(
+            itertools.chain.from_iterable(
+                get_visual_transform(
+                    img,
+                    self._img_h,
+                    self._img_w,
+                    self._use_tiling,
+                    self._max_num_tiles,
+                    self._use_thumbnail,
+                    augment=False,
+                    vision_model_type=self._vision_model_type,
+                )
+                for img in video_frames
+            )
+        )
+
+        for question in gt["questions"]:
+            # Very hacky, but we essentially re-create gt holding only the
+            # question of interest. This is to make this generation script
+            # compatible with the Video MME evaluation script.
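A minimal sketch of the uniform frame selection used in `VideoMMMEDataset.__getitem__` above (toy tensor shapes, not real video data):

```python
import torch

num_frames = 4
video = torch.zeros(100, 224, 224, 3)  # (T, H, W, C), as returned by read_video

# Pick num_frames indices spread evenly over the clip, including first and last frame.
selected_frames = torch.linspace(0, video.shape[0] - 1, num_frames).long()
assert selected_frames.tolist() == [0, 33, 66, 99]

video_frames = video[selected_frames]
assert video_frames.shape == (num_frames, 224, 224, 3)
```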
+ question_dict = { + "video_id": gt["video_id"], + "duration_category": gt["duration_category"], + "video_category": gt["video_category"], + "video_subcategory": gt["video_subcategory"], + "url": gt["url"], + "questions": [question], + } + + num_tiles = torch.tensor([len(imgs)], dtype=torch.int) + + answer = "" + metadata = "" + + return ( + torch.stack(imgs), + num_tiles, + question["question_id"], + question_dict, + answer, + metadata, + ) + + +class OCRBenchDataset(torch.utils.data.Dataset): + """OCRBench evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + gt = json.load(open(gt_path, encoding='utf-8')) + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(gt), num_samples_per_partition, num_partitions, partition_id + ) + gt = gt[start_idx:end_idx] + + self._input_image_path = input_image_path + self._gt = gt + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._gt) + + def __getitem__(self, idx): + img_path = os.path.join(self._input_image_path, self._gt[idx]['image_path']) + + img = Image.open(img_path) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + metadata = { + "dataset_name": self._gt[idx]["dataset_name"], + "data_type": self._gt[idx]["type"], + } + + return ( + torch.stack(imgs), + tile_count, + idx, + self._gt[idx]["question"], + self._gt[idx]["answers"], + metadata, + ) + + +class MathVistaDataset(torch.utils.data.Dataset): + """MathVista evaluation dataset.""" + + def __init__( + self, + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + import datasets + + hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] + assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." + + if os.path.exists(input_image_path): + dataset = datasets.load_dataset( + input_image_path, cache_dir=hf_datasets_cache, verification_mode="no_checks" + ) + else: + dataset = datasets.load_dataset( + "AI4Math/MathVista", split="testmini", cache_dir=hf_datasets_cache + ) + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(dataset), num_samples_per_partition, num_partitions, partition_id + ) + dataset = dataset[start_idx:end_idx] + + self._dataset = dataset + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._dataset["pid"]) + + def __getitem__(self, idx): + # Already a PIL object. 
+ img = self._dataset['decoded_image'][idx] + + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + question_id = self._dataset["pid"][idx] + question = self._dataset["question"][idx] + question_type = self._dataset["question_type"][idx] # free_form or multi_choice + query = self._dataset["query"][idx] + choices = self._dataset["choices"][idx] + answer = self._dataset["answer"][idx] + + if question_type == 'multi_choice': + start_chr = 'A' + choices_str = '' + index2ans = {} + all_choices = [] + for choice in choices: + all_choices.append(start_chr) + index2ans[start_chr] = choice + choices_str += f"{start_chr}. {choice}\n" + start_chr = chr(ord(start_chr) + 1) + + question = question + '\n' + choices_str + question = question + "Answer with the option's letter from the given choices directly." + answer = chr(ord('A') + choices.index(answer)) + else: + question = query.replace("Hint: ", "") + index2ans = {} + all_choices = [] + + metadata = { + "question_type": question_type, + "index2ans": index2ans, + "all_choices": all_choices, + } + + return torch.stack(imgs), tile_count, question_id, question, answer, metadata + + +class AI2DDataset(torch.utils.data.Dataset): + """AI2D evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + no_mask, + vision_model_type, + ): + with open(gt_path, 'r') as f: + jsonl = list(f) + + gt = [json.loads(json_str) for json_str in jsonl] + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(gt), num_samples_per_partition, num_partitions, partition_id + ) + gt = gt[start_idx:end_idx] + + self._gt = gt + self._input_image_path = input_image_path + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._no_mask = no_mask + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._gt) + + def __getitem__(self, idx): + img_path = os.path.join(self._input_image_path, self._gt[idx]['image']) + if self._no_mask: + img_path.replace("AI2D_TEST", "AI2D_TEST_NO_MASK_IMAGES") + + img = Image.open(img_path) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + metadata = "" # Not used. 
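A short sketch of the multi-choice bookkeeping in `MathVistaDataset.__getitem__` above, with toy choices (it mirrors the loop that builds `index2ans`, `all_choices`, and the letter-form answer):

```python
# Illustrative only: same construction as the multi_choice branch above.
choices = ["triangle", "square", "circle"]
answer = "circle"

start_chr, choices_str, index2ans, all_choices = "A", "", {}, []
for choice in choices:
    all_choices.append(start_chr)
    index2ans[start_chr] = choice
    choices_str += f"{start_chr}. {choice}\n"
    start_chr = chr(ord(start_chr) + 1)

assert all_choices == ["A", "B", "C"]
assert index2ans["C"] == "circle"
assert chr(ord("A") + choices.index(answer)) == "C"  # letter form of the ground truth
```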
+ + return ( + torch.stack(imgs), + tile_count, + self._gt[idx]["question_id"], + self._gt[idx]["question"], + self._gt[idx]["answer"], + metadata, + ) + + +def get_evaluation_dataset( + task, + input_image_path, + gt_path, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_samples_per_partition, + num_partitions, + partition_id, + num_frames, + vision_model_type, +): + """Get an evaluation dataset.""" + if task == "TextVQA": + keys = { + "image_id": "image_id", + "sample_id": "question_id", + "question": "question", + "answer": "answers", + } + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "VQAv2": + keys = { + "image_id": "image", + "sample_id": "question_id", + "question": "question", + "answer": "answer", + } + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "ChartQA": + keys = {"image_id": "imgname", "question": "query", "answer": "label"} + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "captioning": + dataset = CaptioningDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == 'MMMU': + # Note: single_image=True uses only one image like in the MMMU repo example. + # single_image=False uses all images in the sample. + dataset = MMMUDataset( + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + single_image=True, + vision_model_type=vision_model_type, + ) + elif task == "VideoMME": + dataset = VideoMMMEDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_frames, + vision_model_type, + ) + elif task == "OCRBench": + dataset = OCRBenchDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "MathVista": + dataset = MathVistaDataset( + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "AI2D": + dataset = AI2DDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + no_mask=False, + vision_model_type=vision_model_type, + ) + else: + raise NotImplementedError(f"unsupported task {task}") + + return dataset diff --git a/examples/multimodal/image_processing.py b/examples/multimodal/image_processing.py index a4541576a..ed9401c67 100644 --- a/examples/multimodal/image_processing.py +++ b/examples/multimodal/image_processing.py @@ -1,71 +1,36 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
Except portions as noted which are Copyright (c) 2023 OpenGVLab and licensed under the MIT license found in LICENSE. -import numpy as np -import torch - -from PIL import Image, ImageDraw from torchvision import transforms as T -from torchvision.transforms import Compose, RandAugment, RandomResizedCrop, Resize, ToPILImage - - -# Imagenet's mean and std. -pixel_mean = [123.675, 116.28, 103.53] -pixel_std = [58.395, 57.12, 57.375] +from torchvision.transforms import Compose +from torchvision.transforms.functional import InterpolationMode -# Reshape for broadcasting. -pixel_mean = torch.Tensor(pixel_mean).view(-1, 1, 1) -pixel_std = torch.Tensor(pixel_std).view(-1, 1, 1) +IMAGENET_PIXEL_MEAN = [0.485, 0.456, 0.406] +IMAGENET_PIXEL_STD = [0.229, 0.224, 0.225] +SIGLIP_PIXEL_MEAN = [0.5, 0.5, 0.5] +SIGLIP_PIXEL_STD = [0.5, 0.5, 0.5] +CLIP_PIXEL_MEAN = [0.48145466, 0.4578275, 0.40821073] +CLIP_PIXEL_STD = [0.26862954, 0.26130258, 0.27577711] -def convert_to_rgb(image): - return image.convert("RGB") -def _transform_train_aug(img_h, img_w): - return Compose([ - ToPILImage(), - RandomResizedCrop((img_h, img_w), scale=(0.5, 1.0)), - convert_to_rgb, - RandAugment(2, 5, isPIL=True, augs=['Identity', 'AutoContrast', 'Brightness', 'Sharpness', 'Equalize', - 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), - ]) +pixel_statistics = { + "clip": (CLIP_PIXEL_MEAN, CLIP_PIXEL_STD), + "siglip": (SIGLIP_PIXEL_MEAN, SIGLIP_PIXEL_STD), + "internvit": (IMAGENET_PIXEL_MEAN, IMAGENET_PIXEL_STD), +} -def _transform_test(img_h, img_w): - return Compose([ - ToPILImage(), - Resize((img_h, img_w)), - convert_to_rgb, - ]) +def get_visual_transform(img, img_h, img_w, use_tiling=False, max_num_tiles=1, use_thumbnail=False, augment=False, vision_model_type="clip"): + pixel_mean, pixel_std = pixel_statistics[vision_model_type] -def standardize_image(img): - """Standardize image pixel values.""" - return (torch.Tensor(np.array(img)).permute(2, 0, 1) - pixel_mean) / pixel_std + assert not augment, "Image augmentation not implemented." + transform = build_transform(img_h, pixel_mean, pixel_std, vision_model_type) - -def get_visual_transform(img, img_h, img_w, use_tiling=False, max_num_tiles=1, use_thumbnail=False, augment=False): if use_tiling: assert img_h == img_w, "dynamic tiling expects equal tile height and width" imgs = dynamic_preprocess(img, min_num=1, max_num=max_num_tiles, image_size=img_h, use_thumbnail=use_thumbnail) - imgs = [standardize_image(img.convert("RGB")) for img in imgs] + imgs = [transform(img) for img in imgs] else: - img = np.array(img) - original_h, original_w = img.shape[0], img.shape[1] - ratio = float(max(img_h, img_w)) / max(original_h, original_w) - scaled_h, scaled_w = int(original_h * ratio + 0.5), int(original_w * ratio + 0.5) - - if augment: - visual_transform = _transform_train_aug(scaled_h, scaled_w) - else: - visual_transform = _transform_test(scaled_h, scaled_w) - - img = visual_transform(img) - - # Standardize pixel values. - img = standardize_image(img) - - # Pad to target image size. 
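A hedged usage sketch for the rewritten `get_visual_transform` above (the image path is hypothetical; the number of tiles returned depends on the input aspect ratio):

```python
# Illustrative only: preprocess one image with tiling for an InternViT-style encoder.
from PIL import Image
from image_processing import get_visual_transform

img = Image.open("sample.jpg")  # hypothetical input image
imgs = get_visual_transform(
    img, img_h=448, img_w=448,
    use_tiling=True, max_num_tiles=6, use_thumbnail=True,
    augment=False, vision_model_type="internvit",
)
# Each element is a normalized (3, 448, 448) tensor; the tile count varies per image.
print(len(imgs), imgs[0].shape)
```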
- delta_h, delta_w = img_h - scaled_h, img_w - scaled_w - img = torch.nn.functional.pad(img, (0, delta_w, 0, delta_h)) - imgs = [img] + imgs = [transform(img)] return imgs @@ -128,3 +93,26 @@ def dynamic_preprocess(image, min_num=1, max_num=6, image_size=448, use_thumbnai thumbnail_img = image.resize((image_size, image_size)) processed_images.append(thumbnail_img) return processed_images + + +# Based on https://github.com/openai/CLIP/blob/dcba3cb2e2827b402d2701e7e1c7d9fed8a20ef1/clip/clip.py#L79 +# and https://github.com/OpenGVLab/InternVL/blob/aa521e6eb1df4cf153aa4118fcf13e673c055d46/internvl_chat/internvl/train/dataset.py#L276 +def build_transform(input_size, pixel_mean, pixel_std, vision_model_type): + if vision_model_type in ("siglip", "internvit"): + transform = T.Compose([ + T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img), + T.Resize((input_size, input_size), interpolation=InterpolationMode.BICUBIC), + T.ToTensor(), + T.Normalize(mean=pixel_mean, std=pixel_std) + ]) + elif vision_model_type == "clip": + transform = Compose([ + T.Resize((input_size, input_size), interpolation=InterpolationMode.BICUBIC), + T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img), + T.ToTensor(), + T.Normalize(mean=pixel_mean, std=pixel_std), + ]) + else: + raise NotImplementedError(f"image processing not defined for vision model {vision_model_type}") + + return transform diff --git a/examples/multimodal/layer_specs.py b/examples/multimodal/layer_specs.py index b56e0b07e..2e07dc808 100644 --- a/examples/multimodal/layer_specs.py +++ b/examples/multimodal/layer_specs.py @@ -12,7 +12,7 @@ from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules try: - from megatron.core.transformer.custom_layers.transformer_engine import ( + from megatron.core.extensions.transformer_engine import ( TEColumnParallelLinear, TEDotProductAttention, TELayerNormColumnParallelLinear, @@ -28,16 +28,17 @@ import apex from megatron.core.fusions.fused_layer_norm import FusedLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm HAVE_APEX = True LNImpl = FusedLayerNorm except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm def get_layer_spec(is_vit, normalization) -> ModuleSpec: @@ -45,7 +46,21 @@ def get_layer_spec(is_vit, normalization) -> ModuleSpec: if normalization == "LayerNorm": norm = LNImpl elif normalization == "RMSNorm": - norm = TENorm + if HAVE_TE: + norm = TENorm + else: + version = torch.__version__.split('.') + version_geq_2_4 = ( + int(TORCH_VERSION[0]) > 2 + or ( + int(TORCH_VERSION[0]) == 2 + and int(TORCH_VERSION[1]) >= 4 + ) + ) + assert version_geq_2_4, "Torch version >= 2.4.0 is required for RMSNorm" + if HAVE_APEX: + warnings.warn(f'Apex does not support RMSNorm. 
Falling back to Torch Norm') + norm = WrappedTorchNorm else: raise RuntimeError("unknown normalization", normalization) diff --git a/examples/multimodal/model.py b/examples/multimodal/model.py index b4bab73cf..6db834e97 100644 --- a/examples/multimodal/model.py +++ b/examples/multimodal/model.py @@ -4,11 +4,11 @@ import torch from config import get_language_model_config, get_vision_model_config, get_vision_projection_config -from layer_specs import get_layer_spec, get_layer_spec_te, get_mlp_module_spec +from layer_specs import get_layer_spec, get_layer_spec_te, get_mlp_module_spec, get_norm_mlp_module_spec_te -from megatron.core.models.multimodal.llava_model import LLaVAModel +from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN, LLaVAModel from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings -from megatron.training import get_args, print_rank_0 +from megatron.training import get_args, get_tokenizer, print_rank_0 from megatron.training.arguments import core_transformer_config_from_args @@ -30,14 +30,22 @@ def model_provider( model: A multimodal model. """ args = get_args() + assert args.ckpt_format == 'torch', "Only ckpt-format torch is supported for VLM training currently." + assert args.encoder_pipeline_model_parallel_size <= 1, "LLaVA does not support pp>1 for encoder on it's own pipeline rank" use_te = args.use_te print_rank_0('building a multimodal model ...') num_image_embeddings = get_num_image_embeddings( - args.img_h, args.img_w, args.patch_dim, args.vision_model_type, - args.disable_vision_class_token, 1 + args.img_h, + args.img_w, + args.patch_dim, + args.vision_model_type, + args.disable_vision_class_token, + 1, + args.pixel_shuffle, + args.use_tile_tags, ) old_seq_length = args.seq_length args.seq_length = args.encoder_seq_length = num_image_embeddings @@ -92,6 +100,9 @@ def model_provider( vision_transformer_layer_spec = get_layer_spec( is_vit=True, normalization=vision_config.normalization ) + elif vision_model_type == "internvit": + from nvlm.internvit import get_internvit_layer_spec + vision_transformer_layer_spec = get_internvit_layer_spec(use_te=use_te) else: raise RuntimeError("unsupported vision model type", vision_model_type) @@ -100,21 +111,35 @@ def model_provider( vision_projection_config, language_config.hidden_size ) + # --encoder-pipeline-model-parallel-size 1 will enable a separate pipeline stage for the vision model. if args.encoder_pipeline_model_parallel_size > 0: assert ( args.encoder_pipeline_model_parallel_size == 1 ), "vision model and projection can only live on 1 pipeline stage." - vision_config.pipeline_model_parallel_size = args.encoder_pipeline_model_parallel_size - vision_projection_config.pipeline_model_parallel_size = ( - args.encoder_pipeline_model_parallel_size - ) + if args.encoder_tensor_model_parallel_size > 0: vision_config.tensor_model_parallel_size = args.encoder_tensor_model_parallel_size vision_projection_config.tensor_model_parallel_size = ( args.encoder_tensor_model_parallel_size ) - vision_projection_layer_spec = get_mlp_module_spec(use_te=use_te).submodules + # Make sure vision model pipeline parallel size is not inherited from the language model pipeline parallel size. + # 0 is not a valid for the config value, hence max(1, ). 
+ vision_config.pipeline_model_parallel_size = max(1, args.encoder_pipeline_model_parallel_size) + vision_projection_config.pipeline_model_parallel_size = vision_config.pipeline_model_parallel_size + + # Make sure the vision model does not inherit first and last pipeline num layers from the language model. + vision_config.first_pipeline_num_layers = vision_config.last_pipeline_num_layers = None + + if vision_projection_config.normalization: + vision_projection_layer_spec = get_norm_mlp_module_spec_te().submodules + else: + vision_projection_layer_spec = get_mlp_module_spec(use_te=use_te).submodules + + tokenizer = get_tokenizer() + image_token_index = tokenizer.convert_tokens_to_ids(IMAGE_TOKEN) + + tile_tags = _get_tile_tags(args, tokenizer) model = LLaVAModel( language_transformer_config=language_config, @@ -139,6 +164,10 @@ def model_provider( img_w=args.img_w, patch_dim=args.patch_dim, language_rotary_base=args.rotary_base, + language_rope_scaling=args.use_rope_scaling, + image_token_index=image_token_index, + pixel_shuffle=args.pixel_shuffle, + tile_tags=tile_tags, ) model.freeze( @@ -148,3 +177,26 @@ def model_provider( ) return model + + +def _get_tile_tags(args, tokenizer): + """Tile tags are used in NVLM to surround image tiles with text tags.""" + if not args.use_tile_tags: + return None + + # We expect the tokenized length of the tags is same. + thumbnail_tag_text = "" + if args.tokenizer_prompt_format == "nvlm-yi-34b": + thumbnail_tag_text = "" + + assert args.max_num_tiles <= 6, "Up to 6 tile tags used" + tile_tags_text = [f"" for i in range(1, args.max_num_tiles + 1)] + [thumbnail_tag_text] + + start_idx = 0 + if tokenizer._prompt_config.has_bos: + start_idx = 1 + + # Convert to tokens [num_tiles, tile_seq_len]. + tile_tags = [tokenizer.tokenize(t)[start_idx:] for t in tile_tags_text] + + return tile_tags diff --git a/examples/multimodal/clip_converter.py b/examples/multimodal/model_converter/clip_converter.py similarity index 100% rename from examples/multimodal/clip_converter.py rename to examples/multimodal/model_converter/clip_converter.py diff --git a/examples/multimodal/model_converter/internvit_converter.py b/examples/multimodal/model_converter/internvit_converter.py new file mode 100755 index 000000000..48404c208 --- /dev/null +++ b/examples/multimodal/model_converter/internvit_converter.py @@ -0,0 +1,162 @@ +import argparse +import os + +import torch +from transformers import AutoModel + + +def convert(model_name, output_path, tensor_parallel_size, use_te): + """Convert InternViT HF checkpoint to mcore.""" + hf_model = AutoModel.from_pretrained( + model_name, + trust_remote_code=True + ) + + hf_state_dict = hf_model.state_dict() + new_state_dicts = [{"model": dict()} for _ in range(tensor_parallel_size)] + + hidden_size = 3200 + num_heads = 25 + dim = 128 + + order = torch.ones(3 * hidden_size).long() + + for j in range(num_heads): + for i in range(dim): + order[i + dim*3*j] = j*dim+i + order[dim + i + dim*3*j] = j*dim+i+num_heads*dim + order[dim*2 + i + dim*3*j] = j*dim+i+num_heads*dim*2 + + for name, tensor in hf_state_dict.items(): + # Map parameter names to ones used in megatron. + new_name = "" + new_tensor = tensor + + # This is used for chunking some tensors to target tensor parallel size. 
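The `order` index built just above interleaves per-head Q/K/V rows the way Megatron's fused `linear_qkv` expects. A small self-check with toy sizes (2 heads of dim 3), purely illustrative:

```python
import torch

num_heads, dim = 2, 3  # toy stand-ins for the converter's 25 heads of dim 128
hidden_size = num_heads * dim

order = torch.ones(3 * hidden_size).long()
for j in range(num_heads):
    for i in range(dim):
        order[i + dim * 3 * j] = j * dim + i                                   # Q of head j
        order[dim + i + dim * 3 * j] = j * dim + i + num_heads * dim           # K of head j
        order[dim * 2 + i + dim * 3 * j] = j * dim + i + num_heads * dim * 2   # V of head j

# HF layout [Q_all | K_all | V_all] -> Megatron layout [(Q0, K0, V0), (Q1, K1, V1)].
hf_rows = torch.arange(3 * hidden_size)
assert hf_rows[order].tolist() == [0, 1, 2, 6, 7, 8, 12, 13, 14, 3, 4, 5, 9, 10, 11, 15, 16, 17]
```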
+ chunk_dim = None + + if "embeddings.class_embedding" in name: + new_name = "class_token" + elif "embeddings.patch_embedding.weight" in name: + new_name = "conv1.weight" + elif "embeddings.patch_embedding.bias" in name: + new_name = "conv1.bias" + elif "embeddings.position_embedding" in name: + new_name = "position_embeddings.weight" + new_tensor = new_tensor.squeeze(0) + elif "encoder.layers" in name: + layer_idx = name.split(".")[2] + + base = f"decoder.layers.{layer_idx}" + + head_dim = 128 + + if tensor_parallel_size == 1: + num_padded_heads = 25 + elif tensor_parallel_size == 8: + # Note: 25 is not divisible by 8 and we don't currently support uneven heads split with tensor parallelism. + # So we pad with dummy all-zero heads. Please use a nice even number of attention heads in your model. + num_padded_heads = 32 + else: + raise NotImplementedError("invalid tensor parallel size value:", tensor_parallel_size) + + if "ls1" in name: + new_name = f"{base}.ls1" + elif "ls2" in name: + new_name = f"{base}.ls2" + elif "attn.qkv.weight" in name: + new_name = f"{base}.self_attention.linear_qkv.weight" + num_tensors = 3 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros((padded_dim, new_tensor.shape[-1]), dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:new_tensor.shape[0], :] = new_tensor[order] + new_tensor = padded_tensor + chunk_dim = 0 + elif "attn.q_norm.weight" in name: + new_name = f"{base}.self_attention.q_layernorm.weight" + num_tensors = 1 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros(padded_dim, dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:new_tensor.shape[0]] = new_tensor + new_tensor = padded_tensor + chunk_dim = 0 + elif "attn.k_norm.weight" in name: + new_name = f"{base}.self_attention.k_layernorm.weight" + num_tensors = 1 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros(padded_dim, dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:new_tensor.shape[0]] = new_tensor + new_tensor = padded_tensor + chunk_dim = 0 + elif "attn.proj.weight" in name: + new_name = f"{base}.self_attention.linear_proj.weight" + num_tensors = 1 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros((new_tensor.shape[0], padded_dim), dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:, :new_tensor.shape[-1]] = new_tensor + new_tensor = padded_tensor + chunk_dim = 1 + elif "attn.proj.bias" in name: + new_name = f"{base}.self_attention.linear_proj.bias" + elif "mlp.fc1.weight" in name: + new_name = f"{base}.mlp.linear_fc1.weight" + chunk_dim = 0 + elif "mlp.fc1.bias" in name: + new_name = f"{base}.mlp.linear_fc1.bias" + chunk_dim = 0 + elif "mlp.fc2.weight" in name: + new_name = f"{base}.mlp.linear_fc2.weight" + chunk_dim = 1 + elif "mlp.fc2.bias" in name: + new_name = f"{base}.mlp.linear_fc2.bias" + elif "norm1" in name: + new_name = f"{base}.input_layernorm.weight" + elif "norm2" in name: + new_name = f"{base}.pre_mlp_layernorm.weight" + else: + raise RuntimeError("unexpected transformer layer name", name) + else: + raise RuntimeError("unexpected layer name", name) + + assert new_name != "", f"unexpected layer name {name}" + + # TE sets _extra_state (for FP8 purposes), so set an empty one here for compatibility. 
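Because 25 attention heads do not split evenly across 8 tensor-parallel ranks, the converter above zero-pads to 32 heads before chunking. A toy sketch of that padding-then-chunking step (sizes shrunk for clarity):

```python
import torch

head_dim, real_heads, padded_heads, tp = 4, 3, 8, 8  # toy stand-ins for 128, 25, 32, 8
qkv = torch.randn(3 * head_dim * real_heads, 16)     # weight already reordered per head

padded = torch.zeros(3 * head_dim * padded_heads, qkv.shape[-1], dtype=qkv.dtype)
padded[: qkv.shape[0], :] = qkv  # dummy all-zero heads occupy the tail rows

chunks = torch.chunk(padded, tp, dim=0)  # one equal slice per tensor-parallel rank
assert len(chunks) == tp and all(c.shape[0] == padded.shape[0] // tp for c in chunks)
```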
+ extra_state_layers = ("linear_qkv", "linear_proj", "linear_fc1", "linear_fc2") + is_extra_state_layer = any([l in new_name for l in extra_state_layers]) + if use_te and is_extra_state_layer: + layer = new_name.split(".")[-2] + if layer in extra_state_layers: + extra_state_name = ( + new_name[: new_name.rfind(".") + 1] + "_extra_state" + ) # Replace the weight name. + for i in range(tensor_parallel_size): + new_state_dicts[i]["model"][extra_state_name] = None + + if chunk_dim is None: + new_tensors = [new_tensor for _ in range(tensor_parallel_size)] + else: + new_tensors = torch.chunk(new_tensor, tensor_parallel_size, dim=chunk_dim) + + for i in range(tensor_parallel_size): + new_state_dicts[i]["model"][new_name] = new_tensors[i].clone() + + for i in range(tensor_parallel_size): + output_dir_tp = os.path.join(output_path, f"iter_0000001/mp_rank_0{i}") + os.makedirs(output_dir_tp, exist_ok=True) + output_path_tp = os.path.join(output_dir_tp, "model_optim_rng.pt") + torch.save(new_state_dicts[i], output_path_tp) + print("saved file", output_path_tp) + + print("done") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="InternVIT HuggingFace to Mcore converter") + parser.add_argument("--model-name", type=str, default="OpenGVLab/InternViT-6B-448px-V1-5", help="Model name in HuggingFace") + parser.add_argument("--output-dir", type=str, required=True, help="Output directory for the mcore model.") + parser.add_argument("--use-te", action="store_true", default=True) + parser.add_argument("--tensor-parallel-size", type=int, required=True) + + args = parser.parse_args() + + convert(args.model_name, args.output_dir, args.tensor_parallel_size, args.use_te) diff --git a/examples/multimodal/model_converter/siglip_converter.py b/examples/multimodal/model_converter/siglip_converter.py new file mode 100644 index 000000000..666cda15e --- /dev/null +++ b/examples/multimodal/model_converter/siglip_converter.py @@ -0,0 +1,154 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import argparse +import os +from transformers import PaliGemmaForConditionalGeneration +import torch + + +def convert(output_path, tensor_parallel_size, use_te): + device = "cuda" + + model_id = "google/paligemma-3b-pt-448" + model = PaliGemmaForConditionalGeneration.from_pretrained(model_id).eval() + + model = model.to(device) + + print(model.config) + for name, tensor in model.state_dict().items(): + if "vision_model" not in name: + continue + shape_str = "(" + ", ".join([str(x) for x in tensor.shape]) + ")" + print(f"{name:<75} {shape_str:>20}") + + state_dict = model.state_dict() + new_state_dicts = [{"model": dict()} for _ in range(tensor_parallel_size)] + + def add_chunck_tensor(new_tensor, new_name, chunk_dim=None): + if chunk_dim is None: + new_tensors = [new_tensor for _ in range(tensor_parallel_size)] + else: + new_tensors = torch.chunk(new_tensor, tensor_parallel_size, dim=chunk_dim) + + for i in range(tensor_parallel_size): + # chunk() creates a view of a bigger tensor. clone() is used here to avoid excessive storage. + new_state_dicts[i]["model"][new_name] = new_tensors[i].clone() + + # TE sets _extra_state (for FP8 purposes), so set an empty one here for compatibility. 
+ extra_state_layers = ("linear_qkv", "linear_proj", "linear_fc1", "linear_fc2") + is_extra_state_layer = any([l in new_name for l in extra_state_layers]) + if use_te and is_extra_state_layer: + layer = new_name.split(".")[-2] + if layer in extra_state_layers: + extra_state_name = ( + new_name[: new_name.rfind(".") + 1] + "_extra_state" + ) # Replace the weight name. + new_state_dicts[i]["model"][extra_state_name] = None + + for name, tensor in state_dict.items(): + if tensor.dtype == torch.float16: + state_dict[name] = tensor.to(torch.float32) + + add_chunck_tensor( + state_dict["vision_tower.vision_model.embeddings.position_embedding.weight"], + "position_embeddings.weight") + add_chunck_tensor( + state_dict["vision_tower.vision_model.embeddings.patch_embedding.weight"], + "conv1.weight") + add_chunck_tensor( + state_dict["vision_tower.vision_model.embeddings.patch_embedding.bias"], + "conv1.bias") + + head_dim = 72 + num_head = 16 + for layer_idx in range(27): + origin_base = f"vision_tower.vision_model.encoder.layers.{layer_idx}" + target_base = f"decoder.layers.{layer_idx}" + + for param_type in ["weight", "bias"]: + # QKV + q_proj_params = state_dict[f"{origin_base}.self_attn.q_proj.{param_type}"] + k_proj_params = state_dict[f"{origin_base}.self_attn.k_proj.{param_type}"] + v_proj_params = state_dict[f"{origin_base}.self_attn.v_proj.{param_type}"] + # Do some tensor manipulation because megatron expect one tensor + # projection for the QKV in the order + # [(Q1, K1, V1), (Q2, K2, V2), ...] where Qi is the query of the + # i-th head with dimension num_head. + new_tensor = torch.concatenate([ + q_proj_params.view(num_head, head_dim, -1), + k_proj_params.view(num_head, head_dim, -1), + v_proj_params.view(num_head, head_dim, -1)], axis=1).view( + 3*head_dim*num_head, -1) + if param_type == "bias": + new_tensor = new_tensor[:, 0] + new_name = f"{target_base}.self_attention.linear_qkv.{param_type}" + add_chunck_tensor(new_tensor, new_name, chunk_dim=0) + # linear_proj + add_chunck_tensor( + state_dict[f"{origin_base}.self_attn.out_proj.{param_type}"], + f"{target_base}.self_attention.linear_proj.{param_type}", + chunk_dim=1 if param_type == "weight" else None) + # layer_norm + new_name = f"{target_base}.input_layernorm.{param_type}" + if use_te: + new_name = f"{target_base}.self_attention.linear_qkv.layer_norm_{param_type}" + add_chunck_tensor( + state_dict[f"{origin_base}.layer_norm1.{param_type}"], + new_name) + # FC 1 + add_chunck_tensor( + state_dict[f"{origin_base}.mlp.fc1.{param_type}"], + f"{target_base}.mlp.linear_fc1.{param_type}", + chunk_dim=0) + # FC 2 + add_chunck_tensor( + state_dict[f"{origin_base}.mlp.fc2.{param_type}"], + f"{target_base}.mlp.linear_fc2.{param_type}", + chunk_dim=1 if param_type=="weight" else None) + # layer_norm + new_name = f"{target_base}.pre_mlp_layernorm.{param_type}" + if use_te: + new_name = f"{target_base}.mlp.linear_fc1.layer_norm_{param_type}" + add_chunck_tensor( + state_dict[f"{origin_base}.layer_norm2.{param_type}"], + new_name) + + add_chunck_tensor( + state_dict["vision_tower.vision_model.post_layernorm.weight"], + "ln_post.weight") + add_chunck_tensor( + state_dict["vision_tower.vision_model.post_layernorm.bias"], + "ln_post.bias") + + for i in range(tensor_parallel_size): + output_dir_tp = os.path.join(output_path, "iter_0000001", f"mp_rank_0{i}") + os.makedirs(output_dir_tp) + output_path_tp = os.path.join(output_dir_tp, "model_optim_rng.pt") + torch.save(new_state_dicts[i], output_path_tp) + + +if __name__ == "__main__": + parser = 
argparse.ArgumentParser( + description=""" +Convert SigLIP weights to megatron format. + + +Example usage: +python siglip_converter.py --tensor-parallel-size 4 --output google_paligemma_3b_pt_44_mcore_tp_4 --use-te + +examples/multimodal/combine_mistral_clip.sh Mistral-7B-Instruct-v0.3-mcore-tp4 google_paligemma_3b_pt_44_mcore_tp_4 mistral_7b_instruct_v0p3_google_paligemma_3b_pt_44_mcore_tp_4 +""", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument( + "--output", type=str, required=True, help="output directory for megatron state dict file(s)" + ) + parser.add_argument( + "--tensor-parallel-size", type=int, default=1, help="model tensor parallel size" + ) + parser.add_argument("--use-te", action="store_true", help="Use Transformer Engine") + + args = parser.parse_args() + + convert(args.output, args.tensor_parallel_size, args.use_te) + + print("done.") diff --git a/examples/multimodal/model_converter/vision_model_tester.py b/examples/multimodal/model_converter/vision_model_tester.py new file mode 100644 index 000000000..ef36dd5f9 --- /dev/null +++ b/examples/multimodal/model_converter/vision_model_tester.py @@ -0,0 +1,121 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import argparse +import os +import sys + +# Add megatron and the multimodal example to the path. +sys.path.append( + os.path.abspath( + os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir, os.path.pardir) + ) +) +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))) + +import torch +from transformers import AutoModel + +from examples.multimodal.model import model_provider +from examples.multimodal.multimodal_args import add_multimodal_extra_args +from megatron.training import get_model +from megatron.training.checkpointing import load_checkpoint +from megatron.training.initialize import initialize_megatron + + +def run_mcore_vision(model_path): + """Run mcore vision model.""" + os.environ["CUDA_DEVICE_MAX_CONNECTIONS"] = "1" + + # Megatron has some mandatory flags. + sys.argv = [ + "ignore_me.py", + "--micro-batch-size=1", + "--num-layers=2", + "--vision-model-type=internvit", + "--language-model-type=mistral_7b", + "--tokenizer-prompt-format=mistral", + "--tokenizer-type=MultimodalTokenizer", + "--tokenizer-model=mistralai/Mistral-7B-Instruct-v0.3", + "--vocab-size=1024", + "--hidden-size=64", + "--num-attention-heads=8", + "--seq-length=1024", + "--decoder-seq-length=2048", + "--max-position-embeddings=2048", + "--bf16", + "--img-h=448", + "--img-w=448", + "--patch-dim=14", + "--tensor-model-parallel-size=8", + "--use-te", + f"--pretrained-checkpoint={model_path}", + ] + + initialize_megatron(extra_args_provider=add_multimodal_extra_args) + + def wrapped_model_provider(pre_process, post_process): + return model_provider(pre_process, post_process, parallel_output=False) + + # Set up model and load checkpoint. 
+ model = get_model(wrapped_model_provider, wrap_with_ddp=False) + + vision_model = model[0].module.vision_model + + load_checkpoint([vision_model], None, None) + + vision_model.eval() + + images = torch.ones((1, 3, 448, 448), dtype=torch.bfloat16, device="cuda") + + output = vision_model(images) + + return output + + +def run_hf_vision(model_name): + """Run HF vision model.""" + model = ( + AutoModel.from_pretrained(model_name, torch_dtype=torch.bfloat16, trust_remote_code=True) + .cuda() + .eval() + ) + + images = torch.ones((1, 3, 448, 448), dtype=torch.bfloat16, device="cuda") + + outputs = model(images, return_dict=True) + + return outputs + + +def main(mcore_model, hf_model): + """Compare vision model outputs between mcore and HF given the same fixed input.""" + mcore = run_mcore_vision(mcore_model) + + if torch.distributed.get_rank() == 0: + hf = run_hf_vision(hf_model) + hf = hf["last_hidden_state"] + + # Compare logits. Due to different attention implementations and other details, + # there will be numerical differences. + diff = (mcore - hf).abs() + mean_diff = diff.mean().item() + max_diff = diff.max().item() + print(f"mean diff {mean_diff}, max diff {max_diff}") + assert mean_diff < 0.1, "mean output difference is greater than expected" + assert max_diff < 50, "max output difference is greater than expected" + + print("lgtm") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Check mcore vision model output vs. HF numerically.", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument( + "--mcore-model", type=str, required=True, help="directory for mcore model weights" + ) + parser.add_argument("--hf-model", type=str, required=True, help="Model name in HF") + + args = parser.parse_args() + + main(args.mcore_model, args.hf_model) diff --git a/examples/multimodal/multimodal_args.py b/examples/multimodal/multimodal_args.py index a7cb4235e..4b2be450a 100644 --- a/examples/multimodal/multimodal_args.py +++ b/examples/multimodal/multimodal_args.py @@ -1,4 +1,5 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN def add_multimodal_extra_args(parser): @@ -39,5 +40,36 @@ def add_multimodal_extra_args(parser): group.add_argument( "--online-evaluation-config", type=str, help="Config file for online evaluation." ) + group.add_argument( + "--special-tokens", + nargs="*", + default=[IMAGE_TOKEN], + help="Special tokens used in the multimodal model", + ) + group.add_argument( + "--tokenizer-prompt-format", + type=str, + choices=["mistral", "llama3", "chatml", "nvlm-yi-34b", "qwen2p0"], + required=True, + help="Prompt format to use with the tokenizer.", + ) + group.add_argument("--pixel-shuffle", action="store_true", default=False) + group.add_argument( + "--image-tag-type", + type=str, + choices=["nvlm", "internvl", ""], + default="", # Default: Image tag not used. + help="Surround image tokens with tags.", + ) + group.add_argument("--use-tile-tags", action="store_true", default=False, help="Use tile tags") + group.add_argument( + "--packing-buffer-size", + type=int, + default=None, # Packing is disabled by default. + help="Enable sample packing by setting the buffer size to > 0", + ) + group.add_argument( + "--packing-seq-length", type=int, default=0, help="Packing sequence length. Must be > 0 if using packing." 
+ ) return parser diff --git a/examples/multimodal/nvlm/README.md b/examples/multimodal/nvlm/README.md new file mode 100644 index 000000000..7eddbb7ef --- /dev/null +++ b/examples/multimodal/nvlm/README.md @@ -0,0 +1,100 @@ +NVLM +==== + +Please refer to the [NVLM paper](https://arxiv.org/pdf/2409.11402) for details. + +*NOTE: VLMs in Megatron are under active development and are expected to change.* + +# Setup + +## Docker image + +Please use `examples/multimodal/Dockerfile`. + +## Dataset preparation + +Please refer to Tables 4 and 6 in the [NVLM paper](https://arxiv.org/pdf/2409.11402) for full list of pretrain and SFT datasets. +Please refer to https://nvidia.github.io/Megatron-Energon/data_prep.html on preparing datasets in the Megatron Energon format. + +## Model conversion + +### Vision model + +NVLM 1.0 models use [OpenGVLab/InternViT-6B-448px-V1-5](https://huggingface.co/OpenGVLab/InternViT-6B-448px-V1-5) from HuggingFace. +Please download it and run the following command to convert it to Megatron format. +``` +python examples/multimodal/model_converter/internvit_converter.py --output-dir --use-te --tensor-parallel-size 8 +``` + +### 34B Language model + +NVLM 1.0 34B starts from [NousResearch/Nous-Hermes-2-Yi-34B](https://huggingface.co/NousResearch/Nous-Hermes-2-Yi-34B) from HuggingFace. +Please download it and run the following command to convert it to Megatron format. +``` +python tools/checkpoint/convert.py --bf16 --model-type GPT --loader llama_mistral --saver mcore --target-tensor-parallel-size 8 --checkpoint-type hf \ + --load-dir --save-dir --tokenizer-model \ + --saver-transformer-impl transformer_engine --model-size yi-34B --make-vocab-size-divisible-by 1 +``` + +### 72B Language model + +NVLM 1.0 72B starts from [Qwen/Qwen2-72B-Instruct](https://huggingface.co/Qwen/Qwen2-72B-Instruct) from HuggingFace. +Please download it and run the following command to convert it to Megatron format. +``` +python tools/checkpoint/convert.py --bf16 --model-type GPT --loader llama_mistral --saver mcore --target-tensor-parallel-size 8 --checkpoint-type hf \ + --load-dir --save-dir --tokenizer-model \ + --saver-transformer-impl transformer_engine --model-size qwen2.5-72Bf +``` + +### Combined checkpoint + +Combine the vision model checkpoint from [InternVit](#internvit) with the [34B](#34b-language-model) or [72B](#72b-language-model) language model by running: +``` +examples/multimodal/combine_lm_vision_checkpoints.sh nvlm +``` + +# Training + +## 34B + +1. Pretraining: please run `examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh`. Please use the InternViT + 34B [combined checkpoint](#combined-checkpoint) and tokenizer from HuggingFace. +2. SFT: please run `examples/multimodal/nvlm/sft_34b_internvit.sh` using the checkpoint from 1. + +## 72B + +1. Pretraining: please run `examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh`. Please use the InternViT + 72B [combined checkpoint](#combined-checkpoint) and tokenizer from HuggingFace. +2. Convert the pretraining checkpoint from 1. to have pipeline parallel size = 4 for SFT. Please run +``` +examples/multimodal/nvlm/pp_checkpoint_converter.py --input \ +--input-pipeline-parallel 1 --output --output-pipeline-parallel 4 \ +--tensor-parallel 8 +``` +3. SFT: please run `examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh` using the checkpoint from 2. +4. 
To convert the checkpoint with pipeline parallel size = 4 back to 1 for evaluation, please run +``` +examples/multimodal/nvlm/pp_checkpoint_converter.py --input \ +--input-pipeline-parallel 4 --output --output-pipeline-parallel 1 \ +--tensor-parallel 8 +``` + +# Evaluation + +Run the text generation script. +- 34B +``` +examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh --input-image-path /path/to/input/images --output-path /some/output/directory \ + --model-path /path/to/model.pt --gt-path /path/to/groundtruth/file --task generation-task-name --use-tiling +``` +- 72B +``` +examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh --input-image-path /path/to/input/images --output-path /some/output/directory \ + --model-path /path/to/model.pt --gt-path /path/to/groundtruth/file --task generation-task-name --use-tiling +``` + +where `--task generation-task-name` is the name of the evaluation benchmark such as `captioning`, `MMMU` or `TextVQA`. + +Then, run one of the evaluation scripts from `examples/multimodal`. For example + +``` +python examples/multimodal/evaluate_mmmu.py --input-path /output/directory/from/generation +``` diff --git a/examples/multimodal/nvlm/internvit.py b/examples/multimodal/nvlm/internvit.py new file mode 100644 index 000000000..cd116ffb7 --- /dev/null +++ b/examples/multimodal/nvlm/internvit.py @@ -0,0 +1,273 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +"""" +NOTE: NVLM uses InternViT with tensor parallel (TP) size = 8. +Since InternViT has 25 attention heads and Megatron currently requires the number of attention heads +to be divisible by the TP size, we add 7 dummy zero attention heads to have 32 attention heads. + +This workaround requires some changes to how we compute RMSNorm, Attention etc. + +Additionally, InternViT introduces some unique features like Layer Scaling. + +Those code changes are gathered here. +""" +from functools import partial +from typing import Dict + +import torch + +from megatron.core.dist_checkpointing.mapping import ShardedStateDict +from megatron.core.extensions.transformer_engine import ( + TEColumnParallelLinear, + TEDotProductAttention, + TERowParallelLinear, +) +from megatron.core.parallel_state import ( + get_tensor_model_parallel_group, + get_tensor_model_parallel_rank, + get_tensor_model_parallel_world_size, +) +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.mlp import MLP, MLPSubmodules +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules +from megatron.core.transformer.utils import make_sharded_tensors_for_checkpoint + + +class InternViTRMSNorm(MegatronModule): + + def __init__( + self, + config, + hidden_size: int, + eps: float = 1e-6, + sequence_parallel: bool = False, + compute_var: bool = False, + ): + """Custom RMSNorm for InternViT. + + Args: + config (TransformerConfig): Config. + hidden_size (int): Input hidden size. 
+ eps (float): epsilon to use for the norm, default to 1e-6 + sequence_parallel (bool): Set to true if sequence parallelism is being used, + this marks the weights as needing to be allreduced. + compute_var (bool): Indicator to compute statistic manually. + """ + super().__init__(config=config) + self.config = config + self.eps = eps + self.weight = torch.nn.Parameter(torch.ones(hidden_size)) + self._compute_var = compute_var + + assert not sequence_parallel, "Sequence parallelism is not supported with InternViT." + + setattr(self.weight, 'sequence_parallel', sequence_parallel) + + def _norm(self, x, var): + if var is None: + var = x.pow(2).mean(-1, keepdim=True) + + return x * torch.rsqrt(var + self.eps) + + def forward(self, x): + """Run RMSNorm with an option to compute custom statistic.""" + var = None + if self._compute_var: + unpadded_hidden_size = self.config.hidden_size # 3200 + max_dim = x.shape[-1] # 128 + + x = x.reshape(x.size(0), x.size(1), -1) + var = self._gather_var(x.float().pow(2), max_dim) / unpadded_hidden_size + + output = self._norm(x.float(), var).type_as(x) + output = output * self.weight + + if self._compute_var: + output = output.reshape(output.size(0), output.size(1), -1, max_dim) + + return output + + def _gather_var(self, input_, max_dim, valid_ranks=6): + """Compute statistic across the non-dummy heads.""" + world_size = get_tensor_model_parallel_world_size() + assert world_size == 8, "tested only with TP=8" + + # Size and dimension. + last_dim = input_.dim() - 1 + rank = get_tensor_model_parallel_rank() + + if rank < valid_ranks: # Ranks 0-5 have 24 non-dummy attention heads. + var = input_.sum(-1, keepdim=True) + elif rank == valid_ranks: # Rank 6 has 1 non-dummy attention head. + var = input_[..., :max_dim].sum(-1, keepdim=True) + else: + var = input_.sum(-1, keepdim=True) * 0.0 # Zero-out the dummy heads. + + tensor_list = [torch.empty_like(var) for _ in range(world_size)] + tensor_list[rank] = var + torch.distributed.all_gather(tensor_list, var, group=get_tensor_model_parallel_group()) + + output = torch.cat(tensor_list, dim=last_dim).contiguous() + + return output.sum(-1, keepdim=True) + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata={}): + + # in InternVitSelfAttention the q_layernorm and k_layernorm weights + # are tensor-parallel so must be converted to sharded tensors + if 'q_layernorm' in prefix or 'k_layernorm' in prefix: + state_dict = self.state_dict(prefix='', keep_vars=True) + return make_sharded_tensors_for_checkpoint( + state_dict, prefix, {'weight': 0}, sharded_offsets + ) + else: + return super().sharded_state_dict(prefix, sharded_offsets, metadata) + + +def get_mlp_module_spec(use_te: bool = True) -> ModuleSpec: + # Dense MLP w/ or w/o TE modules. + return ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, + linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, + ), + ) + + +# Handle InternViT's layer scaling. 
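+# InternViT adds a learnable per-channel scale (LayerScale) to each residual
+# branch: out = residual + dropout(x + bias) * ls. The ls1/ls2 parameters are
+# created in InternViTTransformerLayer below, initialized to ones, and bound to
+# these bias-dropout-add functions via functools.partial.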
+def _bias_dropout_add_func_internvit(ls, x_with_bias, residual, prob, training): + x, bias = x_with_bias # unpack + residual = residual if residual.dtype == x.dtype else residual.to(x.dtype) + if bias is not None: + x = x + bias + out = torch.nn.functional.dropout(x, p=prob, training=training) + out = residual + out * ls + return out + else: + out = torch.nn.functional.dropout(x, p=prob, training=training) + out = residual + out * ls + return out + + +def bias_dropout_add_unfused_internvit(ls, training): + """Bias-dropout-add as in Megatron but with added LayerScaling handling.""" + + def _bias_dropout_add(x_with_bias, residual, prob): + return _bias_dropout_add_func_internvit(ls, x_with_bias, residual, prob, training) + + return _bias_dropout_add + + +def get_bias_dropout_add_internvit(ls, training, fused): + """Bias-dropout-add as in Megatron but with added LayerScaling handling.""" + assert not fused, "Fused bias-dropout-add not implemented for InternViT." + return bias_dropout_add_unfused_internvit(ls, training) + + +# Add InternViT specialties to our default TransformerLayer. +class InternViTTransformerLayer(TransformerLayer): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.ls1 = torch.nn.Parameter(torch.ones(self.config.hidden_size)) + self.ls2 = torch.nn.Parameter(torch.ones(self.config.hidden_size)) + + self.self_attn_bda = partial(self.self_attn_bda, self.ls1) + self.mlp_bda = partial(self.mlp_bda, self.ls2) + + +# Override a few things that are special in InternViT and not supported by the SelfAttention class. +class InternViTSelfAttention(SelfAttention): + def __init__( + self, config: TransformerConfig, submodules: SelfAttentionSubmodules, *args, **kwargs + ): + super().__init__(config=config, submodules=submodules, *args, **kwargs) + + # Need to override linear_qkv, q_layernorm and k_layernorm. + qkv_bias = False + + self.linear_qkv = build_module( + submodules.linear_qkv, + self.config.hidden_size, + self.query_projection_size + 2 * self.kv_projection_size, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=qkv_bias, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='qkv', + ) + + qk_layernorm_hidden_size = ( + self.hidden_size_per_attention_head * self.num_attention_heads_per_partition + ) # 512 for internvit + + self.q_layernorm = build_module( + submodules.q_layernorm, + hidden_size=qk_layernorm_hidden_size, + config=self.config, + eps=self.config.layernorm_epsilon, + compute_var=True, + ) + + self.k_layernorm = build_module( + submodules.k_layernorm, + hidden_size=qk_layernorm_hidden_size, + config=self.config, + eps=self.config.layernorm_epsilon, + compute_var=True, + ) + + +class InternViTTEDotProductAttention(TEDotProductAttention): + """Adjusted Attention for InternViT""" + + def forward(self, *args, **kwargs): + """Regular TEDotProductAttention + zero-out dummy attention heads.""" + out = super().forward(*args, **kwargs) + + # This makes sure the dummy attention heads are zeroed out. 
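+        # With TP=8, the 32 padded heads are split 4 per rank: ranks 0-5 hold only
+        # real heads, rank 6 holds 1 real head plus 3 dummy ones (so everything
+        # past the first head_dim=128 slots of its output is masked), and rank 7
+        # holds only dummy heads and is zeroed entirely. This keeps the 7 dummy
+        # heads from contributing to the attention output.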
+ mask = torch.ones_like(out, dtype=out.dtype, device=out.device) + rank = get_tensor_model_parallel_rank() + max_dim = out.shape[-1] # 128 + valid_ranks = 6 + + if rank == valid_ranks: + mask[..., max_dim:] *= 0.0 + elif rank > valid_ranks: + mask *= 0.0 + out *= mask + + return out + + +def get_internvit_layer_spec(use_te) -> ModuleSpec: + mlp = get_mlp_module_spec(use_te) # no norm + + return ModuleSpec( + module=InternViTTransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=InternViTRMSNorm, + self_attention=ModuleSpec( + module=InternViTSelfAttention, + params={"attn_mask_type": AttnMaskType.no_mask}, + submodules=SelfAttentionSubmodules( + linear_qkv=TEColumnParallelLinear if use_te else ColumnParallelLinear, + core_attention=TEDotProductAttention if use_te else DotProductAttention, + linear_proj=TERowParallelLinear if use_te else RowParallelLinear, + q_layernorm=InternViTRMSNorm, + k_layernorm=InternViTRMSNorm, + ), + ), + self_attn_bda=get_bias_dropout_add_internvit, + pre_mlp_layernorm=InternViTRMSNorm, + mlp=mlp, + mlp_bda=get_bias_dropout_add_internvit, + ), + ) diff --git a/examples/multimodal/nvlm/nvlm_prompts.json b/examples/multimodal/nvlm/nvlm_prompts.json new file mode 100644 index 000000000..ab36adc76 --- /dev/null +++ b/examples/multimodal/nvlm/nvlm_prompts.json @@ -0,0 +1,165 @@ +{ + "COMMENT": "Mixture of our own custom prompts and some prompts from https://huggingface.co/datasets/liuhaotian/LLaVA-Pretrain/viewer and https://huggingface.co/datasets/HuggingFaceM4/M3IT", + "Captioning": { + "raw": [ + "Can you briefly explain what you see in the image?", + "Describe what's happening in this image in one short sentence.", + "Write a short caption that accurately represents the content of this image.", + "Please generate a descriptive caption for the image provided.", + "How would you summarize the scene depicted in the picture in short?", + "Describe the image briefly.", + "Write a succinct description of the image, capturing its main components, the relationships between them, and any notable details.", + "Create a concise caption that accurately describes the main elements in the image provided.", + "Write a brief, yet comprehensive, description of the image.", + "Describe the image in a clear and concise manner.", + "For the given image, provide a one-sentence summary that captures the most important details.", + "Generate a short caption for the picture.", + "Write a short and informative description that highlights the primary subjects and actions occurring in the given image.", + "Provide a concise and informative caption for the image, focusing on the primary subjects.", + "Write a clear description of the image, make sure the key features are well covered.", + "Offer a succinct explanation of the picture presented." + ] + }, + "CaptioningPretraining": { + "raw": [ + "Give a brief description of image.", + "Give a brief description of the image.", + "Provide a brief description of the given image.", + "Provide a one-sentence caption for the provided image.", + "Write a terse but informative summary of the picture.", + "Describe the image concisely.", + "Generate a clear and concise summary of the photo." 
+ ] + }, + "CaptioningSFT": { + "raw": [ + "Give a brief description of the image.", + "Give a short and clear explanation of the subsequent image.", + "Present a compact description of the photo's key features.", + "Provide a brief description of the given image.", + "Provide a one-sentence caption for the provided image.", + "Render a clear and concise summary of the photo.", + "Share a concise interpretation of the image provided.", + "Summarize the visual content of the image.", + "Write a terse but informative summary of the picture.", + "Describe the image concisely." + ] + }, + "VQAPretraining": { + "raw": [ + "Question: {} Short answer:", + "Question: {} Answer:" + ] + }, + "VQASFT": { + "raw": [ + "{}", + "{}\nAnswer the question using a single word or phrase." + ], + "docvqa": [ + "{}", + "{}\nAnswer this question using the text in the image directly." + ] + }, + "DocPretraining": { + "raw": [ + "Retrieve the text from the given pdf image.", + "Extract the text from the provided document.", + "Transcribe the text displayed in the image." + ], + "ocr_multi": [ + "Apply grounded Optical Character Recognition (OCR) to the provided image.", + "Extract all texts and their bounding boxes from the given image using grounded OCR.", + "Extract and transcribe all visible text from the provided image, ensuring accurate spatial recognition.", + "Conduct a detailed optical character recognition analysis on this image, maintaining the text's original layout and positioning.", + "Execute a thorough text recognition procedure on this visual input, ensuring that the spatial arrangement of the text is accurately represented.", + "Perform an in-depth OCR scan of the image, capturing both the content and contextual positioning of all textual information.", + "OCR with grounding:" + ], + "md": [ + "Extract the text from the given image and format it in Markdown.", + "Convert the text from the provided image into Markdown format.", + "Transform the text from the given image into Markdown syntax.", + "Extract and convert the text from the image to Markdown.", + "Retrieve the text from the image and present it in Markdown format." + ], + "grounded_ocr": [ + "{}. Text:", + "Recognize the text in this region: {}.", + "Identify the text in this area: {}.", + "Detect the text within this section: {}." + ], + "referring_grounding": [ + "Region of \"{}\" is:", + "Locate the text \"{}\" in the image.", + "Identify the text \"{}\" in the image and provide the coordinates." 
+ ] + }, + "CaptioningDetailed": { + "raw": [ + "Create a comprehensive paragraph that captures the essence of the image while weaving a cohesive narrative around its elements.", + "Compose a paragraph that thoroughly describes the image's content, providing context and connections between different aspects of the scene.", + "Provide a detailed, paragraph-length description of the image that paints a vivid picture and tells a coherent story.", + "Write a rich and engaging paragraph that delves into the image's components, describing not only what is seen but also how the elements relate to one another.", + "Give a well-rounded, paragraph-length explanation of the image, describing the scene and its components while forming a complete and engaging narrative.", + "Produce a paragraph that not only describes the individual elements in the image but also weaves them together to form a cohesive, connected account.", + "Construct a paragraph that captures the image's details and context, offering a more in-depth and engaging story than a simple caption.", + "Compose a descriptive paragraph that brings the image to life through detailed storytelling, connecting the various visual elements into a unified narrative.", + "Create a paragraph that provides an extensive and interconnected description of the image, ensuring that the narrative is both detailed and cohesive.", + "Write a compelling and detailed paragraph that delves into the image's components, linking them together to create a unified and engaging story." + ] + }, + "OCR": { + "raw": [ + "Can you read the text from image and output here?", + "Extract and document the text from the provided image.", + "Converting the text embedded in this image into a readable document.", + "Transcribe all the text you find.", + "Can you extract all visible text from the image here?" + ], + "markdown": [ + "Can you extract all visible text from the provided image?", + "Converting the text embedded in this image into a readable markdown document.", + "Can you read the text in the document as markdown?", + "Transcribe the document as markdown.", + "Extract and document the text from the provided image." + ], + "table_markdown": [ + "Can you extract all visible text from the provided table?", + "Can you read the text in the provided table as markdown?", + "Transcribe the table as markdown.", + "Extract and document the text from the provided table image." + ], + "plain": [ + "Transcribe the document as plain text.", + "Extract and document the text from the provided image.", + "Converting the text embedded in this image into a readable document.", + "Transcribe all the text you find.", + "Can you extract all visible text from the image here?" + ], + "bbox_plain": [ + "Transcribe the document as plain text along with bounding boxes.", + "Extract and document the text from the provided image along with bounding boxes.", + "Converting the text embedded in this image into a readable documen along with bounding boxes.", + "Can you extract all visible text with bounding boxes from the image here?" 
+ ] + }, + "VQA": { + "raw": [ + "Given the image, answer the following question with few words.", + "Answer the following question: ", + "What is the answer to this question?", + "Write the answer: ", + "Please answer this question: " + ] + }, + "Embedded": { + "raw": [ + "Given the image, answer the following question with few words.", + "Answer the following question: ", + "What is the answer to this question?", + "Write the answer: ", + "Please answer this question: " + ] + } +} diff --git a/examples/multimodal/nvlm/pp_checkpoint_converter.py b/examples/multimodal/nvlm/pp_checkpoint_converter.py new file mode 100644 index 000000000..7e99d650b --- /dev/null +++ b/examples/multimodal/nvlm/pp_checkpoint_converter.py @@ -0,0 +1,180 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import argparse +import os +import sys + +import torch + +# Add megatron to the path. +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir, os.path.pardir)) +) + + +def split(input_dir, base_output_dir, input_pp, output_pp, num_tp, num_layers_per_pp_rank): + """Split pipeline parallel size = 1 checkpoint to pipeline parallel size N.""" + for tp in range(num_tp): + path = os.path.join(input_dir, f"mp_rank_0{tp}", "model_optim_rng.pt") + sd = torch.load(path) + + if num_layers_per_pp_rank is None: + num_layers = sd["args"].num_layers + assert num_layers % output_pp == 0, "specify --num-layers-per-pp-rank for an uneven split" + num_layers_per_pp_rank = [num_layers // output_pp] * output_pp + + layer_lb = 0 + for pp in range(output_pp): + assert num_layers_per_pp_rank[pp] > 0, "each pp rank must have at least 1 layer" + layer_ub = layer_lb + num_layers_per_pp_rank[pp] + + new_sd = sd.copy() + new_sd["model"] = dict() + for k, v in sd["model"].items(): + # First pp rank has vision model. + if pp == 0 and ("vision_model" in k or "vision_projection" in k): + new_sd["model"][k] = v + continue + + # Only the first pp rank has the word embeddings. + if "language_model.embedding.word_embeddings" in k and pp == 0: + new_sd["model"][k] = v + + # Only the last pp rank has the output layer. + if "language_model.output_layer" in k and pp == output_pp - 1: + new_sd["model"][k] = v + + # Only the last pp rank has final layer norm. + if "language_model.decoder.final_layernorm" in k and pp == output_pp - 1: + new_sd["model"][k] = v + + if "language_model.decoder.layers" in k: + layer_num = int(k.split(".")[3]) + + if layer_lb <= layer_num and layer_num < layer_ub: + # On all pp ranks, megatron starts layer nums from 0! + new_layer_num = int(layer_num - layer_lb) + + k_splitted = k.split(".") + k_splitted[3] = str(new_layer_num) + new_k = ".".join(k_splitted) + + new_sd["model"][new_k] = v + + output_dir = os.path.join(base_output_dir, f"iter_0000001/mp_rank_0{tp}_00{pp}") + os.makedirs(output_dir, exist_ok=True) + output_path = os.path.join(output_dir, "model_optim_rng.pt") + torch.save(new_sd, output_path) + + print(f"processed tp rank: {tp}/{num_tp - 1} and pp rank: {pp}/{output_pp - 1}") + + layer_lb = layer_ub + + # This is needed for megatron checkpoint loading. 
+ with open(os.path.join(base_output_dir, "latest_checkpointed_iteration.txt"), "w") as f: + f.write("1") + + +def combine(input_dir, base_output_dir, input_pp, output_pp, num_tp, num_layers_per_pp_rank): + """Combine pipeline parallel size = N checkpoint to pipeline parallel size 1.""" + for tp in range(num_tp): + new_sd = None + + layer_num_offset = 0 + max_layer_num = 0 + + for pp in range(input_pp): + path = os.path.join(input_dir, f"mp_rank_0{tp}_00{pp}", "model_optim_rng.pt") + sd = torch.load(path) + + if pp == 0: + new_sd = sd.copy() + new_sd["model"] = dict() + new_sd["args"].pipeline_model_parallel_size = 1 + + assert new_sd is not None + + for k, v in sd["model"].items(): + # First pp rank has vision model. + if pp == 0 and ("vision_model" in k or "vision_projection" in k): + new_sd["model"][k] = v + continue + + # Only the first pp rank has the word embeddings. + if "language_model.embedding.word_embeddings" in k and pp == 0: + new_sd["model"][k] = v + + # Only the last pp rank has the output layer. + if "language_model.output_layer" in k and pp == input_pp - 1: + new_sd["model"][k] = v + + # Only the last pp rank has final layer norm. + if "language_model.decoder.final_layernorm" in k and pp == input_pp - 1: + new_sd["model"][k] = v + + if "language_model.decoder.layers" in k: + layer_num = int(k.split(".")[3]) + + # On all pp ranks, megatron starts layer nums from 0! + new_layer_num = layer_num_offset + layer_num + + if new_layer_num > max_layer_num: + max_layer_num = new_layer_num + + k_splitted = k.split(".") + k_splitted[3] = str(new_layer_num) + new_k = ".".join(k_splitted) + + new_sd["model"][new_k] = v + + print(f"processed tp rank: {tp}/{num_tp - 1} and pp rank: {pp}/{input_pp - 1}") + + layer_num_offset = max_layer_num + 1 + + output_dir = os.path.join(base_output_dir, f"iter_0000001/mp_rank_0{tp}") + os.makedirs(output_dir, exist_ok=True) + output_path = os.path.join(output_dir, "model_optim_rng.pt") + torch.save(new_sd, output_path) + + # This is needed for megatron checkpoint loading. 
+ with open(os.path.join(base_output_dir, "latest_checkpointed_iteration.txt"), "w") as f: + f.write("1") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Change pipeline parallelism for a model", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + + parser.add_argument( + "--input", type=str, required=True, help="Input model directory" + ) + parser.add_argument( + "--input-pipeline-parallel", type=int, required=True, help="Input model pipeline parallelism" + ) + parser.add_argument( + "--output", type=str, required=True, help="Output model directory" + ) + parser.add_argument( + "--output-pipeline-parallel", type=int, required=True, help="Output model pipeline parallelism" + ) + parser.add_argument( + "--tensor-parallel", type=int, required=True, help="Model tensor parallel size", + ) + parser.add_argument( + "--num-layers-per-pp-rank", type=int, default=None, nargs="*", help="Specify this for uneven pipeline parallel split", + ) + + args = parser.parse_args() + + f = None + if args.input_pipeline_parallel == 1 and args.output_pipeline_parallel > 1: + f = split + elif args.input_pipeline_parallel > 1 and args.output_pipeline_parallel == 1: + f = combine + else: + raise NotImplementedError("Only pipeline parallel 1 to N and N to 1 are supported") + + f(args.input, args.output, args.input_pipeline_parallel, args.output_pipeline_parallel, args.tensor_parallel, args.num_layers_per_pp_rank) + + print("done.") diff --git a/examples/multimodal/nvlm/pretrain_blend.yaml b/examples/multimodal/nvlm/pretrain_blend.yaml new file mode 100644 index 000000000..fbbcc5438 --- /dev/null +++ b/examples/multimodal/nvlm/pretrain_blend.yaml @@ -0,0 +1,28 @@ +__module__: megatron.energon +__class__: Metadataset +splits: + train: + datasets: + - weight: 0.579 # Datasets are weighted according to their size. Weights sum up to 1. + path: + subflavors: + augmentation: False + + - weight: 0.02 + path: + subflavors: + augmentation: False + + - weight: 0.01 + path: + subflavors: + augmentation: False + + # Please refer to Table 4 in https://arxiv.org/pdf/2409.11402 for full list of pretrain datasets. + # Please refer to https://nvidia.github.io/Megatron-Energon/data_prep.html on preparing datasets in the Megatron Energon format. + val: + datasets: + - weight: 1. + path: + subflavors: + augmentation: False diff --git a/examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh b/examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh new file mode 100644 index 000000000..320c7ad3f --- /dev/null +++ b/examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh @@ -0,0 +1,158 @@ +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. 
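+# Launch mode is selected by the BATCH environment variable: BATCH=0 runs the
+# script interactively via torchrun on 8 GPUs, otherwise it is submitted through
+# srun (fill in the container image and mounts below first). A rough interactive
+# invocation, assuming WORKSPACE, the tokenizer path and the combined pretrained
+# checkpoint below are set up:
+#
+#   BATCH=0 bash examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh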
+ +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export TOKENIZERS_PARALLELISM="false" + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="mcore-qwen20-72b-internvit-${DATETIME}" +else + MODEL_NAME="mcore-qwen20-72b-internvit" +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +CHECKPOINT_DIR="${WORKSPACE}/combined-qwen2.0-72b-instruct-internvit-6b-448px-1.5-tp8-te" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/pretrain_blend.yaml" + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + AD=0.0 + HD=0.0 + LI=1 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +else + MBZ=1 + BZ=2048 + NW=8 + AD=0.1 + HD=0.1 + LI=5 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +fi + +SEQ_LEN=256 # Image embeddings sequence length. +DECODER_SEQ_LEN=512 # Language model sequence length. +MAX_POS_EMBED=512 + + +OPTIONS=" \ + --use-checkpoint-args \ + --exit-duration-in-mins 230 \ + --disable-bias-linear \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model ${WORKSPACE}/ \ + --tokenizer-prompt-format qwen2p0 \ + --transformer-impl transformer_engine \ + --normalization RMSNorm \ + --norm-epsilon 1e-06 \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --attention-softmax-in-fp32 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --untie-embeddings-and-output-weights \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --tensor-model-parallel-size 8 \ + --pipeline-model-parallel-size 1 \ + --num-layers 80 \ + --hidden-size 8192 \ + --ffn-hidden-size 29568 \ + --add-qkv-bias \ + --num-attention-heads 64 \ + --use-distributed-optimizer \ + --use-te \ + --num-workers ${NW} \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings 32768 \ + --train-samples 122880000 \ + --lr-decay-samples 25600000 \ + --lr-warmup-samples 83200 \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --lr 1e-4 \ + --min-lr 2.5e-5 \ + --lr-decay-style cosine \ + --log-interval ${LI} \ + --eval-iters 10 \ + --eval-interval 500 \ + --data-path ${DATA_TRAIN} \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --save-interval 5000 \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --split 100,0,0 \ + --clip-grad 10.0 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --bf16 \ + --eod-mask-loss \ + --freeze-ViT \ + --freeze-LM \ + --patch-dim 14 \ + --img-h 448 \ + --img-w 448 \ + --dataloader-type external \ + --tensorboard-dir ${TENSORBOARD_DIR} \ + --language-model-type qwen2.0_72B \ + ${EXTRA_ARGS} \ + --allow-missing-vision-projection-checkpoint \ + --vision-model-type internvit \ + --disable-vision-class-token \ + --log-params-norm \ + --log-num-zeros-in-grad \ + --ckpt-format torch \ + --pixel-shuffle \ + --image-tag-type nvlm +" + + +export NVTE_APPLY_QK_LAYER_SCALING=0 +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun 
-l --verbose \ + --container-image \ + --container-mounts "" \ + --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh b/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh new file mode 100644 index 000000000..c36cb0599 --- /dev/null +++ b/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh @@ -0,0 +1,154 @@ +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export TOKENIZERS_PARALLELISM="false" + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="mcore-nous-yi34b-internvit-mlp-${DATETIME}" +else + MODEL_NAME="mcore-nous-yi34b-internvit-mlp" +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +LOAD_NAME="combined-yi-34b-internvit-tp8-mcore" +CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/pretrain_blend.yaml" + + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + LI=1 + AD=0.0 + HD=0.0 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +else + MBZ=1 + BZ=2048 + NW=8 + LI=5 + AD=0.1 + HD=0.1 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +fi + +SEQ_LEN=256 # Image embeddings sequence length. +DECODER_SEQ_LEN=512 # Language model sequence length. +MAX_POS_EMBED=512 + + +OPTIONS=" \ + --swiglu \ + --use-distributed-optimizer \ + --num-workers ${NW} \ + --num-layers 60 \ + --hidden-size 7168 \ + --normalization RMSNorm \ + --num-attention-heads 56 \ + --exit-duration-in-mins 230 \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 20480 \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model ${WORKSPACE}/ \ + --tokenizer-prompt-format nvlm-yi-34b \ + --vocab-size 64000 \ + --make-vocab-size-divisible-by 1 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 5000000 \ + --disable-bias-linear \ + --tensor-model-parallel-size 8 \ + --language-model-type yi-34b \ + --vision-model-type internvit \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --train-samples 122880000 \ + --lr-decay-samples 25600000 \ + --lr-warmup-samples 83200 \ + --lr 1e-4 \ + --min-lr 2.5e-5 \ + --lr-decay-style cosine \ + --clip-grad 10.0 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --eod-mask-loss \ + --bf16 \ + --tensorboard-dir=${TENSORBOARD_DIR} \ + --freeze-LM \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --data-path ${DATA_TRAIN} \ + --dataloader-type external \ + --split 100,0,0 \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --log-interval ${LI} \ + --save-interval 2000 \ + --eval-interval 500 \ + --eval-iters 10 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + ${EXTRA_ARGS} \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --allow-missing-vision-projection-checkpoint \ + --disable-vision-class-token \ + --use-te \ + --use-checkpoint-args \ + --ckpt-format torch \ + 
--pixel-shuffle \ + --image-tag-type nvlm + " + +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} +export NVTE_APPLY_QK_LAYER_SCALING=0 + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun -l --verbose \ + --container-image \ + --container-mounts "" \ + --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh b/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh new file mode 100755 index 000000000..35cd90409 --- /dev/null +++ b/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh @@ -0,0 +1,141 @@ +#!/bin/bash + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 +export TOKENIZERS_PARALLELISM="false" + +INPUT_IMAGE_PATH="placeholder" +GROUNDTRUTH_PATH="placeholder" + +USE_TILING=0 +USE_PIXEL_SHUFFLE_ONLY=0 + +while [[ $# -gt 0 ]]; do + case $1 in + --input-image-path) + INPUT_IMAGE_PATH="$2" + shift + shift + ;; + -o|--output-path) + OUTPUT_PATH="$2" + shift + shift + ;; + -m|--model-path) + MODEL_PATH="$2" + shift + shift + ;; + --task) + TASK="$2" + shift + shift + ;; + -g|--gt-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + --use-tiling) + USE_TILING=1 + shift + shift + ;; + --use-pixel-shuffle-only) + USE_PIXEL_SHUFFLE_ONLY=1 + shift + shift + ;; + -*|--*) + echo "Invalid option $1" + exit 1 + ;; + esac +done + +# Please modify these as needed. +NUM_PARTITIONS=0 +START=0 +END=0 + +SEQ_LEN=1024 # Image embeddings sequence length. +DECODER_SEQ_LEN=8192 # Language model sequence length. +MAX_POS_EMBED=8192 + +# Additional arguments. +EXTRA_ARGS="" + +if [[ $USE_TILING -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle --use-tiling --max-num-tiles 6 --use-thumbnail --use-tile-tags" + SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). 
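+    # Sizing: a 448x448 tile with patch size 14 yields (448/14)^2 = 1024 patch
+    # embeddings; pixel shuffle reduces that by 4x to 256, and the remaining 5
+    # positions are the tile tag embeddings, giving the 261 used above.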
+fi + +if [[ $USE_PIXEL_SHUFFLE_ONLY -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle" + SEQ_LEN=256 +fi + +for PARTITION_ID in $( eval echo {$START..$END} ) +do + torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ + --attention-softmax-in-fp32 \ + --no-masked-softmax-fusion \ + --swiglu \ + --num-layers 80 \ + --hidden-size 8192 \ + --normalization RMSNorm \ + --norm-epsilon 1e-06 \ + --num-attention-heads 64 \ + --exit-on-missing-checkpoint \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 29568 \ + --load ${MODEL_PATH} \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model \ + --tokenizer-prompt-format qwen2p0 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --disable-bias-linear \ + --add-qkv-bias \ + --tensor-model-parallel-size 8 \ + --pipeline-model-parallel-size 1 \ + --language-model-type qwen2.0_72B \ + --vision-model-type internvit \ + --micro-batch-size 1 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --bf16 \ + --freeze-LM \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --use-te \ + --transformer-impl transformer_engine \ + --use-checkpoint-args \ + --out-seq-length 16 \ + --temperature 1.0 \ + --patch-dim 14 \ + --seed 1234 \ + --top_k 1 \ + --no-load-rng \ + --no-load-optim \ + --num-partitions ${NUM_PARTITIONS} \ + --partition-id ${PARTITION_ID} \ + --output-path ${OUTPUT_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + --disable-vision-class-token \ + --input-image-path ${INPUT_IMAGE_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + ${EXTRA_ARGS} \ + --task ${TASK} \ + --image-tag-type nvlm \ + --ckpt-format torch +done diff --git a/examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh b/examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh new file mode 100644 index 000000000..0437e4c16 --- /dev/null +++ b/examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh @@ -0,0 +1,140 @@ +#!/bin/bash + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 + +INPUT_IMAGE_PATH="placeholder" +GROUNDTRUTH_PATH="placeholder" + +USE_TILING=0 +USE_PIXEL_SHUFFLE_ONLY=0 + +while [[ $# -gt 0 ]]; do + case $1 in + --input-image-path) + INPUT_IMAGE_PATH="$2" + shift + shift + ;; + -o|--output-path) + OUTPUT_PATH="$2" + shift + shift + ;; + -m|--model-path) + MODEL_PATH="$2" + shift + shift + ;; + --task) + TASK="$2" + shift + shift + ;; + -g|--gt-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + --use-tiling) + USE_TILING=1 + shift + shift + ;; + --use-pixel-shuffle-only) + USE_PIXEL_SHUFFLE_ONLY=1 + shift + shift + ;; + -*|--*) + echo "Invalid option $1" + exit 1 + ;; + esac +done + +# Please modify these as needed. +NUM_PARTITIONS=0 +START=0 +END=0 + +SEQ_LEN=1024 # Image embeddings sequence length. +DECODER_SEQ_LEN=8192 # Language model sequence length. +MAX_POS_EMBED=8192 + +# Additional arguments. +EXTRA_ARGS="" + +if [[ $USE_TILING -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle --use-tiling --max-num-tiles 6 --use-thumbnail --use-tile-tags" + SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). 
+fi + +if [[ $USE_PIXEL_SHUFFLE_ONLY -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle" + SEQ_LEN=256 +fi + +for PARTITION_ID in $( eval echo {$START..$END} ) +do + torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ + --attention-softmax-in-fp32 \ + --no-masked-softmax-fusion \ + --swiglu \ + --num-layers 60 \ + --hidden-size 7168 \ + --normalization RMSNorm \ + --num-attention-heads 56 \ + --exit-on-missing-checkpoint \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 20480 \ + --load ${MODEL_PATH} \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model \ + --tokenizer-prompt-format nvlm-yi-34b \ + --vocab-size 64000 \ + --make-vocab-size-divisible-by 1 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 5000000 \ + --disable-bias-linear \ + --tensor-model-parallel-size 8 \ + --pipeline-model-parallel-size 1 \ + --language-model-type yi-34b \ + --vision-model-type internvit \ + --micro-batch-size 1 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --bf16 \ + --freeze-LM \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --use-te \ + --transformer-impl transformer_engine \ + --use-checkpoint-args \ + --out-seq-length 16 \ + --temperature 1.0 \ + --patch-dim 14 \ + --seed 1234 \ + --top_k 1 \ + --no-load-rng \ + --no-load-optim \ + --num-partitions ${NUM_PARTITIONS} \ + --partition-id ${PARTITION_ID} \ + --output-path ${OUTPUT_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + --disable-vision-class-token \ + --input-image-path ${INPUT_IMAGE_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + ${EXTRA_ARGS} \ + --task ${TASK} \ + --image-tag-type nlvm \ + --ckpt-format torch +done diff --git a/examples/multimodal/nvlm/sft_34b_internvit.sh b/examples/multimodal/nvlm/sft_34b_internvit.sh new file mode 100644 index 000000000..3d585d8d3 --- /dev/null +++ b/examples/multimodal/nvlm/sft_34b_internvit.sh @@ -0,0 +1,160 @@ +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NCCL_ALGO=^NVLS +export TOKENIZERS_PARALLELISM="false" + + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="mcore-nous-yi34b-internvit-mlp-sft-${DATETIME}" +else + MODEL_NAME="mcore-nous-yi34b-internvit-mlp-sft" +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +LOAD_NAME="mcore-nous-yi34b-internvit-mlp" # From pretraining +CHECKPOINT_DIR="${WORKSPACE}/output/${LOAD_NAME}/checkpoints" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/sft_blend.yaml" + + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + LI=1 + AD=0.0 + HD=0.0 + ALLOW_NONDETERMINISTIC=1 + + # Can run out of GPU memory in interactive memory without this. + # This is just for interactive testing purposes. Do not use for proper training. + EXTRA_ARGS=" --freeze-LM" +else + MBZ=1 + BZ=128 + NW=2 + LI=5 + AD=0.0 + HD=0.0 + ALLOW_NONDETERMINISTIC=1 + + EXTRA_ARGS="" +fi + +SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). +DECODER_SEQ_LEN=3200 # Language model sequence length. 
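+# Rough sizing assumption: with tiling enabled below (up to 6 tiles plus a
+# thumbnail, each contributing up to 261 image-embedding positions, i.e. roughly
+# 1800 in total), the 3200-token decoder sequence still leaves room for the
+# conversation text.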
+MAX_POS_EMBED=3200 + +OPTIONS=" \ + --swiglu \ + --use-distributed-optimizer \ + --num-workers ${NW} \ + --num-layers 60 \ + --hidden-size 7168 \ + --normalization RMSNorm \ + --num-attention-heads 56 \ + --exit-duration-in-mins 230 \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 20480 \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model ${WORKSPACE}/ \ + --tokenizer-prompt-format nvlm-yi-34b \ + --vocab-size 64000 \ + --make-vocab-size-divisible-by 1 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 5000000 \ + --disable-bias-linear \ + --tensor-model-parallel-size 8 \ + --language-model-type yi-34b \ + --vision-model-type internvit \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --train-samples 30000000 \ + --lr-decay-samples 25600000 \ + --lr-warmup-samples 83200 \ + --lr 2e-6 \ + --min-lr 2.5e-7 \ + --lr-decay-style cosine \ + --split 100,0,0 \ + --clip-grad 10 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --eod-mask-loss \ + --bf16 \ + --tensorboard-dir=${TENSORBOARD_DIR} \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --data-path ${DATA_TRAIN} \ + --dataloader-type external \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --log-interval ${LI} \ + --load ${FINETUNE_DIR} \ + --save ${FINETUNE_DIR} \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --save-interval 5000 \ + --eval-interval 500 \ + --eval-iters 10 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + ${EXTRA_ARGS} \ + --disable-vision-class-token \ + --use-te \ + --ckpt-format torch \ + --pixel-shuffle \ + --use-tiling \ + --max-num-tiles 6 \ + --use-thumbnail \ + --use-tile-tags \ + --image-tag-type nvlm + " + +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} +export NVTE_APPLY_QK_LAYER_SCALING=0 + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun -l --verbose \ + --container-image \ + --container-mounts "" \ + --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/nvlm/sft_blend.yaml b/examples/multimodal/nvlm/sft_blend.yaml new file mode 100644 index 000000000..56c8230a2 --- /dev/null +++ b/examples/multimodal/nvlm/sft_blend.yaml @@ -0,0 +1,23 @@ +__module__: megatron.energon +__class__: Metadataset +splits: + train: + datasets: + - weight: 0.01 # # Datasets are weighted according to their size. Weights sum up to 1. + path: + subflavors: + augmentation: False + + - weight: 0.02 + path: + subflavors: + augmentation: False + + # Please refer to Table 6 in https://arxiv.org/pdf/2409.11402 for full list of SFT datasets. + # Please refer to https://nvidia.github.io/Megatron-Energon/data_prep.html on preparing datasets in the Megatron Energon format. + val: + datasets: + - weight: 1. 
+ path: + subflavors: + augmentation: False diff --git a/examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh b/examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh new file mode 100644 index 000000000..adb1d1b14 --- /dev/null +++ b/examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh @@ -0,0 +1,165 @@ +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NCCL_ALGO=^NVLS +export TOKENIZERS_PARALLELISM="false" + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="mcore-qwen20-72b-internvit-sft-${DATETIME}" +else + MODEL_NAME="mcore-qwen20-72b-internvit-sft" +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR="${OUTPUT}/checkpoints" +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +# From pretraining. The pretraining checkpoint must be manually split to 4 pipeline parallel stages. +# Please refer to README.md and run examples/multimodal/nvlm/pp_checkpoint_converter.py. +LOAD_NAME="mcore-qwen20-72b-internvit-pp4" + +CHECKPOINT_DIR="${WORKSPACE}/output/${LOAD_NAME}/checkpoints" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/sft_blend.yaml" + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + AD=0.0 + HD=0.0 + LI=1 + # This is just for interactive testing purposes. Do not use for proper training. + EXTRA_ARGS="--freeze-LM" + ALLOW_NONDETERMINISTIC=1 +else + MBZ=1 + BZ=256 + NW=8 + AD=0.0 + HD=0.0 + LI=5 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +fi + +SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). +DECODER_SEQ_LEN=3200 # Language model sequence length. 
+MAX_POS_EMBED=8192 + +OPTIONS=" \ + --use-checkpoint-args \ + --exit-duration-in-mins 230 \ + --disable-bias-linear \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model ${WORKSPACE}/ \ + --tokenizer-prompt-format qwen2p0 \ + --transformer-impl transformer_engine \ + --normalization RMSNorm \ + --norm-epsilon 1e-06 \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --attention-softmax-in-fp32 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --untie-embeddings-and-output-weights \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --tensor-model-parallel-size 8 \ + --pipeline-model-parallel-size 4 \ + --num-layers 80 \ + --hidden-size 8192 \ + --ffn-hidden-size 29568 \ + --add-qkv-bias \ + --num-attention-heads 64 \ + --use-distributed-optimizer \ + --use-te \ + --num-workers ${NW} \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings 32768 \ + --train-samples 122880000 \ + --lr-decay-samples 25600000 \ + --lr-warmup-samples 83200 \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --lr 2e-6 \ + --min-lr 2.5e-7 \ + --lr-decay-style cosine \ + --log-interval ${LI} \ + --eval-iters 10 \ + --eval-interval 500 \ + --data-path ${DATA_TRAIN} \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --save-interval 10000 \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --split 100,0,0 \ + --clip-grad 10.0 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --bf16 \ + --eod-mask-loss \ + --freeze-ViT \ + --patch-dim 14 \ + --img-h 448 \ + --img-w 448 \ + --dataloader-type external \ + --tensorboard-dir ${TENSORBOARD_DIR} \ + --language-model-type qwen2.0_72B \ + ${EXTRA_ARGS} \ + --vision-model-type internvit \ + --disable-vision-class-token \ + --log-params-norm \ + --log-num-zeros-in-grad \ + --ckpt-format torch \ + --pixel-shuffle \ + --use-tiling \ + --max-num-tiles 6 \ + --use-thumbnail \ + --use-tile-tags \ + --image-tag-type nvlm +" + + +export NVTE_APPLY_QK_LAYER_SCALING=0 +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun -l --verbose \ + --container-image \ + --container-mounts "" \ + --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/pretrain_mistral_clip.sh b/examples/multimodal/pretrain_mistral_clip.sh index b06dbfe53..ea1f741ae 100755 --- a/examples/multimodal/pretrain_mistral_clip.sh +++ b/examples/multimodal/pretrain_mistral_clip.sh @@ -92,8 +92,9 @@ OPTIONS=" \ --log-interval ${LI} \ --eval-iters 10 \ --eval-interval 1000 \ - --tokenizer-type HuggingFaceTokenizer \ + --tokenizer-type MultimodalTokenizer \ --tokenizer-model ${WORKSPACE}/${TOKENIZER_MODEL} \ + --tokenizer-prompt-format mistral \ --data-path ${DATA_TRAIN} \ --prompt-path ${SOURCE}/examples/multimodal/manual_prompts.json \ --save-interval 1000 \ @@ -123,6 +124,7 @@ OPTIONS=" \ ${EXTRA_ARGS} \ --distributed-timeout-minutes 60 \ --allow-missing-vision-projection-checkpoint \ + --ckpt-format torch " export NVTE_APPLY_QK_LAYER_SCALING=0 diff --git 
a/examples/multimodal/run_text_generation.py b/examples/multimodal/run_text_generation.py index 37d9072f0..f4bb5025f 100644 --- a/examples/multimodal/run_text_generation.py +++ b/examples/multimodal/run_text_generation.py @@ -1,13 +1,9 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. """Generate text using a vision language model.""" -import glob -import itertools import json import logging import os -import re import sys -from collections import defaultdict from functools import partial # Add megatron to the path. @@ -15,30 +11,18 @@ os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) ) -import datasets -import numpy as np import torch import yaml from config import EvaluationConfig -from dataset_helpers import tokenizer_image_token -from image_processing import get_visual_transform -from MMMU.mmmu.utils.data_utils import ( - CAT_SHORT2LONG, - construct_prompt, - load_yaml, - process_single_sample, -) -from MMMU.mmmu.utils.eval_utils import parse_multi_choice_response +from evaluation_datasets import get_evaluation_dataset from model import model_provider from multimodal_args import add_multimodal_extra_args -from PIL import Image -from torchvision.io import read_video from megatron.core import parallel_state -from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN_INDEX from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings from megatron.inference.text_generation.api import generate_and_post_process from megatron.inference.text_generation.forward_step import ForwardStep +from megatron.inference.text_generation.communication import broadcast_int_list from megatron.training import get_args, get_model, get_tokenizer, print_rank_0 from megatron.training.checkpointing import load_checkpoint from megatron.training.initialize import initialize_megatron @@ -64,20 +48,23 @@ def add_text_generation_args(parser): group.add_argument( "--task", type=str, - choices=["captioning", "TextVQA", "VQAv2", "ChartQA", "MMMU", "VideoMME"], + choices=[ + "captioning", + "TextVQA", + "VQAv2", + "ChartQA", + "MMMU", + "VideoMME", + "OCRBench", + "MathVista", + "AI2D", + ], help="Generation task to run", ) group.add_argument( "--num-samples-per-partition", type=int, default=0, help="Number of samples per partition" ) - group.add_argument( - "--prompt-format", - type=str, - default="mistral", - choices=["llama3", "mistral"], - help="Prompting format to use", - ) - group.add_argument("--config-path", type=str, help="Config file to use.") + group.add_argument("--config-path", type=str, help="Evaluation config file to use.") # Add common multimodal arguments needed for e.g. building the model. 
parser = add_multimodal_extra_args(parser) @@ -85,410 +72,6 @@ def add_text_generation_args(parser): return parser -def _get_partition_bounds( - total_num_samples, num_samples_per_partition, num_partitions, partition_id -): - if num_samples_per_partition == 0: - samples_per_partition = [ - int(x) for x in np.linspace(0, total_num_samples, num_partitions + 1) - ] - return samples_per_partition[partition_id], samples_per_partition[partition_id + 1] - return num_samples_per_partition * partition_id, num_samples_per_partition * (partition_id + 1) - - -class VQADataset(torch.utils.data.Dataset): - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - ): - samples = json.load(open(gt_path, encoding='utf-8')) - if "data" in samples: - samples = samples["data"] - - # Optionally, process only a subset of the input files. - if num_partitions > 0: - lb, ub = _get_partition_bounds( - len(samples), num_samples_per_partition, num_partitions, partition_id - ) - samples = samples[lb:ub] - - self._keys = keys - self._samples = samples - self._input_image_path = input_image_path - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - - def __len__(self): - return len(self._samples) - - def __getitem__(self, idx): - sample = self._samples[idx] - - img_file = "{}/{}".format(self._input_image_path, sample[self._keys["image_id"]]) - if not os.path.exists(img_file): - img_file += ".jpg" - - if not os.path.exists(img_file): - img_file = img_file.replace('.jpg', '.png') - - img = Image.open(img_file) - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - ) - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - sample_id = idx - if "sample_id" in self._keys: - sample_id = sample[self._keys["sample_id"]] - - metadata = "" # Not used. - - return ( - torch.stack(imgs), - tile_count, - sample_id, - sample[self._keys["question"]], - sample[self._keys["answer"]], - metadata, - ) - - -class CaptioningDataset(torch.utils.data.Dataset): - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - ): - image_files = sorted(glob.glob(input_image_path + "/*")) - - # Optionally, process only a subset of the input files. 
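For reference, the _get_partition_bounds helper removed here split an evaluation set into contiguous partitions, either evenly via numpy.linspace or with a fixed number of samples per partition. A standalone sketch of the same logic (names follow the removed code):

import numpy as np


def get_partition_bounds(total_num_samples, num_samples_per_partition, num_partitions, partition_id):
    """Return the (start, end) sample indices of one partition."""
    if num_samples_per_partition == 0:
        # Split the dataset into num_partitions roughly equal contiguous chunks.
        bounds = [int(x) for x in np.linspace(0, total_num_samples, num_partitions + 1)]
        return bounds[partition_id], bounds[partition_id + 1]
    # Fixed partition size.
    return num_samples_per_partition * partition_id, num_samples_per_partition * (partition_id + 1)


# 10 samples over 3 partitions -> (0, 3), (3, 6), (6, 10).
print([get_partition_bounds(10, 0, 3, i) for i in range(3)])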
- if num_partitions > 0: - lb, ub = _get_partition_bounds( - len(image_files), num_samples_per_partition, num_partitions, partition_id - ) - image_files = image_files[lb:ub] - - gts = json.load(open(gt_path)) - answers = defaultdict(list) - for gt in gts["annotations"]: - answers[gt["image_id"]].append(gt['caption']) - - self._image_files = image_files - self._answers = answers - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - - def __len__(self): - return len(self._image_files) - - def __getitem__(self, idx): - img_file = self._image_files[idx] - image_id = int(img_file.split("_")[-1].split(".")[0]) - - img = Image.open(img_file) - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - ) - - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - question = "" # Fixed for all samples. - metadata = "" # Not used. - - return torch.stack(imgs), tile_count, image_id, question, self._answers[image_id], metadata - - -class MMMUDataset(torch.utils.data.Dataset): - def __init__( - self, - input_image_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - single_image, - ): - # The following downloads the MMMU dataset from HuggingFace and uses the API from the MMMU github repo to run MMMU evaluation. - all_mmmu_datasets = [] - - hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] - assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." - - for subject in CAT_SHORT2LONG.values(): - # Use a local copy of the dataset if exists (can be faster) or the HF one. - if os.path.exists(input_image_path): - subject_dataset = datasets.load_dataset( - os.path.join(input_image_path, subject), - split=datasets.Split.VALIDATION, - cache_dir=hf_datasets_cache, - verification_mode="no_checks", - ) - else: - subject_dataset = datasets.load_dataset( - "MMMU/MMMU", - subject, - split=datasets.Split.VALIDATION, - cache_dir=hf_datasets_cache, - ) - - all_mmmu_datasets.append(subject_dataset) - - dataset = datasets.concatenate_datasets(all_mmmu_datasets) - - dataset = [s for s in dataset if s['id'].startswith("val")] - - # Optionally, process only a subset of the input files. - if num_partitions > 0: - lb, ub = _get_partition_bounds( - len(dataset), num_samples_per_partition, num_partitions, partition_id - ) - dataset = dataset[lb:ub] - - # Using the LLaVA config from the MMMU repo. - config = load_yaml("examples/multimodal/MMMU/mmmu/configs/llava1.5.yaml") - for k, v in config.items(): - if isinstance(v, list): - assert len(v) == 1, "only one value supported." - config[k] = v[0] - - self._config = config - - self._dataset = dataset - - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._single_image = single_image - - def __len__(self): - return len(self._dataset) - - def __getitem__(self, idx): - sample = self._dataset[idx] - - # Use the single image approach from the MMMU repo. 
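The MMMU dataset construction deleted here followed a simple pattern that is worth keeping in mind when reading the new evaluation_datasets module: load the validation split of every MMMU subject from HuggingFace, concatenate them, and keep only validation ids. A trimmed sketch of that pattern, assuming the datasets package and the CAT_SHORT2LONG mapping from the MMMU repo are available:

import os

import datasets
from MMMU.mmmu.utils.data_utils import CAT_SHORT2LONG  # subject short name -> full name

hf_datasets_cache = os.environ["HF_DATASETS_CACHE"]
assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE."

all_mmmu_datasets = []
for subject in CAT_SHORT2LONG.values():
    # Validation split of each MMMU subject, downloaded from HuggingFace.
    all_mmmu_datasets.append(
        datasets.load_dataset(
            "MMMU/MMMU", subject, split=datasets.Split.VALIDATION, cache_dir=hf_datasets_cache
        )
    )

dataset = datasets.concatenate_datasets(all_mmmu_datasets)
dataset = [s for s in dataset if s["id"].startswith("val")]  # keep validation samples only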
- if self._single_image: - sample = process_single_sample(sample) - sample = construct_prompt(sample, self._config) - - img = sample["image"] - sample_imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - ) - sample_num_tiles = [len(sample_imgs)] - else: - sample = construct_prompt(sample, self._config) - - sample_imgs = [] - sample_num_tiles = [] - - img_indices = re.findall(r"" - - img = sample[img_key] - assert img is not None, f"{img_str} is in prompt but not in sample images" - - # Note: Only replace the current image tag. - sample["final_input_prompt"] = sample["final_input_prompt"].replace( - img_str, "", 1 - ) - - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - adjusted_max_num_tiles, - self._use_thumbnail, - augment=False, - ) # List of tiles. - - sample_imgs.extend(imgs) - sample_num_tiles.append(len(imgs)) - - # Sanity check. - for i in range(1, 8): - assert ( - f"" not in sample["final_input_prompt"] - ), "prompt contains unhandled image tags" - - # MMMU specific metadata. - metadata = {"question_type": sample["question_type"]} - if sample["question_type"] == "multiple-choice": - metadata["index2ans"] = sample["index2ans"] - metadata["all_choices"] = sample["all_choices"] - - prompt = sample['final_input_prompt'] - if self._single_image: - for i in range(8): - prompt = prompt.replace(f"", "") - prompt = f"\n{prompt}" - - tile_count = torch.tensor(sample_num_tiles, dtype=torch.int) - - return ( - torch.stack(sample_imgs), - tile_count, - sample["id"], - prompt, - sample["answer"], - metadata, - ) - - -class VideoMMMEDataset(torch.utils.data.Dataset): - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_frames, - ): - ground_truth_original = json.load(open(gt_path)) - ground_truth = [] - for gt in ground_truth_original: - video_path = gt["url"] - video_path = video_path.replace("https://www.youtube.com/watch?v=", "") - video_path = video_path.replace("https://m.youtube.com/watch?v=", "") - video_path = os.path.join(input_image_path, video_path + ".mp4") - if not os.path.exists(video_path): - continue - gt["video_path"] = video_path - ground_truth.append(gt) - - ground_truth = sorted(ground_truth, key=lambda gt: gt["video_path"]) - print_rank_0(f"Found {len(ground_truth)} videos to process.") - - if num_partitions > 0: - start_idx, end_idx = _get_partition_bounds( - len(ground_truth), num_samples_per_partition, num_partitions, partition_id - ) - ground_truth = ground_truth[start_idx:end_idx] - - self._ground_truth = ground_truth - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._num_frames = num_frames - - def __len__(self): - return len(self._ground_truth) - - def __getitem__(self, idx): - gt = self._ground_truth[idx] - - video, _, _ = read_video(gt["video_path"], start_pts=0, end_pts=None, pts_unit='sec') - video = video.numpy() - selected_frames = torch.linspace(0, video.shape[0] - 1, self._num_frames).long() - video_frames = video[selected_frames] - if self._num_frames == 1: - video_frames = video_frames[None] - - imgs = list( - itertools.chain.from_iterable( - get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, 
- ) - for img in video_frames - ) - ) - - for question in gt["questions"]: - # Very hacky, but we essentially re-create gt holding only the - # question of interest. This is the make this generation script - # compatible with the Video MME evaluation script. - question_dict = { - "video_id": gt["video_id"], - "duration_category": gt["duration_category"], - "video_category": gt["video_category"], - "video_subcategory": gt["video_subcategory"], - "url": gt["url"], - "questions": [question], - } - - num_tiles = torch.tensor([len(imgs)], dtype=torch.int) - - answer = "" - metadata = "" - - return ( - torch.stack(imgs), - num_tiles, - question["question_id"], - question_dict, - answer, - metadata, - ) - - def get_evaluation_dataloader( task, input_image_path, @@ -503,110 +86,24 @@ def get_evaluation_dataloader( partition_id, num_frames, num_workers, + vision_model_type, ): """Build evaluation dataset.""" - if task == "TextVQA": - keys = { - "image_id": "image_id", - "sample_id": "question_id", - "question": "question", - "answer": "answers", - } - - dataset = VQADataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - ) - elif task == "VQAv2": - keys = { - "image_id": "image", - "sample_id": "question_id", - "question": "question", - "answer": "answer", - } - - dataset = VQADataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - ) - elif task == "ChartQA": - keys = {"image_id": "imgname", "question": "query", "answer": "label"} - - dataset = VQADataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - ) - elif task == "captioning": - dataset = CaptioningDataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - ) - elif task == 'MMMU': - # Note: single_image=True uses only one image like in the MMMU repo example. - # single_image=False uses all images in the sample. 
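The removed VideoMMMEDataset sampled a fixed number of frames uniformly from each video before applying the visual transform. A minimal sketch of that frame-selection step, based on the deleted code and assuming torchvision for decoding:

import torch
from torchvision.io import read_video


def sample_frames(video_path: str, num_frames: int) -> torch.Tensor:
    """Uniformly sample num_frames frames from a video, as the removed dataset did."""
    video, _, _ = read_video(video_path, start_pts=0, end_pts=None, pts_unit="sec")
    # Evenly spaced frame indices between the first and the last frame.
    selected = torch.linspace(0, video.shape[0] - 1, num_frames).long()
    return video[selected]  # (num_frames, H, W, C) uint8 tensor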
- dataset = MMMUDataset( - input_image_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - single_image=True, - ) - elif task == "VideoMME": - dataset = VideoMMMEDataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_frames, - ) - else: - raise NotImplementedError(f"unsupported task {task}") + dataset = get_evaluation_dataset( + task, + input_image_path, + gt_path, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_samples_per_partition, + num_partitions, + partition_id, + num_frames, + vision_model_type, + ) dp_rank = parallel_state.get_data_parallel_rank() dp_world_size = parallel_state.get_data_parallel_world_size() @@ -640,25 +137,33 @@ def generate_samples(model, config: EvaluationConfig, print_output): config.partition_id, args.num_frames, args.num_workers, + args.vision_model_type, ) num_img_embeddings_per_tile = get_num_image_embeddings( - args.img_h, args.img_w, args.patch_dim, args.vision_model_type, args.disable_vision_class_token, 1 + args.img_h, + args.img_w, + args.patch_dim, + args.vision_model_type, + args.disable_vision_class_token, + 1, + args.pixel_shuffle, + args.use_tile_tags, ) for idx, (imgs, num_tiles, sample_id, question, answers, metadata) in enumerate(dataloader): imgs = imgs.to("cuda") num_tiles = num_tiles.to("cuda") - prompt = get_prompt(config.task, question, config.prompt_format) + conv = get_conversation(config.task, question) - forward_step = partial(VLMForwardStep, num_img_embeddings_per_tile, imgs, num_tiles) + forward_step = partial(VLMForwardStep, num_img_embeddings_per_tile, imgs, num_tiles, args.decoder_seq_length) if is_first_rank(): resp_sentences, _, _, _ = generate_and_post_process( model, forward_step=forward_step, - prompts=[prompt], + prompts=[conv], tokens_to_generate=config.out_seq_length, top_k_sampling=config.top_k, top_p_sampling=config.top_p, @@ -669,43 +174,62 @@ def generate_samples(model, config: EvaluationConfig, print_output): data_parallel=True, ) - for prompt, generation in zip([prompt], resp_sentences): + for generation in resp_sentences: if isinstance(sample_id, torch.Tensor): sample_id = sample_id.item() - output = {"sample_id": sample_id, "prompt": prompt} + output = {"sample_id": sample_id} output_name = "" if config.task == "captioning": output_name = "caption" - elif config.task in ("TextVQA", "VQAv2", "ChartQA"): + elif config.task in ( + "TextVQA", + "VQAv2", + "ChartQA", + "OCRBench", + "MathVista", + "AI2D", + ): output_name = "answer" elif config.task in ("MMMU"): output_name = "text" elif config.task == "VideoMME": output_name = "response" output = question + else: + raise NotImplementedError("no output name defined for", config.task) - generated = get_generated(generation, config.prompt_format) + prompt, generated = get_prompt_and_generated( + generation, args.tokenizer_prompt_format + ) if config.task == "VideoMME": output["questions"][0][output_name] = generated else: output[output_name] = generated + output["prompt"] = prompt if config.task == "captioning": output["ground_truth"] = answers - elif config.task in ("TextVQA", "VQAv2"): - output["gt_answer"] = [ans for ans in answers] - elif config.task == "ChartQA": - output["gt_answer"] = [answers] + elif config.task in ( + "TextVQA", + "VQAv2", + "ChartQA", + "OCRBench", + "MathVista", + "AI2D", + ): + if isinstance(answers, str): + answers = 
[answers] + output["gt_answer"] = answers + + if len(metadata) > 0: + output.update(metadata) elif config.task == "MMMU": - prediction = generated - if metadata["question_type"] == "multiple-choice": - prediction = parse_multi_choice_response( - generated, metadata["all_choices"], metadata["index2ans"] - ) - - output["prediction"] = prediction + output["prediction"] = generated + output.update(metadata) + else: + raise NotImplementedError("no output processing defined for", config.task) if print_output: print(output) @@ -741,7 +265,6 @@ def get_evaluation_config(): num_partitions=args.num_partitions, partition_id=args.partition_id, num_samples_per_partition=args.num_samples_per_partition, - prompt_format=args.prompt_format, ) # Default output path if not defined... @@ -753,6 +276,7 @@ def get_evaluation_config(): def is_first_rank(): + """First tensor and pipeline parallel rank.""" return ( parallel_state.is_pipeline_first_stage(ignore_virtual=True) and parallel_state.get_tensor_model_parallel_rank() == 0 @@ -760,6 +284,7 @@ def is_first_rank(): def get_output_path(config, dp_rank): + """Generation output path.""" return ( f"{config.output_path}-{config.task}-dprank={dp_rank}-partition={config.partition_id}.jsonl" ) @@ -792,6 +317,7 @@ def __init__( num_img_embeddings_per_tile, images, num_tiles, + decoder_seq_length, model, max_batch_size, max_sequence_length, @@ -803,6 +329,18 @@ def __init__( super().__init__(model, max_batch_size, max_sequence_length + num_img_embeddings) self._images = images self._num_tiles = num_tiles + self._num_img_embeddings = num_img_embeddings + self.decoder_seq_length = decoder_seq_length + + self._recv_only_vision_embeds = False + pp_rank = parallel_state.get_pipeline_model_parallel_rank() + # Checks if the previous stage only has a vision encoder, and that the current stage has part of the LM decoder. + # In this case, the current stage should only receive vision embeddings. + if pp_rank > 0: + self._recv_only_vision_embeds = parallel_state.is_inside_encoder(pp_rank - 1) and (not parallel_state.is_inside_decoder(pp_rank - 1)) and parallel_state.is_inside_decoder() + + # Checks if the current stage only has a vision encoder + self._encoder_only = parallel_state.is_inside_encoder() and not parallel_state.is_inside_decoder() def _forward(self, tokens, position_ids, attention_mask): return self.model( @@ -816,62 +354,79 @@ def _forward(self, tokens, position_ids, attention_mask): ) def __call__(self, tokens, position_ids, attention_mask): - logits = super().__call__(tokens, position_ids, attention_mask) + num_image_tokens = (tokens == self.model.image_token_index).sum().item() + num_tokens = tokens.size(1) + recv_buffer_seq_length = None + if num_image_tokens > 0: + # When there are image tokens and this stage only receives vision embeddings, adjust the recv buffer seq length to match the image embeddings sequence length. + # If there are image tokens and this stage receives full embeddings, make sure we compensate for expansion of image tokens. + # Note that this will set a recv_buffer_seq_length for the encoder stage, this length is irrelevant since that recv buffer is never allocated. 
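Written out as a standalone helper, the receive-buffer sizing implemented just below boils down to the following (a readability sketch, not part of the class):

def recv_buffer_seq_length(num_image_tokens, num_tokens, num_img_embeddings,
                           decoder_seq_length, recv_only_vision_embeds):
    """Sketch of the buffer-length rule used by VLMForwardStep.__call__."""
    if num_image_tokens > 0:
        if recv_only_vision_embeds:
            # This stage receives only vision embeddings.
            return num_img_embeddings
        # Full embeddings: account for image-token expansion, capped at the decoder length.
        return min(num_img_embeddings + num_tokens - num_image_tokens, decoder_seq_length)
    if recv_only_vision_embeds:
        # No image tokens: the encoder does not run, so nothing is received.
        return 0
    return None  # default buffer sizing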
+ if self._recv_only_vision_embeds: + recv_buffer_seq_length = self._num_img_embeddings + else: + recv_buffer_seq_length = min(self._num_img_embeddings + num_tokens - num_image_tokens, self.decoder_seq_length) + elif self._recv_only_vision_embeds: + # If this stage only receives vision embeddings and there are no image tokens we won't run the encoder and therefore shouldn't try to recv. + recv_buffer_seq_length = 0 + + # If the pipeline stage only has a vision encoder, then it only needs to run when there are image tokens + if not (self._encoder_only and num_image_tokens == 0): + output = super().__call__(tokens, position_ids, attention_mask, recv_buffer_seq_length=recv_buffer_seq_length) + else: + output = None + if isinstance(output, tuple): + logits, _ = output + else: + logits = output # On the first inference iteration, we compute image tokens. - # Update the sequence length offset by the number of image tokens. - num_image_tokens = (tokens == -200).sum().item() - num_tokens = tokens.size(1) + # On every PP stage(although inference params should only matter for decoder), + # update the sequence length offset by the number of image tokens. if num_tokens > 1 and num_image_tokens > 0: - self.inference_params.sequence_len_offset += ( - self.inference_params.key_value_memory_dict["image_tokens_count"] - num_image_tokens - ) + if "image_tokens_count" not in self.inference_params.key_value_memory_dict: + self.inference_params.key_value_memory_dict["image_tokens_count"] = self._num_img_embeddings + + if self._num_img_embeddings + num_tokens - num_image_tokens > self.decoder_seq_length: + self.inference_params.sequence_len_offset += self.decoder_seq_length - num_tokens + else: + self.inference_params.sequence_len_offset += ( + self.inference_params.key_value_memory_dict["image_tokens_count"] - num_image_tokens + ) return logits -def get_prompt(task, question, prompt_format): - """Get a prompt for the evaluation task.""" +def get_conversation(task, question): + """Get a conversation for a given task and evaluation question.""" + conversation = [] + + # In all cases, the tokenizer adds possible header tokens for the assistant. if task == "captioning": - if prompt_format == "llama3": - prompt = "<|start_header_id|>system<|end_header_id|>\n\nA chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n\nProvide a one-sentence caption for provided image.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n" - elif prompt_format == "mistral": - prompt = ( - "[INST] Give a short and clear explanation of the subsequent image. [/INST]" - ) - elif task == "TextVQA": - if prompt_format == "llama3": - prompt = "<|start_header_id|>system<|end_header_id|>\n\nAnswer the questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n\n{}\nAnswer the question using a single word or phrase.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n".format( - question - ) - elif prompt_format == "mistral": - prompt = "[INST] \n{}\nAnswer the question using a single word or phrase. 
[/INST]".format( - question - ) - elif task == "VQAv2": - if prompt_format == "llama3": - prompt = "<|start_header_id|>system<|end_header_id|>\n\nAnswer the questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n\n{}\nAnswer the question using a single word or phrase.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n".format( - question - ) - elif prompt_format == "mistral": - prompt = "[INST] \n{}\nAnswer the question using a single word or phrase. [/INST]".format( - question - ) - elif task == "ChartQA": - if prompt_format == "llama3": - prompt = "<|start_header_id|>system<|end_header_id|>\n\nAnswer the questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n\n{}\nAnswer the question using a single word or phrase.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n".format( - question - ) - elif prompt_format == "mistral": - prompt = "[INST] \n{}\nAnswer the question using a single word or phrase. [/INST]".format( - question - ) + conversation = [ + {"role": "system", "content": "Answer the questions."}, + { + "role": "user", + "content": "\nProvide a one-sentence caption for provided image.", + }, + ] + elif task in ("TextVQA", "VQAv2", "ChartQA"): + conversation = [ + {"role": "system", "content": "Answer the questions."}, + { + "role": "user", + "content": f"\n{question}\nAnswer the question using a single word or phrase.", + }, + ] + elif task in ("OCRBench", "MathVista", "AI2D"): + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": f"\n{question}"}, + ] elif task == "MMMU": - if prompt_format == "llama3": - prompt = "<|start_header_id|>system<|end_header_id|>\n\nAnswer the questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n" - prompt = prompt.format(question) - elif prompt_format == "mistral": - prompt = "[INST] {} [/INST]".format(question) + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": question}, + ] elif task == "VideoMME": q = ( "Select the best answer to the following multiple-choice " @@ -884,70 +439,55 @@ def get_prompt(task, question, prompt_format): q += question["questions"][0]["choices"][2] + "\n" q += question["questions"][0]["choices"][3] + "\n" - if prompt_format == "llama3": - prompt = "<|start_header_id|>system<|end_header_id|>\n\nAnswer the questions.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n\n{}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n" - prompt = prompt.format(q) - elif prompt_format == "mistral": - prompt = "[INST] \n{} [/INST]".format(q) + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": f"\n{question}"}, + ] - return prompt + return conversation -def get_generated(prompt_and_generation, prompt_format): +def get_prompt_and_generated(prompt_and_generation, prompt_format): """Strip prompt and other unnecessary text from generation.""" if prompt_format == "llama3": - generated = prompt_and_generation.split( - "<|start_header_id|>assistant<|end_header_id|>\n\n" - )[-1] + splitted = prompt_and_generation.split("<|start_header_id|>assistant<|end_header_id|>\n\n") + prompt = splitted[0] + generated = splitted[1] generated = generated.split("<|eot_id|>")[0] elif prompt_format == "mistral": - generated = prompt_and_generation.split("[/INST]")[-1] + splitted = prompt_and_generation.split("[/INST]") + prompt = splitted[0] + generated = splitted[1] generated = 
generated.split("")[0] - + elif prompt_format == "chatml": + splitted = prompt_and_generation.split("<|im_start|> assistant\n") + prompt = splitted[0] + generated = splitted[1] + generated = generated.split("<|im_end|>")[0] + elif prompt_format in ("nvlm-yi-34b", "qwen2p0"): + splitted = prompt_and_generation.split("<|im_start|>assistant\n") + prompt = splitted[0] + generated = splitted[1] + generated = generated.split("<|im_end|>")[0] + + # Remove possible garbage. generated = generated.strip() generated = generated.split("\n\n")[0] generated = generated.split("\n")[0] - return generated - - -def patch_tokenizer(args): - """Patch tokenizer with image token support.""" - - def _decorate_tokenize(f): - # When tokenizing, replace with the image token index (-200) - def wrapper(prompt): - tokens = tokenizer_image_token(args, prompt, f) - - return tokens - - return wrapper - - def _decorate_detokenize(f): - # When detokenizing, skip image token index. - def wrapper(tokens): - tokens = np.array(tokens) - tokens = tokens[tokens != IMAGE_TOKEN_INDEX] - tokens = tokens.tolist() - - return f(tokens) - - return wrapper - - tokenizer = get_tokenizer() - tokenizer.tokenize = _decorate_tokenize(tokenizer.tokenize) - tokenizer.detokenize = _decorate_detokenize(tokenizer.detokenize) + return prompt, generated def main(): """Vision language model text generation.""" - logging.getLogger(__name__).warning("Models using pipeline parallelism are not supported yet.") - initialize_megatron(extra_args_provider=add_text_generation_args) - args = get_args() + if torch.distributed.get_rank() == 0: + logging.getLogger(__name__).warning( + "Models using pipeline parallelism are not supported yet." + ) - patch_tokenizer(args) # Make the tokenizer support image tokens. + args = get_args() def wrapped_model_provider(pre_process, post_process): return model_provider(pre_process, post_process, parallel_output=False) diff --git a/examples/multimodal/sft_mistral_clip.sh b/examples/multimodal/sft_mistral_clip.sh index 46fc99605..8a083cc1f 100755 --- a/examples/multimodal/sft_mistral_clip.sh +++ b/examples/multimodal/sft_mistral_clip.sh @@ -97,8 +97,9 @@ OPTIONS=" \ --log-interval ${LI} \ --eval-iters 10 \ --eval-interval 500 \ - --tokenizer-type HuggingFaceTokenizer \ + --tokenizer-type MultimodalTokenizer \ --tokenizer-model ${WORKSPACE}/${TOKENIZER_MODEL} \ + --tokenizer-prompt-format mistral \ --data-path ${DATA_TRAIN} \ --prompt-path ${SOURCE}/examples/multimodal/manual_prompts.json \ --save-interval 500 \ @@ -125,6 +126,7 @@ OPTIONS=" \ --disable-vision-class-token \ ${EXTRA_ARGS} \ --distributed-timeout-minutes 60 \ + --ckpt-format torch " export NVTE_APPLY_QK_LAYER_SCALING=0 diff --git a/examples/multimodal/text_generation_mistral_clip.sh b/examples/multimodal/text_generation_mistral_clip.sh index 6423464e6..ca98ff277 100755 --- a/examples/multimodal/text_generation_mistral_clip.sh +++ b/examples/multimodal/text_generation_mistral_clip.sh @@ -91,8 +91,9 @@ do --max-position-embeddings 4096 \ --no-masked-softmax-fusion \ --load ${MODEL_PATH} \ - --tokenizer-type HuggingFaceTokenizer \ + --tokenizer-type MultimodalTokenizer \ --tokenizer-model ${TOKENIZER_PATH} \ + --tokenizer-prompt-format mistral \ --bf16 \ --micro-batch-size 1 \ --seq-length 2048 \ @@ -112,6 +113,6 @@ do --gt-path ${GROUNDTRUTH_PATH} \ --task ${TASK} \ --disable-vision-class-token \ - --prompt-format mistral \ - --num-frames ${NUM_FRAMES} + --num-frames ${NUM_FRAMES} \ + --ckpt-format torch done diff --git a/examples/multimodal/train.py 
b/examples/multimodal/train.py index 1615531af..5ff2121b3 100644 --- a/examples/multimodal/train.py +++ b/examples/multimodal/train.py @@ -7,26 +7,32 @@ import torch import yaml -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.path.pardir, os.path.pardir))) +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) +) -from dataloader_provider import train_valid_test_dataloaders_provider +from dataloader_provider import train_valid_test_dataloaders_provider, is_first_or_last_stage from model import model_provider from multimodal_args import add_multimodal_extra_args from megatron.core import mpu, tensor_parallel from megatron.core.enums import ModelType -from megatron.core.models.multimodal.llava_model import LLaVAModel -from megatron.core.parallel_state import get_tensor_model_parallel_rank +from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, LLaVAModel +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.parallel_state import ( + get_tensor_model_parallel_rank, + get_pipeline_model_parallel_world_size, + is_pipeline_last_stage, +) from megatron.training import get_args, get_timers, get_tokenizer, pretrain from megatron.training.utils import is_last_rank def get_batch(data_iterator): - """Generate a batch""" - - args = get_args() + """Generate a batch + Note: attn_mask_type in layer_specs.py sets the attention mask. Attention mask is None here. + """ imgs = None tokens = None labels = None @@ -34,6 +40,15 @@ def get_batch(data_iterator): attention_mask = None position_ids = None num_tiles = None + packed_seq_params = None + + args = get_args() + + # Dataloader doesn't run on the middle stages in a pipeline parallel model. + pp_size = get_pipeline_model_parallel_world_size() + if not is_first_or_last_stage(pp_size, args.encoder_pipeline_model_parallel_size): + # Note these are all set to None above. + return tokens, labels, loss_mask, attention_mask, position_ids, imgs, num_tiles # Broadcast data. torch.cuda.nvtx.range_push("get_data") @@ -42,18 +57,41 @@ def get_batch(data_iterator): else: data = None - data_text = tensor_parallel.broadcast_data(["text"], data, torch.int64)["text"] - prompt_len = tensor_parallel.broadcast_data(["prompt_len"], data, torch.int64)["prompt_len"] - target = tensor_parallel.broadcast_data(["target"], data, torch.int64)["target"] + data_text = tensor_parallel.broadcast_data(["tokens"], data, torch.int64)["tokens"] + labels = tensor_parallel.broadcast_data(["labels"], data, torch.int64)["labels"] imgs = tensor_parallel.broadcast_data(["imgs"], data, torch.float32)["imgs"] - num_tiles = tensor_parallel.broadcast_data(["num_tiles"], data, torch.int)["num_tiles"] + num_tiles = tensor_parallel.broadcast_data(["num_tiles"], data, torch.int32)["num_tiles"] + + cu_lengths = tensor_parallel.broadcast_data(["cu_lengths"], data, torch.int32)["cu_lengths"] + max_lengths = tensor_parallel.broadcast_data(["max_lengths"], data, torch.int32)["max_lengths"] # Dummy image, no image. if imgs.shape == torch.Size([1, 1]): + # FIXME: text-only data can cause a hang if the vision model is own its own pipeline rank and --freeze-ViT is enabled. imgs = torch.tensor([], dtype=torch.float32, device=data_text.device) num_tiles = torch.tensor([], dtype=torch.int, device=data_text.device) + # Last pipeline parallel stage doesn't need images. 
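The cu_lengths and max_lengths broadcast above follow the usual cumulative-sequence-length convention of "thd" packing that PackedSeqParams expects below: for one packed sample made of subsequences of lengths l1..lk, cu_lengths is [0, l1, l1+l2, ...] and max_lengths holds the longest subsequence. A toy illustration of the convention (tensor shapes in the real dataloader may differ):

import torch

# Three documents packed into a single sample, with 5, 3 and 8 tokens respectively.
lengths = [5, 3, 8]

cu_lengths = torch.cumsum(torch.tensor([0] + lengths), dim=0).to(torch.int32)
max_lengths = torch.tensor([max(lengths)], dtype=torch.int32)

print(cu_lengths.tolist(), max_lengths.tolist())  # [0, 5, 8, 16] [8]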
+ if pp_size > 1 and is_pipeline_last_stage(): + imgs = None + + # If cu_lengths and max_lengths are non-dummy, construct PackedSeqParams. Otherwise, leave it at None. + if cu_lengths.shape != torch.Size([1, 1]): + assert ( + cu_lengths.shape[0] == max_lengths.shape[0] == 1 + ), "micro-batch-size must be 1 for packing" + cu_lengths = cu_lengths[0] + max_lengths = max_lengths[0] + + packed_seq_params = PackedSeqParams( + qkv_format="thd", + cu_seqlens_q=cu_lengths, + cu_seqlens_kv=cu_lengths, + max_seqlen_q=max_lengths, + max_seqlen_kv=max_lengths, + ) + torch.cuda.nvtx.range_pop() tokens_ = data_text.long() @@ -62,107 +100,41 @@ def get_batch(data_iterator): tokenizer = get_tokenizer() text_length = tokens_.shape[1] tokens = tokens_[:, :text_length].contiguous() - labels = target[:, 1:text_length+1].contiguous() + labels = labels[:, 1 : text_length + 1].contiguous() assert tokens.shape == labels.shape, f"tokens: {tokens.shape} != labels: {labels.shape}" torch.cuda.nvtx.range_pop() torch.cuda.nvtx.range_push("get_ltor_masks_and_position_ids") - if hasattr(tokenizer, 'eod'): - eod_token = tokenizer.eod - elif hasattr(tokenizer, 'eos_id'): - eod_token = tokenizer.eos_id - attention_mask, loss_mask, position_ids = \ - get_ltor_masks_and_position_ids(tokens, eod_token, - args.reset_position_ids, - args.reset_attention_mask, - args.eod_mask_loss, - question_length=prompt_len, - target=target[:, 1:text_length+1] - ) + loss_mask, position_ids = get_ltor_masks_and_position_ids(tokens, labels, tokenizer.pad) torch.cuda.nvtx.range_pop() - return tokens, labels, loss_mask, attention_mask, position_ids, imgs, num_tiles + return ( + tokens, + labels, + loss_mask, + attention_mask, + position_ids, + imgs, + num_tiles, + packed_seq_params, + ) -def get_ltor_masks_and_position_ids(data, - eod_token, - reset_position_ids, - reset_attention_mask, - eod_mask_loss, - question_length=None, - target=None, - weights=None): +def get_ltor_masks_and_position_ids(input_ids, target, pad_token): """Build masks and position id for left to right model.""" + seq_length = input_ids.shape[1] - # Extract batch size and sequence length. - micro_batch_size, seq_length = data.size() + # Position ids. + position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device) + position_ids = position_ids.unsqueeze(0).expand_as(input_ids) - # Attention mask (lower triangular). - if reset_attention_mask: - att_mask_batch = micro_batch_size - else: - att_mask_batch = 1 - - attention_mask = torch.tril(torch.ones( - (att_mask_batch, seq_length, seq_length), device=data.device)).view( - att_mask_batch, 1, seq_length, seq_length) - - # Loss mask. - if target != None: # use target to create loss mask that is created in data preparation step - loss_mask = torch.ones(target.size(), dtype=torch.float, device=data.device) - loss_mask[target == eod_token] = 0.0 # mask paddings - loss_mask[target == -100] = 0.0 # mask prompts - - else: # default creation - loss_mask = torch.ones(data.size(), dtype=torch.float, device=data.device) - if eod_mask_loss: - loss_mask[data == eod_token] = 0.0 - - if question_length is not None: - # Create a mask based on question_length - question_length_mask = torch.arange(loss_mask.size(1), device=loss_mask.device)[None, :] < question_length[:, None] - # Invert the mask (1 where we want to keep the loss, 0 where we want to zero it out) - inverted_mask = ~question_length_mask - # Apply the mask to loss_mask - loss_mask = loss_mask * inverted_mask.float() + # Loss mask. 
+ loss_mask = torch.ones(target.size(), dtype=torch.float, device=input_ids.device) + loss_mask[target == pad_token] = 0.0 # mask paddings + loss_mask[target == IGNORE_INDEX] = 0.0 # mask prompts - # Position ids. - position_ids = torch.arange(seq_length, dtype=torch.long, - device=data.device) - position_ids = position_ids.unsqueeze(0).expand_as(data) - # We need to clone as the ids will be modifed based on batch index. - if reset_position_ids: - position_ids = position_ids.clone() - - if reset_position_ids or reset_attention_mask: - # Loop through the batches: - for b in range(micro_batch_size): - - # Find indecies where EOD token is. - eod_index = position_ids[b, data[b] == eod_token] - # Detach indecies from positions if going to modify positions. - if reset_position_ids: - eod_index = eod_index.clone() - - # Loop through EOD indecies: - prev_index = 0 - for j in range(eod_index.size()[0]): - i = eod_index[j] - # Mask attention loss. - if reset_attention_mask: - attention_mask[b, 0, (i + 1):, :(i + 1)] = 0 - # Reset positions. - if reset_position_ids: - position_ids[b, (i + 1):] -= (i + 1 - prev_index) - prev_index = i + 1 - - # Convert attention mask to binary: - attention_mask = (attention_mask < 0.5) - if weights is not None: - loss_mask = loss_mask * weights - - return attention_mask, loss_mask, position_ids + return loss_mask, position_ids def loss_func(loss_mask, output_tensor): @@ -179,11 +151,7 @@ def loss_func(loss_mask, output_tensor): local_num_tokens = loss[1].clone().detach().to(torch.int) - return ( - total_loss, - local_num_tokens, - {'lm loss': (reporting_loss[0], reporting_loss[1])}, - ) + return (total_loss, local_num_tokens, {'lm loss': (reporting_loss[0], reporting_loss[1])}) def forward_step(data_iterator, model: LLaVAModel): @@ -201,10 +169,28 @@ def forward_step(data_iterator, model: LLaVAModel): # Get the batch. timers('batch-generator', log_level=2).start() - tokens, labels, loss_mask, attention_mask, position_ids, images, num_image_tiles = get_batch(data_iterator) + ( + tokens, + labels, + loss_mask, + attention_mask, + position_ids, + images, + num_image_tiles, + packed_seq_params, + ) = get_batch(data_iterator) timers('batch-generator').stop() - output_tensor, loss_mask = model(images, tokens, position_ids, attention_mask, labels, loss_mask, num_image_tiles=num_image_tiles) + output_tensor, loss_mask = model( + images, + tokens, + position_ids, + attention_mask, + labels, + loss_mask, + num_image_tiles=num_image_tiles, + packed_seq_params=packed_seq_params, + ) return output_tensor, partial(loss_func, loss_mask) @@ -243,7 +229,6 @@ def llava_position_embedding_ranks(pp_ranks): return [pp_ranks[epp]] - def run_online_eval(model): """Run an evaluation benchmark during training.""" args = get_args() @@ -253,15 +238,13 @@ def run_online_eval(model): return [] from config import EvaluationConfig - from run_text_generation import generate_and_write_samples, patch_tokenizer + from run_text_generation import generate_and_write_samples with open(args.online_evaluation_config, "r") as f: config_dict = yaml.safe_load(f) config = EvaluationConfig(**config_dict) - patch_tokenizer(args) - # The inference code assumes the first rank is the leader. # Tensorboard writer is on the last rank. # We must write to a storage space that all ranks see. @@ -281,6 +264,7 @@ def run_online_eval(model): # Run evaluation. 
if config.task == "TextVQA": from evaluate_textvqa import textvqa_eval + avg_acc = textvqa_eval(config.output_path) return [{"TextVQA accuracy": avg_acc}] @@ -299,6 +283,7 @@ def write_online_eval_to_tensorboard(data, iteration, writer): if __name__ == "__main__": + train_valid_test_dataloaders_provider.is_distributed = True pretrain( @@ -311,5 +296,5 @@ def write_online_eval_to_tensorboard(data, iteration, writer): process_non_loss_data_func=write_online_eval_to_tensorboard, get_embedding_ranks=llava_embedding_ranks, get_position_embedding_ranks=llava_position_embedding_ranks, - non_loss_data_func=run_online_eval + non_loss_data_func=run_online_eval, ) diff --git a/megatron/core/datasets/masked_dataset.py b/megatron/core/datasets/masked_dataset.py index 9db6c67eb..c2a02ebae 100644 --- a/megatron/core/datasets/masked_dataset.py +++ b/megatron/core/datasets/masked_dataset.py @@ -5,7 +5,7 @@ import time from abc import abstractmethod from dataclasses import dataclass -from typing import List, Optional, Tuple, Union +from typing import List, Optional, Tuple import numpy import torch @@ -33,7 +33,7 @@ class MaskedWordPieceDatasetConfig(BlendedMegatronDatasetConfig): """The maximum length N-gram to consider masking or permuting""" masking_do_full_word: bool = None - """Whether we mask the the whole word or its component parts""" + """Whether we mask the whole word or its component parts""" masking_do_permutation: bool = None """Whether we shuffle a subset of candidate N-grams in addition""" @@ -84,13 +84,15 @@ class MaskedWordPieceDataset(MegatronDataset): first token/piece. Args: - indexed_dataset (IndexedDataset): The IndexedDataset around which to build the MegatronDataset + indexed_dataset (IndexedDataset): The IndexedDataset around which to build the + MegatronDataset dataset_path (str): The real path on disk to the dataset, for bookkeeping indexed_indices (numpy.ndarray): The set of the documents indices to expose - num_samples (Optional[int]): The number of samples to draw from the indexed dataset. When None, build as many samples as correspond to one epoch. + num_samples (Optional[int]): The number of samples to draw from the indexed dataset. + When None, build as many samples as correspond to one epoch. index_split (Split): The indexed_indices Split @@ -274,7 +276,7 @@ def _create_masked_lm_predictions( ngram_nvals = numpy.arange(self.config.masking_max_ngram, dtype=numpy.int64) + 1 - # By default, the N-gram probabilites are inversely proportional to N + # By default, the N-gram probabilities are inversely proportional to N # e.g. N = 3 # -> P = array([0.54545455, 0.27272727, 0.18181818]) nprobs = 1.0 / ngram_nvals diff --git a/megatron/core/datasets/t5_dataset.py b/megatron/core/datasets/t5_dataset.py index b54e4f531..f356426ed 100644 --- a/megatron/core/datasets/t5_dataset.py +++ b/megatron/core/datasets/t5_dataset.py @@ -1,10 +1,13 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+import os from collections import deque from dataclasses import dataclass, field from typing import Dict, List, Optional, Union import numpy +import torch +from packaging.version import Version as PkgVersion from megatron.core.datasets.indexed_dataset import IndexedDataset from megatron.core.datasets.masked_dataset import ( @@ -12,6 +15,7 @@ MaskedWordPieceDatasetConfig, ) from megatron.core.datasets.utils import Split +from megatron.core.utils import get_te_version @dataclass @@ -45,13 +49,15 @@ class T5MaskedWordPieceDataset(MaskedWordPieceDataset): """The T5 dataset that assumes WordPiece tokenization Args: - indexed_dataset (IndexedDataset): The IndexedDataset around which to build the MegatronDataset + indexed_dataset (IndexedDataset): The IndexedDataset around + which to build the MegatronDataset dataset_path (str): The real path on disk to the dataset, for bookkeeping indexed_indices (numpy.ndarray): The set of the documents indices to expose - num_samples (Optional[int]): The number of samples to draw from the indexed dataset. When None, build as many samples as correspond to one epoch. + num_samples (Optional[int]): The number of samples to draw from the indexed + dataset. When None, build as many samples as correspond to one epoch. index_split (Split): The indexed_indices Split @@ -86,6 +92,135 @@ def _key_config_attributes() -> List[str]: T5MaskedWordPieceDataset, T5MaskedWordPieceDataset )._key_config_attributes() + ["sequence_length_decoder"] + @staticmethod + def _build_b1ss_attention_mask( + source_block: torch.tensor, target_block: torch.tensor, make_history_mask: bool = False + ) -> torch.tensor: + """Build an attention-mask having shape (bs, 1, q_len, kv_len) + from source_block and target_block + + Args: + source_block (torch.tensor): A 2-D array of tokens (bs, q_len) + target_block (torch.tensor): A 2-D array of tokens (bs, kv_len) + make_history_mask (bool): Whether to turn mask into causal mask + + Returns: + torch.tensor: The 4-D attention mask (bs, 1, q_len, kv_len) + """ + batch_size = source_block.shape[0] + attention_mask = [] + for i in range(batch_size): + source_sample = source_block[i] + target_sample = target_block[i] + mask = (target_sample[None, :] >= 1) * (source_sample[:, None] >= 1) + if make_history_mask: + arange = numpy.arange(source_sample.shape[0]) + history_mask = arange[None,] <= arange[:, None] + history_mask = torch.tensor(history_mask).to(mask.device) + mask = mask * history_mask + mask = ~(mask) # flip True to False + attention_mask.append(mask) + attention_mask = torch.stack(attention_mask) + attention_mask = attention_mask.unsqueeze(1) + return attention_mask + + @staticmethod + def config_attention_mask( + encoder_tokens: torch.tensor, + decoder_tokens: torch.tensor, + encoder_mask: torch.tensor, + decoder_mask: torch.tensor, + use_local: bool = False, + test_te_version: str = None, + ) -> torch.tensor: + """Config attention-mask for encoder_mask, decoder_mask, encoder_decoder_mask + conditioned on transformer-implementation (e.g. 
TE vs local), TE versions, + and TE backends + + Args: + encoder_tokens (torch.tensor): A 2-D array of tokens (bs, kv_len) + decoder_tokens (torch.tensor): A 2-D array of tokens (bs, q_len) + encoder_mask (torch.tensor): A 2-D array of tokens (bs, kv_len) + decoder_mask (torch.tensor): A 2-D array of tokens (bs, q_len) + use_local (bool): Whether the current T5 model uses local (vs TE) + transformer implmentation + + Returns: + Configured encoder_mask, decoder_mask, encoder_decoder_mask + torch.tensor: configured encoder attention mask + torch.tensor: configured decoder attention mask + torch.tensor: configured encoder-decoder attention mask + """ + # If using local transformer implementation (not transformer_engine): + # re-organize all attention masks, because local and transformer_engine + # backbones use different masks shapes. E.g.: + # (local: b1ss - transformer_engine: b11s) + if use_local: + encoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + encoder_tokens, encoder_tokens + ) + decoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + decoder_tokens, decoder_tokens, make_history_mask=True + ) + encoder_decoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + decoder_tokens, encoder_tokens + ) + + else: + # If using transformer_engine transformer implementation: + # 1. For TE version >= 1.10, across all 3 backends, + # The padding mask is configued as + # [bs, 1, 1, seq_len] for self-attention and + # ([bs, 1, 1, q_len], [bs, 1, 1, kv_len]) for cross-attention + # 2. For TE version >=1.7 and <1.10, when using Non-fused backend, + # The padding mask is configued as + # [bs, 1, q_len, kv_len] for both self-attention and for cross-attention + # 3. For TE version <1.7, only support Non-fused backend + # The padding mask is configued as + # [bs, 1, q_len, kv_len] for both self-attention and for cross-attention + + # Process for Flash/Fused + encoder_mask = encoder_mask.unsqueeze(1).unsqueeze(1) + decoder_mask = decoder_mask.unsqueeze(1).unsqueeze(1) + encoder_decoder_mask = (decoder_mask, encoder_mask) + # set decoder_mask to None because decoder uses AttnMaskType.causal + decoder_mask = None + + # get TE version, using test TE version if not None + if test_te_version is not None: + te_version = PkgVersion(test_te_version) + else: + te_version = get_te_version() + + # Check for older TE version than 1.10, adjust attention mask accordingly + flash_attention_enabled = os.getenv('NVTE_FLASH_ATTN') == '1' + fused_attention_enabled = os.getenv('NVTE_FUSED_ATTN') == '1' + if (te_version < PkgVersion("1.10.0")) and (te_version >= PkgVersion("1.7.0")): + if not (flash_attention_enabled) and not (fused_attention_enabled): + encoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + encoder_tokens, encoder_tokens + ) + encoder_decoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + decoder_tokens, encoder_tokens + ) + else: + pass + elif te_version < PkgVersion("1.7.0"): + if not (flash_attention_enabled) and not (fused_attention_enabled): + encoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + encoder_tokens, encoder_tokens + ) + encoder_decoder_mask = T5MaskedWordPieceDataset._build_b1ss_attention_mask( + decoder_tokens, encoder_tokens + ) + else: + assert not flash_attention_enabled and not fused_attention_enabled, ( + "Flash and fused attention is not supported with transformer " + "engine version < 1.7. 
Set NVTE_FLASH_ATTN=0 and NVTE_FUSED_ATTN=0" + "or upgrade transformer engine >= 1.7" + ) + return encoder_mask, decoder_mask, encoder_decoder_mask + def __getitem__(self, idx: int) -> Dict[str, Union[int, numpy.ndarray]]: """Abstract method implementation @@ -160,10 +295,9 @@ def __getitem__(self, idx: int) -> Dict[str, Union[int, numpy.ndarray]]: ) # Create attention and history masks - mask_encoder = self._make_attention_mask(encoder_input, encoder_input) - mask_encoder_decoder = self._make_attention_mask(decoder_input, encoder_input) - mask_decoder = self._make_attention_mask(decoder_input, decoder_input) - mask_decoder = mask_decoder * self._make_history_mask(decoder_input) + mask_encoder = numpy.array([1] * length_toks_encoder + [0] * length_pads_encoder) + mask_decoder = numpy.array([1] * length_toks_decoder + [0] * length_pads_decoder) + mask_encoder_decoder = None # Mask the labels decoder_output = numpy.array(decoder_output, dtype=numpy.int64) @@ -181,39 +315,8 @@ def __getitem__(self, idx: int) -> Dict[str, Union[int, numpy.ndarray]]: "truncated": int(truncated), "enc_mask": mask_encoder, "dec_mask": mask_decoder, - "enc_dec_mask": mask_encoder_decoder, } - @staticmethod - def _make_attention_mask( - source_block: numpy.ndarray, target_block: numpy.ndarray - ) -> numpy.ndarray: - """Return a 2-D attention mask - - Args: - source_block (numpy.ndarray): A 1-D array - target_block (numpy.ndarray): A 1-D array - - Returns: - numpy.ndarray: The 2-D attention mask - """ - mask = (target_block[None, :] >= 1) * (source_block[:, None] >= 1) - return mask.astype(numpy.int64) - - @staticmethod - def _make_history_mask(block: numpy.ndarray) -> numpy.ndarray: - """Return a 2-D history (lower-left-triangular) mask - - Args: - block (numpy.ndarray): A 1-D array - - Returns: - numpy.ndarray: The 2-D history (lower-left-triangular) mask - """ - arange = numpy.arange(block.shape[0]) - mask = arange[None,] <= arange[:, None] - return mask.astype(numpy.int64) - def _get_token_mask(self, numpy_random_state: numpy.random.RandomState) -> int: """Abstract method implementation diff --git a/megatron/core/dist_checkpointing/__init__.py b/megatron/core/dist_checkpointing/__init__.py index a065b5f36..eb7ad78a4 100644 --- a/megatron/core/dist_checkpointing/__init__.py +++ b/megatron/core/dist_checkpointing/__init__.py @@ -7,5 +7,6 @@ load_common_state_dict, load_plain_tensors, load_tensors_metadata, + remove_sharded_tensors, save, ) diff --git a/megatron/core/dist_checkpointing/dict_utils.py b/megatron/core/dist_checkpointing/dict_utils.py index 438925112..cd46134ea 100644 --- a/megatron/core/dist_checkpointing/dict_utils.py +++ b/megatron/core/dist_checkpointing/dict_utils.py @@ -104,7 +104,10 @@ def diff(x1: Any, x2: Any, prefix: Tuple = ()) -> Tuple[list, list, list]: only_left = [] only_right = [] if isinstance(x1, torch.Tensor) and isinstance(x2, torch.Tensor): - _is_mismatch = not torch.all(x1 == x2) + if x1.device != x2.device: + _is_mismatch = not torch.all(x1.cpu() == x2.cpu()) + else: + _is_mismatch = not torch.all(x1 == x2) # TODO: change with concrete type that has both replica_id and data attrs elif hasattr(x1, 'replica_id') and hasattr(x2, 'replica_id'): assert type(x1) == type(x2) diff --git a/megatron/core/dist_checkpointing/mapping.py b/megatron/core/dist_checkpointing/mapping.py index 90d4fcdc2..2ddfcf3b3 100644 --- a/megatron/core/dist_checkpointing/mapping.py +++ b/megatron/core/dist_checkpointing/mapping.py @@ -24,6 +24,7 @@ # dict (StateDict) from a state dict with tensors replaced 
with ShardedTensors # (ShardedStateDict). StateDict = Dict[str, Any] +CommonStateDict = Dict[str, Any] ShardedStateDict = Dict[str, Any] ReplicaId = Union[int, Tuple[int, ...]] @@ -265,16 +266,8 @@ def from_rank_offsets( axis_fragmentations = [1] * (data.ndim + prepend_axis_num) _seen_axis = set() for axis, axis_rank_offset, axis_fragm in rank_offsets: - assert axis >= 0 and axis_rank_offset >= 0 and axis_fragm >= 0, ( - axis, - axis_rank_offset, - axis_fragm, - ) - assert ( - axis_rank_offset < axis_fragm - ), 'Rank offset must be lower than axis fragmentation' - if axis in _seen_axis: - raise CheckpointingException('Duplicated axis specified') + if axis < 0 or axis_rank_offset < 0 or axis_fragm < 1 or axis_rank_offset >= axis_fragm: + raise CheckpointingException(f'Invalid rank offsets: {rank_offsets} for key {key}.') _seen_axis.add(axis) local_axis_shape = 1 if axis < prepend_axis_num else data.shape[axis - prepend_axis_num] diff --git a/megatron/core/dist_checkpointing/optimizer.py b/megatron/core/dist_checkpointing/optimizer.py index 2d231a24f..b3fcc7c64 100644 --- a/megatron/core/dist_checkpointing/optimizer.py +++ b/megatron/core/dist_checkpointing/optimizer.py @@ -1,17 +1,20 @@ # Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -""" Helpers for defining sharding for optimizer states based on existing sharding for model parameters. """ +""" Helpers for defining sharding for optimizer states based on existing sharding +for model parameters. +""" import logging from copy import deepcopy from dataclasses import replace -from itertools import chain -from typing import Dict, Iterable, List, Tuple, Union +from typing import Dict, Iterable, Tuple, Union logger = logging.getLogger(__name__) import torch +from megatron.core.utils import to_local_if_dtensor + from .dict_utils import nested_values from .mapping import ( LocalNonpersistentObject, @@ -24,8 +27,10 @@ def get_optim_param_to_id_map(optim_params_iter: Iterable[torch.nn.Parameter]) -> Dict[int, int]: + """Generate mapping from optimizer param to optimizer state id.""" param_mappings = {} for i, param in enumerate(optim_params_iter): + param = to_local_if_dtensor(param) if id(param) not in param_mappings: param_mappings[id(param)] = i return param_mappings @@ -37,7 +42,8 @@ def get_param_id_to_sharded_param_map( """Generate mapping from optimizer state ids to model sharded parameters. Args: - model_sharded_state_dict: sharded state dict with all model sharded tensors (can have any structure) + model_sharded_state_dict: sharded state dict with all model sharded tensors + (can have any structure) optim_params_iter: iterable which iterates over model parameters tracked by the optimizer. The iteration must be in the same order as in the optimizer parameters. @@ -48,6 +54,9 @@ def get_param_id_to_sharded_param_map( model_sharded_state_dict, _ = extract_sharded_tensors_and_factories(model_sharded_state_dict) id_to_sharded_param_map = {} param_to_id_map = get_optim_param_to_id_map(optim_params_iter) + # If using PyTorch FSDP2 the values in model_sharded_state_dict would + # have been converted to local tensors during initialization. + # See the make_(tp)_sharded_tensor_for_checkpoint functions. 
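to_local_if_dtensor, imported above from megatron.core.utils, is what makes the id()-based matching work when parameters are PyTorch DTensors (FSDP2). Its assumed behaviour can be sketched as below; this illustrates the semantics relied on here and is not the actual Megatron implementation:

import torch

try:
    from torch.distributed.tensor import DTensor  # public location in recent PyTorch
except ImportError:
    try:
        from torch.distributed._tensor import DTensor  # older private location
    except ImportError:
        DTensor = None


def to_local_if_dtensor_sketch(tensor: torch.Tensor) -> torch.Tensor:
    """Return the local shard of a DTensor, otherwise the tensor unchanged (assumed semantics)."""
    if DTensor is not None and isinstance(tensor, DTensor):
        return tensor.to_local()
    return tensor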
for ten in nested_values(model_sharded_state_dict): if id(ten.data) in param_to_id_map: id_to_sharded_param_map[param_to_id_map[id(ten.data)]] = ten @@ -76,12 +85,14 @@ def make_sharded_optimizer_tensor( Returns: Union[ShardedTensor, ShardedTensorFactory]: wrapped optimizer parameter """ + optim_param = to_local_if_dtensor(optim_param) if isinstance(model_param, ShardedTensorFactory): return replace(model_param, key=f'{prefix}.{model_param.key}', data=optim_param) - assert ( - tuple(optim_param.shape) == model_param.local_shape - ), f'Optimizer shape ({tuple(optim_param.shape)} does not match model shape ({model_param.local_shape})' + assert tuple(optim_param.shape) == model_param.local_shape, ( + f'Optimizer shape ({tuple(optim_param.shape)} does not match model shape ' + f'({model_param.local_shape})' + ) sh_ten = replace( model_param, key=f'{prefix}.{model_param.key}', data=optim_param, dtype=optim_param.dtype ) @@ -102,9 +113,11 @@ def optim_state_to_sharding_state( Args: optim_state_dict (StateDict): optimizer state dict with - state parameters under `state` key and group hyperparameters under `param_groups` -> `params` key. - id_to_sharded_param_map (Dict[int, ShardedTensor]): mapping from optimizer param ids to model sharded tensors. - Can be generated with `get_param_id_to_sharded_param_map` function + state parameters under `state` key and group hyperparameters under + `param_groups` -> `params` key. + id_to_sharded_param_map (Dict[int, ShardedTensor]): mapping from optimizer param ids + to model sharded tensors. Can be generated with `get_param_id_to_sharded_param_map` + function. exclude_keys (Tuple[str]): optimizer state keys to exclude from the final state dict. Returns: diff --git a/megatron/core/dist_checkpointing/serialization.py b/megatron/core/dist_checkpointing/serialization.py index 5493c96bb..3be5777e7 100644 --- a/megatron/core/dist_checkpointing/serialization.py +++ b/megatron/core/dist_checkpointing/serialization.py @@ -10,7 +10,7 @@ import logging from pathlib import Path -from typing import Dict, Optional, Set, Tuple, Union +from typing import Callable, Dict, Optional, Set, Tuple, Union import torch @@ -19,6 +19,7 @@ from .dict_utils import extract_matching_values, merge from .mapping import ( CheckpointingException, + CommonStateDict, ShardedObject, ShardedStateDict, StateDict, @@ -280,6 +281,12 @@ def load_plain_tensors(checkpoint_dir: str) -> StateDict: # return load(sharded_state_dict, checkpoint_dir, validate_access_integrity=False) +def remove_sharded_tensors(checkpoint_dir: str, key_prefix: str): + """determine the appropriate sharding strategy and delegate removal to the sharded strategy""" + sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy(checkpoint_dir) + sharded_strategy.remove_sharded_tensors(checkpoint_dir, key_prefix) + + def save( sharded_state_dict: ShardedStateDict, checkpoint_dir: str, @@ -287,6 +294,7 @@ def save( common_strategy: Union[SaveCommonStrategy, Tuple[str, int], None] = None, validate_access_integrity: bool = True, async_sharded_save: bool = False, + preprocess_common_before_consistancy_check: Callable[[CommonStateDict], StateDict] = None, ) -> Optional[AsyncRequest]: """Saving entrypoint. 
@@ -320,11 +328,16 @@ def save( common_strategy (SaveCommonStrategy, Tuple[str, int], optional): configures common data saving behavior and backend validate_access_integrity (bool default = True): checks if each tensor shard is accessed - exactly once (as main replica) by some process + exactly once (as main replica) by some process. + It also makes sure the common state dict is consistant across all ranks async_sharded_save (bool, optional): if True, for the sharded state dict part an async save implementation will be called, with the AsyncRequest being returned to the caller. Note that it is the caller responsibility to actually schedule the async save. Defaults to False. + preprocess_common_before_consistancy_check (Callable[[CommonStateDict], StateDict], None): + A callable function that will preprocess the common state dict (i.e can be used to + remove keys that we expect to be different in the state dict). The function must not + modify the original state dict Returns: AsyncRequest (optional): if `async_sharded_save` is True, returns @@ -359,7 +372,9 @@ def save( assert isinstance(common_strategy, tuple), type(common_strategy) common_strategy = get_default_strategy(StrategyAction.SAVE_COMMON, *common_strategy) - sharded_state_dict, state_dict = save_preprocess(sharded_state_dict, validate_access_integrity) + sharded_state_dict, state_dict = save_preprocess( + sharded_state_dict, validate_access_integrity, preprocess_common_before_consistancy_check + ) common_strategy.save_common(state_dict, checkpoint_dir) diff --git a/megatron/core/dist_checkpointing/state_dict_transformation.py b/megatron/core/dist_checkpointing/state_dict_transformation.py index ebb960e38..c8f01dd4a 100644 --- a/megatron/core/dist_checkpointing/state_dict_transformation.py +++ b/megatron/core/dist_checkpointing/state_dict_transformation.py @@ -4,17 +4,19 @@ import logging from time import time -from typing import Any, Optional +from typing import Any, Callable, Optional import torch from .dict_utils import dict_list_map_inplace, extract_matching_values, merge, nested_values from .exchange_utils import determine_main_replica_uniform_distribution, exchange_by_distribution from .mapping import ( + CommonStateDict, ShardedObject, ShardedStateDict, ShardedTensor, ShardedTensorFactory, + StateDict, apply_factories, apply_factory_merges, ) @@ -29,7 +31,11 @@ logger = logging.getLogger(__name__) -def save_preprocess(sharded_state_dict: ShardedStateDict, validate_access_integrity: bool = True): +def save_preprocess( + sharded_state_dict: ShardedStateDict, + validate_access_integrity: bool = True, + preprocess_common_before_consistancy_check: Callable[[CommonStateDict], StateDict] = None, +): """Preprocesses the given state dictionary by applying factories, discarding non-persistent data and extracting the common state dictionary. Optionally, it can validate sharding integrity. @@ -37,6 +43,9 @@ def save_preprocess(sharded_state_dict: ShardedStateDict, validate_access_integr Args: sharded_state_dict (ShardedStateDict): The initial state dictionary to be preprocessed. validate_access_integrity (bool): If True, triggers validation of sharding integrity. 
+ preprocess_common_before_consistancy_check (callable, None): A callable function + that will preprocess the common state dict (i.e can be used to remove keys + that we expect to be different in the state dict) Returns: Tuple[ShardedStateDict, dict]: @@ -46,7 +55,15 @@ def save_preprocess(sharded_state_dict: ShardedStateDict, validate_access_integr _, sharded_state_dict = extract_nonpersistent(sharded_state_dict) sharded_part, common_state_dict = extract_sharded_base(sharded_state_dict) if validate_access_integrity: - validate_sharding_integrity(determine_global_metadata(sharded_part)[1]) + preprocessed_common_state_dict = common_state_dict + if preprocess_common_before_consistancy_check: + preprocessed_common_state_dict = preprocess_common_before_consistancy_check( + common_state_dict + ) + validate_sharding_integrity( + determine_global_metadata(sharded_part)[1], + common_state_dict=preprocessed_common_state_dict, + ) return sharded_part, common_state_dict diff --git a/megatron/core/dist_checkpointing/strategies/base.py b/megatron/core/dist_checkpointing/strategies/base.py index 35fca1f35..cdcdd49f4 100644 --- a/megatron/core/dist_checkpointing/strategies/base.py +++ b/megatron/core/dist_checkpointing/strategies/base.py @@ -169,6 +169,10 @@ def load_sharded_metadata(self, checkpoint_dir: Path): f'Loading only sharded metadata not implemented for {self.__class__.__name__}' ) + def remove_sharded_tensors(self, checkpoint_dir: str, key_prefix: str): + """Remove all tensors whose key starts with key_prefix""" + raise NotImplementedError + class SaveCommonStrategy(SaveStrategyBase): """Save strategy for common (non-sharded) objects""" diff --git a/megatron/core/dist_checkpointing/strategies/filesystem_async.py b/megatron/core/dist_checkpointing/strategies/filesystem_async.py index 9d0be4d6e..47ab4d112 100644 --- a/megatron/core/dist_checkpointing/strategies/filesystem_async.py +++ b/megatron/core/dist_checkpointing/strategies/filesystem_async.py @@ -69,7 +69,7 @@ class FileSystemWriterAsync(FileSystemWriter): (intermediate state is stored as writer attributes). 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, separation_hint: Optional[str] = None, **kwargs): super().__init__(*args, **kwargs) if not self.single_file_per_rank: raise NotImplementedError( @@ -79,6 +79,7 @@ def __init__(self, *args, **kwargs): # Intermediate state between preparation and finalization self.write_buckets: Optional[List[WriteBucket]] = None self.results_queue: Optional[mp.Queue] = None + self.separation_hint = separation_hint def prepare_write_data(self, plan: SavePlan, planner: SavePlanner) -> None: """ @@ -93,7 +94,12 @@ def prepare_write_data(self, plan: SavePlan, planner: SavePlanner) -> None: storage_plan: _StoragePrefix = plan.storage_data start = time() logger.debug(f"thread_count: {self.thread_count}, time: {start}") - item_buckets = _split_by_size_and_type(self.thread_count, plan.items) + if self.separation_hint: + assert ( + self.thread_count > 1 + ), "thread_count must be at least 2 if separation_hint is provided" + bins = self.thread_count // 2 if self.separation_hint is not None else self.thread_count + item_buckets = _split_by_size_and_type(bins, plan.items, self.separation_hint) logger.debug(f"bucket_prep, time: {time() - start}") start = time() @@ -101,30 +107,33 @@ def prepare_write_data(self, plan: SavePlan, planner: SavePlanner) -> None: # We do D2H synchronously for now file_count = 0 - def gen_file(): + def gen_file(prefix=""): nonlocal file_count - file_name = f"{storage_plan.prefix}{file_count}{DEFAULT_SUFFIX}" + file_name = f"{prefix}{storage_plan.prefix}{file_count}{DEFAULT_SUFFIX}" file_count += 1 return file_name # Prepare bytes / tensor data in each bucket, which will be assigned to each writer process self.write_buckets = [] - for bucket in item_buckets: - bytes_data = [ - (item, planner.resolve_data(item)) - for item in bucket - if item.type == WriteItemType.BYTE_IO - ] - tensor_data = [ - (item, planner.resolve_data(item).detach().to("cpu", non_blocking=True)) - for item in bucket - if item.type != WriteItemType.BYTE_IO - ] - if len(bytes_data) > 0 or len(tensor_data) > 0: - file_name = gen_file() - self.write_buckets.append( - (self.path / file_name, file_name, (bytes_data, tensor_data)) - ) + for group_name, group_buckets in _split_by_separation_hint( + item_buckets, self.separation_hint + ).items(): + for bucket in group_buckets: + bytes_data = [ + (item, planner.resolve_data(item)) + for item in bucket + if item.type == WriteItemType.BYTE_IO + ] + tensor_data = [ + (item, planner.resolve_data(item).detach().to("cpu", non_blocking=True)) + for item in bucket + if item.type != WriteItemType.BYTE_IO + ] + if len(bytes_data) > 0 or len(tensor_data) > 0: + file_name = gen_file(prefix=group_name) + self.write_buckets.append( + (self.path / file_name, file_name, (bytes_data, tensor_data)) + ) # Check if there is anything to write on this rank if len(self.write_buckets) > 0: @@ -173,8 +182,8 @@ def write_preloaded_data_multiproc( Args: write_buckets (List[WriteBucket]): write plan - global_results_queue (mp.Queue): mp.Queue to collect Dict[List[WriteResults]] (or an Exception) - from parallel write processes to the main training process + global_results_queue (mp.Queue): mp.Queue to collect Dict[List[WriteResults]] + (or an Exception) from parallel write processes to the main training process Returns: None """ w_start = time() @@ -205,18 +214,23 @@ def write_preloaded_data_multiproc( # To make sure all nodes are completed count_queue.join() - # At this point, all workers completed, so the queue should have exactly 
`len(write_buckets)` items + # At this point, all workers completed, so the queue should have exactly + # `len(write_buckets)` items for proc_idx in range(len(write_buckets)): try: local_proc_idx, local_results_or_exc = local_results_queue.get() except queue.Empty: write_results_or_exc = RuntimeError( - f'Unexpected empty `local_results_queue` (got only {proc_idx}/{len(write_buckets)} items)' + f'Unexpected empty `local_results_queue`' + f' (got only {proc_idx}/{len(write_buckets)} items)' ) break else: if isinstance(local_results_or_exc, Exception): - err_msg = f"Local process {local_proc_idx} encountered an error: {local_results_or_exc}" + err_msg = ( + f"Local process {local_proc_idx} encountered" + f" an error: {local_results_or_exc}" + ) logger.error(err_msg) write_results_or_exc = local_results_or_exc break @@ -231,7 +245,8 @@ def write_preloaded_data_multiproc( w_end = time() logger.debug( - f"{w_end}, rank: {torch.distributed.get_rank()}, write(sync,parallel): {w_end - w_start}" + f"{w_end}, rank: {torch.distributed.get_rank()}," + f" write(sync,parallel): {w_end - w_start}" ) @staticmethod @@ -249,7 +264,8 @@ def write_preloaded_data( Args: local_proc_idx (int): index of a local process that performs writing write_bucket (WriteBucket): data to write to storage - results_queue (mp.Queue): queue to return the write results to the proxy checkpoint process. + results_queue (mp.Queue): queue to return the write results + to the proxy checkpoint process. count_queue (mp.JoinableQueue): queue to marks worker task as completed use_fsync (bool): if True, calls os.fsync at the end of saving @@ -281,17 +297,21 @@ def write_preloaded_data( mem_after = _process_memory() logger.debug( - f"{local_proc_idx} consumed: {mem_after - mem_before}, before: {mem_before}, after: {mem_after}" + f"{local_proc_idx} consumed: {mem_after - mem_before}," + f" before: {mem_before}, after: {mem_after}" ) def write_data(self, plan: SavePlan, planner: SavePlanner) -> Future[List[WriteResult]]: + """Write all items from ``plan``.""" raise NotImplementedError('write_data not implemented for FileSystemWriterAsync') def retrieve_write_results(self) -> List[WriteResult]: """ - Turn the latest dict including write results from `self.results_queue` into a single results lists. Includes error check. + Turn the latest dict including write results from `self.results_queue` + into a single results lists. Includes error check. - Returns (List[WriteResult]): the list of write results from all local processes performing the save. + Returns (List[WriteResult]): the list of write results + from all local processes performing the save. """ assert self.write_buckets is not None @@ -309,13 +329,15 @@ def retrieve_write_results(self) -> List[WriteResult]: write_results: dict = write_results_or_exc if len(write_results) != len(self.write_buckets): raise RuntimeError( - f'Incomplete worker results (expected {len(self.write_buckets)}, got {len(write_results)}.' - f' This probably indicates a worker failure.' + f'Incomplete worker results (expected {len(self.write_buckets)},' + f' got {len(write_results)}. This probably indicates a worker failure.' ) return list(chain.from_iterable(write_results.values())) -def _split_by_size_and_type(bins: int, items: List[WriteItem]) -> List[List[WriteItem]]: +def _split_by_size_and_type( + bins: int, items: List[WriteItem], separation_hint: Optional[str] = None +) -> List[List[WriteItem]]: """ Splits write items according to item size into close to uniform bins. 
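Both the size-balanced binning described here and the separation-hint regrouping added in the next hunk can be illustrated with a compact standalone sketch. This is a simplified greedy variant with plain strings standing in for `WriteItem` objects; it is not necessarily the exact heuristic used by `_split_by_size_and_type`.

```python
# Simplified sketch of the two bucketing steps: (1) split items into `bins`
# close-to-uniform groups by size (greedy: largest item to the lightest bin),
# (2) regroup each bin by an optional key prefix (the separation hint), so
# hinted items can later be written to separately prefixed files.
from typing import Dict, List, Optional, Tuple


def split_by_size(bins: int, items: List[Tuple[str, int]]) -> List[List[str]]:
    buckets: List[List[str]] = [[] for _ in range(bins)]
    bucket_sizes = [0] * bins
    for name, size in sorted(items, key=lambda kv: kv[1], reverse=True):
        idx = bucket_sizes.index(min(bucket_sizes))
        buckets[idx].append(name)
        bucket_sizes[idx] += size
    return buckets


def split_by_separation_hint(
    buckets: List[List[str]], separation_hint: Optional[str] = None
) -> Dict[str, List[List[str]]]:
    if separation_hint is None:
        return {"": buckets}
    bins = len(buckets)
    default = [[] for _ in range(bins)]
    hinted = [[] for _ in range(bins)]
    for i, bucket in enumerate(buckets):
        for key in bucket:
            (hinted if key.startswith(separation_hint) else default)[i].append(key)
    return {"": default, separation_hint: hinted}


if __name__ == "__main__":
    items = [
        ("model.embedding.weight", 100),
        ("model.layer1.weight", 40),
        ("optimizer.exp_avg.layer1.weight", 40),
        ("optimizer.exp_avg_sq.layer1.weight", 40),
    ]
    buckets = split_by_size(bins=2, items=items)
    groups = split_by_separation_hint(buckets, "optimizer.")
    # Each group keeps the same number of bins; file names then receive the group
    # name as a prefix (roughly '<hint><storage-prefix><N>.distcp').
    print(groups)
```

In `prepare_write_data`, the thread count is halved when a hint is given (and must be at least 2), so each group gets its own set of write bins.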
@@ -353,6 +375,37 @@ def _split_by_size_and_type(bins: int, items: List[WriteItem]) -> List[List[Writ return buckets +def _split_by_separation_hint( + buckets: List[List[WriteItem]], separation_hint: Optional[str] = None +) -> Dict[str, List[List[WriteItem]]]: + """ + Splits buckets into those whose keys begin with the separation_hint and those whose keys do not + + Args: + buckets (List[List[WriteItem]]): buckets to split + separation_hint (Optional[str]): optional prefix to split on + + Returns (Dict[str, List[List[WriteItem]]]): a dictionary + mapping the prefix to the relevant buckets + """ + bins = len(buckets) + buckets_with_separation_hint = {} + if separation_hint is not None: + buckets_default = [[] for _ in range(bins)] + buckets_hint = [[] for _ in range(bins)] + for i in range(bins): + for item in buckets[i]: + if item.index.fqn.startswith(separation_hint): + buckets_hint[i].append(item) + else: + buckets_default[i].append(item) + buckets_with_separation_hint[""] = buckets_default + buckets_with_separation_hint[separation_hint] = buckets_hint + else: + buckets_with_separation_hint[""] = buckets + return buckets_with_separation_hint + + def _item_size(item: WriteItem) -> int: """ Calculates size (in bytes) of a single write item. diff --git a/megatron/core/dist_checkpointing/strategies/torch.py b/megatron/core/dist_checkpointing/strategies/torch.py index 077d94eb7..ea95254ad 100644 --- a/megatron/core/dist_checkpointing/strategies/torch.py +++ b/megatron/core/dist_checkpointing/strategies/torch.py @@ -2,6 +2,9 @@ """ Strategies using PyTorch distributed.checkpoint as an underlying format. """ import io +import os +import pickle +import warnings from collections import ChainMap, defaultdict from dataclasses import dataclass from itertools import product @@ -13,14 +16,15 @@ from packaging.version import Version as PkgVersion from torch.distributed import checkpoint from torch.distributed._shard.metadata import ShardMetadata -from torch.distributed._shard.sharded_tensor import Shard, ShardedTensorMetadata, TensorProperties -from torch.distributed._sharded_tensor import ShardedTensor as TorchShardedTensor -from torch.distributed._tensor import DTensor +from torch.distributed._shard.sharded_tensor import Shard +from torch.distributed._shard.sharded_tensor import ShardedTensor as TorchShardedTensor +from torch.distributed._shard.sharded_tensor import ShardedTensorMetadata, TensorProperties from torch.distributed.checkpoint import ( BytesStorageMetadata, DefaultLoadPlanner, DefaultSavePlanner, FileSystemReader, + FileSystemWriter, LoadPlan, Metadata, ReadItem, @@ -33,6 +37,7 @@ from torch.distributed.checkpoint.metadata import Metadata from torch.distributed.checkpoint.planner_helpers import _create_write_items +from ...utils import get_torch_version, is_torch_min_version from ..core import CheckpointingException from ..dict_utils import nested_values from ..mapping import ( @@ -69,6 +74,15 @@ except ImportError: HAVE_TE = False +try: + from torch.distributed._tensor import DTensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + +_metadata_fn: str = ".metadata" + def register_default_torch_strategies(): """Register default strategies related to PyT Distributed backend.""" @@ -450,7 +464,7 @@ def __init__( ) -> None: # `dedup_replicated_tensors` was deprecated in 2.3; this check avoids warnings # during saving. 
- if PkgVersion(torch.__version__) <= PkgVersion("2.2"): + if get_torch_version() <= PkgVersion("2.2"): kwargs['dedup_replicated_tensors'] = dedup_replicated_tensors super().__init__(*args, **kwargs) self.nd_flattened_global_shapes = nd_flattened_global_shapes or {} @@ -465,7 +479,7 @@ def create_local_plan(self) -> SavePlan: # add those requests on all ranks. We inline a simplified version of this method below. write_items = [] for fqn, obj in self.state_dict.items(): - assert not isinstance( + assert not HAVE_DTENSOR or not isinstance( obj, DTensor ) # translation from MCore ShardedTensors shouldn't result in DTensors # Create write requests for tensor and bytes values. @@ -510,6 +524,11 @@ def __init__( def _validate_global_shapes(self, metadata, sharded_tensors): for sh_ten in sharded_tensors: + if sh_ten.key not in metadata.state_dict_metadata: + raise KeyError( + f"{sh_ten.key} from model not in state dict:" + f" {sorted(metadata.state_dict_metadata.keys())}" + ) loaded_shape = metadata.state_dict_metadata[sh_ten.key].size if not is_nd_flattened_tensor(sh_ten): expected_shape = sh_ten.global_shape @@ -578,6 +597,7 @@ def __init__( keep_only_main_replica: bool = True, thread_count: int = 2, cached_metadata: bool = False, + separation_hint: str = None, ): """Adds parameters specific to PyT Distributed format Args: @@ -590,6 +610,8 @@ def __init__( Affects the number of files in the checkpoint (saving ranks * num_threads). cached_metadata (bool, optional): Enables using cached global metadata to avoid gathering local metadata every checkpointing invocation + separation_hint(str, optional): If provided, all tensors whose keys have this + prefix will be saved to a separate file. """ super().__init__(backend, version) self.keep_only_main_replica = keep_only_main_replica @@ -610,6 +632,8 @@ def __init__( # The knob to enable cached metadata communication in saving self.use_cached_ckpt_structure: bool = cached_metadata + self.separation_hint = separation_hint + def async_save( self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path ) -> AsyncRequest: @@ -629,7 +653,9 @@ def async_save( ) pyt_state_dict = mcore_to_pyt_state_dict(sharded_state_dict, False) # Use PyT saving mechanism - writer = FileSystemWriterAsync(checkpoint_dir, thread_count=self.thread_count) + writer = FileSystemWriterAsync( + checkpoint_dir, separation_hint=self.separation_hint, thread_count=self.thread_count + ) # This should be set differently if we run in a smaller process group than the default coordinator = 0 # Try twice to validate the generated `central_plan` is the same across iterations @@ -825,6 +851,84 @@ def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: sharded_metadata.update(self.load_tensors_metadata(checkpoint_dir, metadata)) return sharded_metadata + def remove_sharded_tensors(self, checkpoint_dir: str, key_prefix: str): + """Removes checkpoint files whose keys have the given prefix. + + Performs the following steps: + 1. checks whether there are files that start with the key_prefix + 2. loads metadata + 3. removes all entries from the metadata that start with the key_prefix + 4. resaves the new metadata and removes the old metadata + 5. 
removes the relevant files + """ + + assert is_torch_min_version( + "2.3.0" + ), f'torch >= 2.3.0 is required for remove_sharded_tensors' + + distckpt_files = [f for f in os.listdir(checkpoint_dir) if f.endswith("distcp")] + files_to_remove = [f for f in distckpt_files if f.startswith(key_prefix)] + + if not files_to_remove: + warnings.warn( + f'There are no files in {checkpoint_dir} that begin with "{key_prefix}".' + f' Skipping removal.' + ) + return + + fs_reader = FileSystemReader(checkpoint_dir) + original_metadata = fs_reader.read_metadata() + + new_state_dict_metadata = {} + new_planner_data = {} + new_storage_data = {} + for k in original_metadata.state_dict_metadata.keys(): + if k.startswith(key_prefix): + continue + new_state_dict_metadata[k] = original_metadata.state_dict_metadata[k] + for k in original_metadata.planner_data.keys(): + if k.startswith(key_prefix): + continue + new_planner_data[k] = original_metadata.planner_data[k] + for k in original_metadata.storage_data.keys(): + if k.fqn.startswith(key_prefix): + continue + new_storage_data[k] = original_metadata.storage_data[k] + metadata = Metadata( + state_dict_metadata=new_state_dict_metadata, + planner_data=new_planner_data, + storage_data=new_storage_data, + ) + fs_writer = FileSystemWriter(checkpoint_dir) + metadata_filename = cast(Path, fs_writer.fs.concat_path(fs_writer.path, _metadata_fn)) + tmp_path = cast( + metadata_filename, fs_writer.fs.concat_path(fs_writer.path, f"{_metadata_fn}.tmp") + ) + old_path = cast( + metadata_filename, fs_writer.fs.concat_path(fs_writer.path, f"{_metadata_fn}.bck") + ) + ## save the new metadata + with fs_writer.fs.create_stream(tmp_path, "wb") as metadata_file: + pickle.dump(metadata, metadata_file) + try: + os.fsync(metadata_file.fileno()) + except AttributeError: + os.sync() + ## move the old metadata + fs_writer.fs.rename(fs_writer.metadata_path, old_path) + try: + ## rename the new metadata + fs_writer.fs.rename(tmp_path, fs_writer.metadata_path) + + ## finally, remove the files we want to drop + for f in files_to_remove: + fs_writer.fs.rm_file(checkpoint_dir / f) + except Exception as e: + fs_writer.fs.rename(old_path, fs_writer.metadata_path) + raise e + else: + fs_writer.fs.rm_file(old_path) + def can_handle_sharded_objects(self): return True diff --git a/megatron/core/dist_checkpointing/validation.py b/megatron/core/dist_checkpointing/validation.py index cd11b82ed..8f39ddc05 100644 --- a/megatron/core/dist_checkpointing/validation.py +++ b/megatron/core/dist_checkpointing/validation.py @@ -11,11 +11,13 @@ from megatron.core.dist_checkpointing import ShardedTensor from megatron.core.dist_checkpointing.core import CheckpointingException, maybe_load_config from megatron.core.dist_checkpointing.dict_utils import ( + diff, extract_matching_values, map_reduce, nested_values, ) from megatron.core.dist_checkpointing.mapping import ( + CommonStateDict, ShardedBase, ShardedObject, ShardedStateDict, @@ -34,10 +36,10 @@ from megatron.core.dist_checkpointing.serialization import CkptShardedMetadata logger = logging.getLogger(__name__) - +# pylint: disable=line-too-long # list of local saved/loaded ShardedBase objects _LocalMetadata = List[Union[ShardedTensor, ShardedObject]] -# list of lists of global saved/loaded ShardedBase objects (each list element corresponds to global rank) +# list of lists of global saved/loaded ShardedBase objects (each element corresponds to global rank) _GlobalMetadata = List[_LocalMetadata] @@ -362,7 +364,33 @@ def maybe_report_missing_and_unexpected_keys( 
logger.warning(error_msg) -def validate_sharding_integrity(global_metadata: _GlobalMetadata) -> None: +def _validate_common_state_dict(common_state_dict: CommonStateDict): + """Validate consistency across ranks for the common state dict + + We save the common state dict only on rank 0. We validate to make sure that the common dict is consistent across ranks before saving. + + Args: + common_state_dict: The common state dict present on all ranks + """ + other_rank_state_dicts = [None] * torch.distributed.get_world_size() + torch.distributed.all_gather_object(other_rank_state_dicts, common_state_dict) + common_state_dict_diff = {} + if torch.distributed.get_rank() == 0: + main_rank_state_dict = common_state_dict + for rank, rank_state_dict in enumerate(other_rank_state_dicts[1:], 1): + only_left, only_right, mismatch = diff(main_rank_state_dict, rank_state_dict) + if only_left or only_right or mismatch: + common_state_dict_diff[rank] = (only_left, only_right, mismatch) + + if len(common_state_dict_diff) != 0: + logger.warning( + f'There is a difference in the common state dict across ranks. The differences are {common_state_dict_diff}' + ) + + +def validate_sharding_integrity( + global_metadata: _GlobalMetadata, common_state_dict: CommonStateDict = None +) -> None: """Validate if the ShardedTensors and ShardedObjects from multiple processes define correct sharding. Local ShardedTensors and ShardedObject metadata is exchanged with `torch.distributed.all_gather_object` @@ -372,6 +400,7 @@ def validate_sharding_integrity(global_metadata: _GlobalMetadata) -> None: Args: global_metadata (_GlobalMetadata): ShardedTensor and ShardedObject objects from all ranks. + common_state_dict (CommonStateDict): The common state dict stored by rank 0 Returns: None @@ -379,6 +408,10 @@ def validate_sharding_integrity(global_metadata: _GlobalMetadata) -> None: Raises: CheckpointingException for invalid access pattern """ + + if common_state_dict: + _validate_common_state_dict(common_state_dict) + if torch.distributed.get_rank() != 0: return diff --git a/megatron/core/distributed/README.md b/megatron/core/distributed/README.md new file mode 100644 index 000000000..c4a752844 --- /dev/null +++ b/megatron/core/distributed/README.md @@ -0,0 +1,11 @@ +## How to use PyTorch FSDP2? + +Add these flags to enable Torch FSDP2. + +``` +--use-torch-fsdp2 +--no-gradient-accumulation-fusion +--ckpt-format torch_dist +``` + +It is worth noting that CUDA_MAX_CONNECTIONS=1 should not be set, to ensure that the communication of FSDP and the computation on the primary stream can be fully parallelized. diff --git a/megatron/core/distributed/__init__.py b/megatron/core/distributed/__init__.py index e43ae115a..9dbf83c80 100644 --- a/megatron/core/distributed/__init__.py +++ b/megatron/core/distributed/__init__.py @@ -1,10 +1,8 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from packaging.version import Version + from .distributed_data_parallel import DistributedDataParallel from .distributed_data_parallel_config import DistributedDataParallelConfig from .finalize_model_grads import finalize_model_grads - -# For backwards compatibility. ParamAndGradBuffer will be deprecated in future release. -# ParamAndGradBuffer (which is an alias of _ParamAndGradBuffer) is not intended to be -# consumed directly by external code.
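Tying the rank-0 consistency check above to the `preprocess_common_before_consistancy_check` hook added to `save()`: a hedged sketch of a callback that deep-copies the common state dict and drops keys which are legitimately rank-dependent before the comparison. The dropped key names are hypothetical, and the callback must not modify the original dict.

```python
# Sketch: relax the common-state-dict consistency check by stripping keys that
# are expected to differ across ranks. Key names here are illustrative only.
from copy import deepcopy

from megatron.core.dist_checkpointing.serialization import save


def drop_rank_dependent_keys(common_state_dict):
    cleaned = deepcopy(common_state_dict)  # never mutate the original
    for key in ("rng_state", "local_timestamp"):
        cleaned.pop(key, None)
    return cleaned


def save_with_relaxed_common_check(sharded_state_dict, checkpoint_dir):
    # The preprocessed copy is used only for the cross-rank comparison;
    # the original common state dict is what actually gets saved.
    return save(
        sharded_state_dict,
        checkpoint_dir,
        validate_access_integrity=True,
        preprocess_common_before_consistancy_check=drop_rank_dependent_keys,
    )
```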
-from .param_and_grad_buffer import ParamAndGradBuffer +from .torch_fully_sharded_data_parallel import TorchFullyShardedDataParallel diff --git a/megatron/core/distributed/data_parallel_base.py b/megatron/core/distributed/data_parallel_base.py new file mode 100644 index 000000000..aed576a7a --- /dev/null +++ b/megatron/core/distributed/data_parallel_base.py @@ -0,0 +1,96 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from contextlib import contextmanager + +import torch + +from ..transformer.module import MegatronModule +from ..transformer.transformer_config import TransformerConfig + + +class _BaseDataParallel(MegatronModule): + """A template class for DistributedDataParallel implementations.""" + + def __init__(self, config: TransformerConfig, module: torch.nn.Module): + super().__init__(config=config) + self.module = module + + def forward(self, *inputs, **kwargs): + """ + Calls the wrapped module's forward() method. + """ + return self.module(*inputs, **kwargs) + + @contextmanager + def no_sync(self): + """ + Context manager that turns off gradient synchronization. + """ + try: + yield + finally: + pass + + def start_grad_sync(self, *unused): + """ + Initiates grad sync (all-reduce or reduce-scatter) communication operations + for all model gradients. + + When overlap_grad_reduce is set to True, dispatches asynchronous communication + calls. When overlap_grad_reduce is set to False, calls synchronous + communication ops. + """ + pass + + def scale_gradients(self, scaling_factor: float) -> None: + """Scale all gradients inside the buffers by `scaling_factor`.""" + pass + + def finish_grad_sync(self): + """ + Finishes grad sync (all-reduce or reduce-scatter) communication operations + for all model gradients. + + When overlap_grad_reduce is set to True, waits for asynchronous communication + calls to complete. When overlap_grad_reduce is set to False, calls synchronous + communication ops. + """ + pass + + def zero_grad_buffer(self): + """ + Zeros out all grad buffers. Needs to be called at the beginning of each + training iteration. + """ + pass + + def broadcast_params(self): + """ + Syncs parameters across all DP ranks. + """ + pass + + def state_dict(self, prefix='', keep_vars=False): + """ + Returns a dictionary containing references to the whole state of the + wrapped module. + + Both parameters and persistent buffers (e.g. running averages) are included. + Keys are corresponding parameter and buffer names. Parameters and buffers + set to None are not included. + """ + return self.module.state_dict(prefix=prefix, keep_vars=keep_vars) + + def state_dict_for_save_checkpoint(self, prefix='', keep_vars=False): + """ + Returns wrapped module's state_dict for checkpoint saving. + """ + return self.module.state_dict_for_save_checkpoint(prefix=prefix, keep_vars=keep_vars) + + def load_state_dict(self, state_dict, strict=True): + """ + Copies parameters and buffers from state_dict into the wrapped module and its + descendants. If strict is True, then the keys of state_dict must exactly match + the keys returned by this module’s state_dict() function. + """ + self.module.load_state_dict(state_dict, strict=strict) diff --git a/megatron/core/distributed/distributed_data_parallel.py b/megatron/core/distributed/distributed_data_parallel.py index 6e5bbd96d..3a23426ec 100644 --- a/megatron/core/distributed/distributed_data_parallel.py +++ b/megatron/core/distributed/distributed_data_parallel.py @@ -7,16 +7,16 @@ from .. 
import parallel_state from ..config_logger import has_config_logger_enabled, log_config_to_disk -from ..transformer.module import MegatronModule from ..transformer.transformer_config import TransformerConfig from ..utils import is_float8tensor, log_single_rank +from .data_parallel_base import _BaseDataParallel from .distributed_data_parallel_config import DistributedDataParallelConfig from .param_and_grad_buffer import _ParamAndGradBuffer, partition_buckets logger = logging.getLogger(__name__) -class DistributedDataParallel(MegatronModule): +class DistributedDataParallel(_BaseDataParallel): """ DDP wrapper which stores grads in contiguous buffers. Also has option of overlapping communication with backprop computation by breaking up full model's gradients into smaller @@ -41,7 +41,7 @@ def __init__( module: torch.nn.Module, disable_bucketing: bool = False, ): - super().__init__(config=config) + super().__init__(config=config, module=module) if has_config_logger_enabled(config): log_config_to_disk(config, locals(), prefix=type(self).__name__) @@ -154,7 +154,7 @@ def _allocate_buffers_for_parameters( # Collective is averaging gradients in collective with data_parallel_group. assert ( gradient_scaling_factor - / torch.distributed.get_world_size(group=data_parallel_group) + / parallel_state.get_data_parallel_world_size(with_context_parallel=True) == target_gradient_scaling_factor ) else: @@ -188,6 +188,17 @@ def _allocate_buffers_for_parameters( # bucket group. bucket_groups = partition_buckets(buffers, force_single_bucket_group=disable_bucketing) + if self.ddp_config.num_distributed_optimizer_instances > 1: + assert ( + self.ddp_config.use_distributed_optimizer + ), 'Partial DistOpt cannot be used without DistOpt' + communication_stream = torch.cuda.Stream(device=torch.cuda.current_device()) + for bucket_group in bucket_groups: + bucket_group.inter_distributed_optimizer_instance_group = ( + parallel_state.get_inter_partial_data_parallel_group() + ) + bucket_group.communication_stream = communication_stream + # Set `next_param_gather_bucket_group` for different bucket groups by iterating through # buckets in reverse order (since all-gathers happen in reverse order of buckets). if self.ddp_config.use_distributed_optimizer and self.ddp_config.overlap_param_gather: @@ -218,13 +229,16 @@ def _allocate_buffers_for_parameters( data_parallel_world_size = parallel_state.get_data_parallel_world_size( with_context_parallel=True ) + gradient_scaling_factor = 1.0 / data_parallel_world_size expert_gradient_scaling_factor = 1.0 / data_parallel_world_size # Allocate the param+grad buffers for dense params' grads. self.buffers, self.bucket_groups = _allocate_buffers_for_parameters( dense_params, - parallel_state.get_data_parallel_group(with_context_parallel=True), + parallel_state.get_data_parallel_group( + with_context_parallel=True, partial_data_parallel=True + ), gradient_scaling_factor=gradient_scaling_factor, ) @@ -232,7 +246,7 @@ def _allocate_buffers_for_parameters( self.expert_parallel_buffers, self.expert_parallel_bucket_groups = ( _allocate_buffers_for_parameters( expert_parallel_params, - parallel_state.get_data_modulo_expert_parallel_group(with_context_parallel=True), + parallel_state.get_expert_data_parallel_group(), gradient_scaling_factor=expert_gradient_scaling_factor, ) ) @@ -298,12 +312,6 @@ def disable_forward_pre_hook(self): # Force synchronize parameters. self.start_param_sync(force_sync=True) - def forward(self, *inputs, **kwargs): - """ - Calls the wrapped module's forward() method. 
- """ - return self.module(*inputs, **kwargs) - def _make_forward_pre_hook(self): """ Create a forward pre-hook to wait on all-gather handles when necessary (i.e., @@ -446,40 +454,13 @@ def broadcast_params(self): is_expert_parallel = not getattr(param, 'allreduce', True) if is_expert_parallel: - data_parallel_group = parallel_state.get_data_modulo_expert_parallel_group( - with_context_parallel=True - ) + data_parallel_group = parallel_state.get_expert_data_parallel_group() else: data_parallel_group = parallel_state.get_data_parallel_group( - with_context_parallel=True + with_context_parallel=True, partial_data_parallel=True ) torch.distributed.broadcast( param.data, src=torch.distributed.get_global_rank(data_parallel_group, 0), group=data_parallel_group, ) - - def state_dict(self, prefix='', keep_vars=False): - """ - Returns a dictionary containing references to the whole state of the - wrapped module. - - Both parameters and persistent buffers (e.g. running averages) are included. - Keys are corresponding parameter and buffer names. Parameters and buffers - set to None are not included. - """ - return self.module.state_dict(prefix=prefix, keep_vars=keep_vars) - - def state_dict_for_save_checkpoint(self, prefix='', keep_vars=False): - """ - Returns wrapped module's state_dict for checkpoint saving. - """ - return self.module.state_dict_for_save_checkpoint(prefix=prefix, keep_vars=keep_vars) - - def load_state_dict(self, state_dict, strict=True): - """ - Copies parameters and buffers from state_dict into the wrapped module and its - descendants. If strict is True, then the keys of state_dict must exactly match - the keys returned by this module’s state_dict() function. - """ - self.module.load_state_dict(state_dict, strict=strict) diff --git a/megatron/core/distributed/distributed_data_parallel_config.py b/megatron/core/distributed/distributed_data_parallel_config.py index 14068ea36..fbcd93019 100644 --- a/megatron/core/distributed/distributed_data_parallel_config.py +++ b/megatron/core/distributed/distributed_data_parallel_config.py @@ -27,6 +27,11 @@ class DistributedDataParallelConfig: originally allocated model parameters, otherwise issue all-reduce collectives. """ + num_distributed_optimizer_instances: int = 1 + """Sets the factor by which the DP domain is sharded to have the partial DistOpt + enabled. Defaults to 1, which means DistOpt is across entire DP domain. + """ + check_for_nan_in_grad: bool = False """ If true, check for NaNs in gradients _before_ communication collective.""" diff --git a/megatron/core/distributed/finalize_model_grads.py b/megatron/core/distributed/finalize_model_grads.py index ff5046afa..db31fc013 100644 --- a/megatron/core/distributed/finalize_model_grads.py +++ b/megatron/core/distributed/finalize_model_grads.py @@ -1,15 +1,112 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from typing import List, Optional +from typing import List, Optional, Union import torch from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors +try: + from torch.distributed._tensor import DTensor, distribute_tensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + from .. import parallel_state from ..transformer.transformer_config import TransformerConfig from ..utils import get_attr_wrapped_model, get_model_config +def _unshard_if_dtensor(tensor: Union[torch.Tensor, "DTensor"]) -> torch.Tensor: + """ + Unshards the input tensor if it is a DTensor and otherwise returns the + tensor unmodified. 
+ + Args: + tensor (Union[torch.Tensor, DTensor]): The tensor to potentially unshard. + + Returns: + An unsharded version of the input tensor if it is a DTensor, or the + input tensor unmodified if it is not a DTensor. + """ + if HAVE_DTENSOR and isinstance(tensor, DTensor): + unsharded_tensor = tensor.full_tensor() + for k, v in vars(tensor).items(): + setattr(unsharded_tensor, k, v) + return unsharded_tensor + return tensor + + +def _reshard_if_dtensor( + tensor_to_shard: torch.Tensor, reference_tensor: Union[torch.Tensor, "DTensor"] +) -> Union[torch.Tensor, "DTensor"]: + """ + Reshards the input tensor to match the sharding configuration of the + reference tensor if the reference tensor is a DTensor. Otherwise, returns + the reference tensor unmodified. + + Args: + tensor_to_shard (torch.Tensor): The tensor to be potentially sharded. + reference_tensor (Union[torch.Tensor, DTensor]): The reference tensor + for the sharding configuration. + + Returns: + Union[torch.Tensor, DTensor]: The sharded tensor matching the reference tensor's + configuration, or the reference tensor itself if it is not a DTensor. + """ + if HAVE_DTENSOR and isinstance(reference_tensor, DTensor): + sharded_tensor = distribute_tensor( + tensor_to_shard, + device_mesh=reference_tensor.device_mesh, + placements=reference_tensor.placements, + ) + for k, v in vars(reference_tensor).items(): + setattr(sharded_tensor, k, v) + return sharded_tensor + return reference_tensor + + +def _allreduce_conditional_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): + """ + All-reduce conditional embedding grads. + + Reduce grads across all the pp stages to ensure that parameters of the conditional embedders + (e.g., timestep embedder, FPS embedder, label embedder) stay in sync. + This is for the models with replicated embedders on each PP / VPP rank, like diffusion models. + """ + + if parallel_state.get_pipeline_model_parallel_world_size() > 1 and getattr( + config, "has_cond_embedder", False + ): + grads_dict = {} + for model_chunk in model: + for name, param in get_attr_wrapped_model(model_chunk, 'named_parameters')(): + if param.requires_grad and getattr(param, 'pipeline_parallel', False): + grad = param.main_grad + if name in grads_dict: + # Add all the virtual PP rank's gradients to + # the first local virtual PP rank. + grads_dict[name][0].add_(grad) + # Append to the end for later update after cross-rank reduce. + grads_dict[name].append(grad) + else: + grads_dict[name] = [grad] + if grads_dict: + # All-reduce the gradient on the first VPP rank. + grads = [param_grad[0] for _, param_grad in grads_dict.items()] + coalesced = _flatten_dense_tensors(grads) + torch.distributed.all_reduce( + coalesced, group=parallel_state.get_pipeline_model_parallel_group() + ) + for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)): + buf.copy_(synced) + + # Update the gradients on other VPP ranks. + for grads in grads_dict.values(): + for grad in grads[1:]: + grad.copy_(grads[0]) + + def _allreduce_word_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): """ All-reduce word embedding grads. 
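The `_unshard_if_dtensor` / `_reshard_if_dtensor` helpers above bracket a collective with a gather and a re-shard. A condensed, guarded sketch of that round trip is shown below; it assumes an initialized process group and device mesh, and it skips the attribute copying that the real helpers perform.

```python
# Guarded sketch of the DTensor round trip used by the embedding/layer-norm
# grad all-reduces: gather the full tensor, run the collective, re-shard to the
# original placement. Assumes torch.distributed and a device mesh are set up.
import torch
import torch.distributed as dist

try:
    from torch.distributed._tensor import DTensor, distribute_tensor

    HAVE_DTENSOR = True
except ImportError:
    HAVE_DTENSOR = False


def all_reduce_maybe_dtensor(grad, group=None):
    if HAVE_DTENSOR and isinstance(grad, DTensor):
        full = grad.full_tensor()            # unshard
        dist.all_reduce(full, group=group)   # collective on the full tensor
        return distribute_tensor(            # reshard to the original layout
            full, device_mesh=grad.device_mesh, placements=grad.placements
        )
    dist.all_reduce(grad, group=group)
    return grad
```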
@@ -32,8 +129,11 @@ def _allreduce_word_embedding_grads(model: List[torch.nn.Module], config: Transf model_module = get_attr_wrapped_model(model_module, 'pre_process', return_model_obj=True) if model_module.share_embeddings_and_output_weights: weight = model_module.shared_embedding_or_output_weight() - grad = weight.main_grad + grad_attr = "main_grad" if hasattr(weight, "main_grad") else "grad" + orig_grad = getattr(weight, grad_attr) + grad = _unshard_if_dtensor(orig_grad) torch.distributed.all_reduce(grad, group=parallel_state.get_embedding_group()) + setattr(weight, grad_attr, _reshard_if_dtensor(grad, orig_grad)) def _allreduce_position_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): @@ -54,8 +154,12 @@ def _allreduce_position_embedding_grads(model: List[torch.nn.Module], config: Tr model_module = get_attr_wrapped_model(model_module, 'pre_process', return_model_obj=True) assert hasattr(model_module, 'position_embeddings') - grad = model_module.position_embeddings.weight.main_grad + weight = model_module.position_embeddings.weight + grad_attr = "main_grad" if hasattr(weight, "main_grad") else "grad" + orig_grad = getattr(weight, grad_attr) + grad = _unshard_if_dtensor(orig_grad) torch.distributed.all_reduce(grad, group=parallel_state.get_position_embedding_group()) + setattr(weight, grad_attr, _reshard_if_dtensor(grad, orig_grad)) def _allreduce_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): @@ -76,6 +180,7 @@ def _allreduce_layernorm_grads(model: List[torch.nn.Module], config: Transformer if parallel_state.get_tensor_model_parallel_world_size() > 1 and ( config.sequence_parallel or config.qk_layernorm ): + params = [] grads = [] for model_chunk in model: for name, param in get_attr_wrapped_model(model_chunk, 'named_parameters')(): @@ -85,15 +190,23 @@ def _allreduce_layernorm_grads(model: List[torch.nn.Module], config: Transformer or 'q_layernorm' in name or 'k_layernorm' in name ): - grad = param.main_grad + params.append(param) + grad_attr = "main_grad" if hasattr(param, "main_grad") else "grad" + grad = getattr(param, grad_attr) + grad = _unshard_if_dtensor(grad) grads.append(grad.data) if grads: coalesced = _flatten_dense_tensors(grads) torch.distributed.all_reduce( coalesced, group=parallel_state.get_tensor_model_parallel_group() ) - for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)): + for param, buf, synced in zip( + params, grads, _unflatten_dense_tensors(coalesced, grads) + ): buf.copy_(synced) + grad_attr = "main_grad" if hasattr(param, "main_grad") else "grad" + orig_grad = getattr(param, grad_attr) + setattr(param, grad_attr, _reshard_if_dtensor(buf, orig_grad)) def finalize_model_grads(model: List[torch.nn.Module], num_tokens: Optional[torch.Tensor] = None): @@ -113,6 +226,15 @@ def finalize_model_grads(model: List[torch.nn.Module], num_tokens: Optional[torc if config.timers is not None: config.timers('all-grads-sync').stop() + # All-reduce t_embedder grads (for pp & vpp of DiT). + if config.timers is not None: + config.timers('conditional-embedder-grads-all-reduce', log_level=1).start( + barrier=config.barrier_with_L1_time + ) + _allreduce_conditional_embedding_grads(model, config) + if config.timers is not None: + config.timers('conditional-embedder-grads-all-reduce').stop() + # All-reduce layer-norm grads (for sequence parallelism). 
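The layer-norm hunk above combines the flatten, all-reduce, unflatten pattern with the new `main_grad`-or-`grad` attribute fallback. A compact sketch of that pattern follows; it assumes `torch.distributed` is already initialized, and the DTensor unshard/reshard step is omitted for brevity.

```python
# Sketch of the coalesced gradient all-reduce pattern used above. `group` may be,
# for example, the tensor-model-parallel group.
from typing import List

import torch
import torch.distributed
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors


def coalesced_grad_all_reduce(params: List[torch.nn.Parameter], group=None) -> None:
    grads = []
    for param in params:
        # DDP keeps fused grads in `main_grad`; fall back to the regular `.grad`.
        grad_attr = "main_grad" if hasattr(param, "main_grad") else "grad"
        grad = getattr(param, grad_attr)
        if grad is not None:
            grads.append(grad.data)
    if not grads:
        return
    # One collective over a single flattened buffer instead of one per tensor.
    coalesced = _flatten_dense_tensors(grads)
    torch.distributed.all_reduce(coalesced, group=group)
    for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)):
        buf.copy_(synced)
```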
if config.timers is not None: config.timers('layernorm-grads-all-reduce', log_level=1).start( diff --git a/megatron/core/distributed/param_and_grad_buffer.py b/megatron/core/distributed/param_and_grad_buffer.py index 351ff9e0b..bd69e9239 100644 --- a/megatron/core/distributed/param_and_grad_buffer.py +++ b/megatron/core/distributed/param_and_grad_buffer.py @@ -3,19 +3,27 @@ import logging import math import os -import warnings +from contextlib import nullcontext from enum import Enum from typing import Dict, List, Optional import torch from torch.distributed import _coalescing_manager -from ..utils import is_float8tensor, log_on_each_pipeline_stage +from ..utils import is_float8tensor, is_torch_min_version, log_on_each_pipeline_stage from .distributed_data_parallel_config import DistributedDataParallelConfig logger = logging.getLogger(__name__) +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor + dist_reduce_scatter_func = torch.distributed.reduce_scatter_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + dist_reduce_scatter_func = torch.distributed._reduce_scatter_base + + class BufferType(Enum): """ Enumeration for buffer type. @@ -43,9 +51,9 @@ class _ParamAndGradBucket: Args: params: List of parameters whose gradients are collated in this bucket. - param_data: View in ParamAndGradBuffer.param_data that this bucket is responsible for. - grad_data: View in ParamAndGradBuffer.grad_data that this bucket is responsible for. - offset: Offset of this bucket's view in the larger ParamAndGradBuffer. + param_data: View in _ParamAndGradBuffer.param_data that this bucket is responsible for. + grad_data: View in _ParamAndGradBuffer.grad_data that this bucket is responsible for. + offset: Offset of this bucket's view in the larger _ParamAndGradBuffer. numel_unpadded: Number of unpadded elements in bucket. gradient_scaling_factor: This factor is utilized to scale gradients prior to their communication. Its application is twofold: it facilitates the averaging of gradients @@ -87,22 +95,29 @@ class _ParamAndGradBucketGroup: Args: buckets: A list of buckets. ddp_config: DistributedDataParallel config object. - data_parallel_group: Data-parallel process group. - data_parallel_world_size: World size using the data-parallel group group. + collective_group: intra_distributed_optimizer_instance_group if using distributed + optimizer, data_parallel_group if not. + collective_group_size: World size using the intra data-parallel group. 
""" def __init__( self, buckets: List[_ParamAndGradBucket], ddp_config: DistributedDataParallelConfig, - data_parallel_group: torch.distributed.ProcessGroup, - data_parallel_world_size: int, + collective_group: torch.distributed.ProcessGroup, + collective_group_size: int, ): self.buckets = buckets self.ddp_config = ddp_config - self.data_parallel_group = data_parallel_group - self.data_parallel_world_size = data_parallel_world_size - self.data_parallel_rank = torch.distributed.get_rank(group=data_parallel_group) + + if self.ddp_config.use_distributed_optimizer: + self.intra_distributed_optimizer_instance_group = collective_group + self.intra_distributed_optimizer_instance_size = collective_group_size + self.intra_distributed_optimizer_instance_rank = torch.distributed.get_rank( + group=collective_group + ) + else: + self.data_parallel_group = collective_group # State for bookkeeping: params is the set of parameters this bucket group is # responsible for, params_with_grad is the set of parameters with grads @@ -117,6 +132,10 @@ def __init__( self.next_param_gather_bucket_group = None + if self.ddp_config.num_distributed_optimizer_instances > 1: + self.inter_distributed_optimizer_instance_group = None + self.communication_stream = None + self.reset() self.param_gather_handle = None self.param_gather_dispatched = False @@ -168,15 +187,17 @@ def start_param_sync(self, force_sync: bool = False): async_op = self.ddp_config.overlap_param_gather and not force_sync # Coalesce communication kernels across buckets in the bucket group. - with _coalescing_manager(self.data_parallel_group, async_ops=async_op) as cm: + with _coalescing_manager( + self.intra_distributed_optimizer_instance_group, async_ops=async_op + ) as cm: for bucket in self.buckets: - local_data_view = shard_buffer(bucket.param_data, self.data_parallel_world_size)[ - self.data_parallel_rank - ] - torch.distributed._all_gather_base( + local_data_view = shard_buffer( + bucket.param_data, self.intra_distributed_optimizer_instance_size + )[self.intra_distributed_optimizer_instance_rank] + dist_all_gather_func( bucket.param_data, local_data_view, - group=self.data_parallel_group, + group=self.intra_distributed_optimizer_instance_group, async_op=async_op, ) if async_op: @@ -247,20 +268,51 @@ def start_grad_sync(self): if self.ddp_config.average_in_collective: reduce_op = torch.distributed.ReduceOp.AVG + # Stream synchronization logic of the CUDA streams that is + # implemented below for the gradient reduction within and across + # distributed optimizer instances. + + # Compute Stream - -------------Gradient Compute------------------- + # Comm. Stream - ------(wait for nccl)-----(wait for nccl)------- + # NCCL Stream - -------RS------ -------AR------ + # Use async communications only when overlap_grad_reduce is True. 
- async_op = self.ddp_config.overlap_grad_reduce + async_op = ( + self.ddp_config.overlap_grad_reduce + and self.ddp_config.num_distributed_optimizer_instances == 1 + ) + if ( + self.ddp_config.num_distributed_optimizer_instances > 1 + and self.ddp_config.overlap_grad_reduce + ): + # Assign a communication stream if we use partial DP DistOpt and we + # need to overlap communication + stream_context = torch.cuda.stream(self.communication_stream) + + # The RS/AR communication stream needs to wait for the default stream + # to complete its gradient computation before launching the next + # gradient reduction collective + self.communication_stream.wait_stream(torch.cuda.default_stream()) + else: + stream_context = nullcontext() + + if self.ddp_config.use_distributed_optimizer: + communication_group = self.intra_distributed_optimizer_instance_group + else: + communication_group = self.data_parallel_group + # Coalesce communication kernels across buckets in the bucket group. - with _coalescing_manager(self.data_parallel_group, async_ops=async_op) as cm: + with stream_context, _coalescing_manager(communication_group, async_ops=async_op) as cm: for bucket in self.buckets: if self.ddp_config.use_distributed_optimizer: - local_data_view = shard_buffer(bucket.grad_data, self.data_parallel_world_size)[ - self.data_parallel_rank - ] - torch.distributed._reduce_scatter_base( + local_data_view = shard_buffer( + bucket.grad_data, self.intra_distributed_optimizer_instance_size + )[self.intra_distributed_optimizer_instance_rank] + dist_reduce_scatter_func( local_data_view, bucket.grad_data, op=reduce_op, - group=self.data_parallel_group, + group=self.intra_distributed_optimizer_instance_group, async_op=async_op, ) else: @@ -270,6 +322,29 @@ def start_grad_sync(self): group=self.data_parallel_group, async_op=async_op, ) + + # When enabling partial DP domain DistOpt, we need to All-Reduce across all partial domains + if ( + self.ddp_config.use_distributed_optimizer + and self.ddp_config.num_distributed_optimizer_instances > 1 + ): + + # Create a new coalescing facility for the inter partial DP-AllReduce here + with stream_context, _coalescing_manager( + self.inter_distributed_optimizer_instance_group, async_ops=async_op + ) as cm: + for bucket in self.buckets: + local_data_view = shard_buffer( + bucket.grad_data, self.intra_distributed_optimizer_instance_size + )[self.intra_distributed_optimizer_instance_rank] + + torch.distributed.all_reduce( + local_data_view, + op=reduce_op, + group=self.inter_distributed_optimizer_instance_group, + async_op=async_op, + ) + if async_op: self.grad_reduce_handle = cm else: @@ -294,6 +369,11 @@ def finish_grad_sync(self): if not self.ddp_config.overlap_grad_reduce: self.start_grad_sync() return + # When using partial DP DistOpt, we don't need to sync as we launch comms on a separate + # communication stream + if self.ddp_config.num_distributed_optimizer_instances > 1: + torch.cuda.default_stream().wait_stream(self.communication_stream) + return assert self.grad_reduce_handle is not None, ( f'Communication call has not been issued for this bucket ' f'({len(self.params_with_grad)}/{len(self.params)} params have grad available)' @@ -619,7 +699,7 @@ def _new_bucket( assert end_index % self.data_parallel_world_size == 0 assert (start_index, end_index) == self.bucket_indices[bucket_id] - # Get appropriate view into global ParamAndGradBuffer. + # Get appropriate view into global _ParamAndGradBuffer. 
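The partial-DistOpt path in the hunks above performs a reduce-scatter inside each optimizer instance followed by an all-reduce of the local shard across instances, with the collectives launched on a dedicated communication stream that is later waited on before the optimizer step. The following is a condensed sketch of that control flow, assuming an initialized NCCL process group, a CUDA device, pre-built `intra_group` / `inter_group` process groups, and a gradient buffer padded to a multiple of the instance size; it is not the exact bucketed implementation.

```python
# Condensed sketch of the two-level gradient reduction used with partial
# distributed-optimizer instances.
import torch
import torch.distributed as dist


def reduce_grad_buffer(grad_data, intra_group, inter_group, comm_stream):
    instance_size = dist.get_world_size(group=intra_group)
    instance_rank = dist.get_rank(group=intra_group)
    # View of this rank's shard of the flat (padded) gradient buffer.
    shard = grad_data.view(instance_size, -1)[instance_rank]

    # Collectives run on a separate stream; wait for grad compute on the default stream.
    comm_stream.wait_stream(torch.cuda.default_stream())
    with torch.cuda.stream(comm_stream):
        # 1) Reduce-scatter within the optimizer instance (each rank keeps its shard).
        dist.reduce_scatter_tensor(shard, grad_data, group=intra_group)
        # 2) All-reduce the local shard across optimizer instances.
        dist.all_reduce(shard, group=inter_group)

    # Before the optimizer consumes the shard, the default stream must wait for the
    # communication stream (done in finish_grad_sync in the real code).
    torch.cuda.default_stream().wait_stream(comm_stream)
```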
bucketed_param_data = None if self.param_data is not None: bucketed_param_data = self._get( @@ -756,14 +836,3 @@ def partition_buckets( ) ) return bucket_groups - - -# For backwards compatibility. ParamAndGradBuffer will be deprecated in future release. -# _ParamAndGradBuffer is not intended to be consumed directly by external code. -class ParamAndGradBuffer(_ParamAndGradBuffer): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - warnings.warn( - "`ParamAndGradBuffer` will be deprecated in a future release, and is not " - "intended to be used by external code." - ) diff --git a/megatron/core/distributed/torch_fully_sharded_data_parallel.py b/megatron/core/distributed/torch_fully_sharded_data_parallel.py new file mode 100644 index 000000000..6d2e84e77 --- /dev/null +++ b/megatron/core/distributed/torch_fully_sharded_data_parallel.py @@ -0,0 +1,115 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from typing import List + +import torch + +try: + from torch.distributed import DeviceMesh + from torch.distributed._composable.fsdp import fully_shard + + HAVE_FSDP = True +except ImportError: + HAVE_FSDP = False + +from .. import parallel_state, tensor_parallel +from ..models.common.embeddings.language_model_embedding import LanguageModelEmbedding +from ..models.common.embeddings.rotary_pos_embedding import RotaryEmbedding +from ..transformer.transformer_config import TransformerConfig +from ..transformer.transformer_layer import TransformerLayer +from .data_parallel_base import _BaseDataParallel + + +class TorchFullyShardedDataParallel(_BaseDataParallel): + """ + Enables fully sharded data parallelism by wrapping the given model with + the PyTorch FSDP2 API: + https://github.com/pytorch/torchtitan/blob/main/docs/fsdp.md + To utilize this class, PyTorch version >= 2.4.0 is required. + + Args: + config: Transformer config object. + module: Underlying model. + sub_modules_to_wrap: List of sub_modules to shard with FSDP. + Parameters within each sub_module will be all-gathered just-in-time. + The default list includes the following submodules derived from the + GPT model architecture: + TransformerLayer (all Transformer layers) + LanguageModelEmbedding (initial embedding layer) + RotaryEmbedding (initial RoPE layer) + tensor_parallel.ColumnParallelLinear (final output layer) + """ + + def __init__( + self, + config: TransformerConfig, + module: torch.nn.Module, + sub_modules_to_wrap: List[torch.nn.Module] = [ + TransformerLayer, + LanguageModelEmbedding, + RotaryEmbedding, + tensor_parallel.ColumnParallelLinear, + ], + **kwargs + ): + + assert ( + HAVE_FSDP + ), 'TorchFullyShardedDataParallel requires PyTorch >= 2.4.0 with FSDP 2 support.' + + super().__init__(config=config, module=module) + self.data_parallel_group = parallel_state.get_data_parallel_group( + with_context_parallel=True + ) + + mesh = DeviceMesh.from_group(self.data_parallel_group, "cuda") + + kwargs = {"mesh": mesh} + + def save_custom_attrs(module): + custom_attrs = {} + for name, param in module.named_parameters(): + attrs = vars(param) + custom_attrs[name] = {k: v for k, v in attrs.items()} + return custom_attrs + + def restore_custom_attrs(module, custom_attrs): + for name, param in module.named_parameters(): + if name in custom_attrs: + for attr_name, attr_value in custom_attrs[name].items(): + setattr(param, attr_name, attr_value) + + # Save the custom attributes on Parameters before FSDP overwrites them. + # See https://github.com/pytorch/pytorch/issues/136929. 
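The `save_custom_attrs` / `restore_custom_attrs` pattern referenced in the comment above can be shown on a toy module with no distributed setup. Here the parameter replacement that FSDP2 performs is only simulated, and `allreduce` is just one example of the Megatron-specific parameter attributes that would otherwise be lost; in the real class the restore happens after the root module has been wrapped.

```python
# Sketch of preserving custom attributes attached to Parameters across a wrapping
# step that replaces the parameters (simulated here instead of calling fully_shard).
import torch


def save_custom_attrs(module: torch.nn.Module):
    return {name: dict(vars(param)) for name, param in module.named_parameters()}


def restore_custom_attrs(module: torch.nn.Module, custom_attrs):
    for name, param in module.named_parameters():
        for attr_name, attr_value in custom_attrs.get(name, {}).items():
            setattr(param, attr_name, attr_value)


layer = torch.nn.Linear(4, 4)
layer.weight.allreduce = False  # custom attribute that downstream code relies on

attrs = save_custom_attrs(layer)
# Simulate parameter replacement (FSDP2 swaps in DTensor-backed parameters).
layer.weight = torch.nn.Parameter(layer.weight.detach().clone())
restore_custom_attrs(layer, attrs)

assert getattr(layer.weight, "allreduce", True) is False
```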
+ attrs = save_custom_attrs(self.module) + + prev_module = None + for sub_module in self.module.modules(): + # Wrap individual submodules to fetch parameters just-in-time rather than + # conservatively fetching all parameters at the start of each iteration. + # See https://github.com/pytorch/pytorch/issues/114299. + if any( + isinstance(sub_module, sub_module_to_wrap) + for sub_module_to_wrap in sub_modules_to_wrap + ): + fully_shard(sub_module, **kwargs) + + # Explicitly set the FSDP backward prefetch schedule to prevent activation + # recomputation from disrupting the automatically generated default schedule. + if config.recompute_granularity is not None: + sub_module.set_modules_to_backward_prefetch( + [prev_module] if prev_module else [] + ) + prev_module = sub_module + + # Wrap the root module as required by the FSDP API. + # See https://github.com/pytorch/pytorch/issues/114299. + fully_shard(self.module, **kwargs) + + restore_custom_attrs(self.module, attrs) + + def load_state_dict(self, state_dict, strict=True): + """ + No-op because tensors are already loaded in-place by + `_load_base_checkpoint` with FSDP2.""" + pass diff --git a/megatron/core/export/trtllm/engine_builder/trtllm_engine_builder.py b/megatron/core/export/trtllm/engine_builder/trtllm_engine_builder.py index e729fec41..df8ea627b 100644 --- a/megatron/core/export/trtllm/engine_builder/trtllm_engine_builder.py +++ b/megatron/core/export/trtllm/engine_builder/trtllm_engine_builder.py @@ -38,6 +38,7 @@ def build_and_save_engine( multiple_profiles: bool = False, gpt_attention_plugin: str = "auto", gemm_plugin: str = "auto", + reduce_fusion: bool = False, ): """Method to build the TRTLLM Engine @@ -90,6 +91,7 @@ def build_and_save_engine( plugin_config.remove_input_padding = remove_input_padding plugin_config.use_paged_context_fmha = paged_context_fmha plugin_config.multiple_profiles = multiple_profiles + plugin_config.reduce_fusion = reduce_fusion if max_seq_len is None: max_seq_len = max_input_len + max_output_len @@ -137,12 +139,16 @@ def build_and_save_engine( build_config.lora_config = lora_config model = model_cls.from_config(trtllm_model_config) + model = optimize_model( model, use_parallel_embedding=trtllm_model_config.use_parallel_embedding, share_embedding_table=trtllm_model_config.share_embedding_table, ) + preprocess_weights(trtllm_model_weights, trtllm_model_config) model.load(trtllm_model_weights) engine = build_trtllm(model, build_config) + engine.save(engine_dir) + return engine diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py b/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py index cad931503..7a1401fb2 100644 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py +++ b/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py @@ -1,18 +1,36 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers -from megatron.core.export.model_type import ModelType -from megatron.core.export.trtllm.model_to_trllm_mapping.falcon_model import FALCON_DICT -from megatron.core.export.trtllm.model_to_trllm_mapping.gemma_model import GEMMA_DICT -from megatron.core.export.trtllm.model_to_trllm_mapping.gpt_model import GPT_DICT -from megatron.core.export.trtllm.model_to_trllm_mapping.gpt_next_model import GPT_NEXT_DICT -from megatron.core.export.trtllm.model_to_trllm_mapping.llama_model import LLAMA_DICT -from megatron.core.export.trtllm.model_to_trllm_mapping.starcoder_model import STARCODER_DICT - +# Map the most common mcore layers to TRTLLM layers +# pylint: disable=line-too-long DEFAULT_CONVERSION_DICT = { - ModelType.llama: LLAMA_DICT, - ModelType.falcon: FALCON_DICT, - ModelType.gemma: GEMMA_DICT, - ModelType.starcoder: STARCODER_DICT, - ModelType.gpt: GPT_DICT, - ModelType.gptnext: GPT_NEXT_DICT, + # INPUT + 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, + 'embedding.position_embeddings.weight': TRTLLMLayers.position_embedding, + # ATTENTION + 'decoder.layers.input_layernorm.weight': TRTLLMLayers.input_layernorm_weight, + 'decoder.layers.input_layernorm.bias': TRTLLMLayers.input_layernorm_bias, + 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, + 'decoder.layers.self_attention.linear_qkv.bias': TRTLLMLayers.attention_qkv_bias, + 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, + 'decoder.layers.self_attention.linear_proj.bias': TRTLLMLayers.attention_dense_bias, + # MLP + 'decoder.layers.pre_mlp_layernorm.weight': TRTLLMLayers.post_layernorm_weight, + 'decoder.layers.pre_mlp_layernorm.bias': TRTLLMLayers.post_layernorm_bias, + 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, + 'decoder.layers.mlp.linear_fc1.bias': TRTLLMLayers.mlp_fc_bias, + 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, + 'decoder.layers.mlp.linear_fc2.bias': TRTLLMLayers.mlp_projection_bias, + # FINAL LAYER NORM + 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, + 'decoder.final_layernorm.bias': TRTLLMLayers.final_layernorm_bias, + # OUTPUT LAYER + 'output_layer.weight': TRTLLMLayers.lm_head, + # TRANSFORMER ENGINE LAYER NORM + # ATTENTION + 'decoder.layers.self_attention.linear_qkv.layer_norm_weight': TRTLLMLayers.input_layernorm_weight, + 'decoder.layers.self_attention.linear_qkv.layer_norm_bias': TRTLLMLayers.input_layernorm_bias, + # MLP + 'decoder.layers.mlp.linear_fc1.layer_norm_weight': TRTLLMLayers.post_layernorm_weight, + 'decoder.layers.mlp.linear_fc1.layer_norm_bias': TRTLLMLayers.post_layernorm_bias, } diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/falcon_model.py b/megatron/core/export/trtllm/model_to_trllm_mapping/falcon_model.py deleted file mode 100644 index d1469d02b..000000000 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/falcon_model.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers - -# pylint: disable=line-too-long -FALCON_DICT = { - # INPUT - 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, - 'embedding.position_embeddings.weight': TRTLLMLayers.position_embedding, - # ATTENTION - 'decoder.layers.input_layernorm.weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.input_layernorm.bias': TRTLLMLayers.input_layernorm_bias, - 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, - 'decoder.layers.self_attention.linear_qkv.layer_norm_weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, - # MLP - 'decoder.layers.pre_mlp_layernorm.weight': TRTLLMLayers.post_layernorm_weight, - 'decoder.layers.pre_mlp_layernorm.bias': TRTLLMLayers.post_layernorm_bias, - 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, - 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, - # FINAL LAYER NORM - 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, - 'decoder.final_layernorm.bias': TRTLLMLayers.final_layernorm_bias, - # OUTPUT LAYER - 'output_layer.weight': TRTLLMLayers.lm_head, -} diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/gemma_model.py b/megatron/core/export/trtllm/model_to_trllm_mapping/gemma_model.py deleted file mode 100644 index 47a021170..000000000 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/gemma_model.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers - -# pylint: disable=line-too-long -GEMMA_DICT = { - # INPUT - 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, - # ATTENTION - 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, - 'decoder.layers.self_attention.linear_qkv.layer_norm_weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, - # MLP - 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, - 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, - 'decoder.layers.mlp.linear_fc1.layer_norm_weight': TRTLLMLayers.post_layernorm_weight, - # FINAL LAYER NORM - 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, - # OUTPUT LAYER - 'output_layer.weight': TRTLLMLayers.lm_head, -} diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/gpt_model.py b/megatron/core/export/trtllm/model_to_trllm_mapping/gpt_model.py deleted file mode 100644 index eda27600c..000000000 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/gpt_model.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers - -GPT_DICT = { - # INPUT - 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, - 'embedding.position_embeddings.weight': TRTLLMLayers.position_embedding, - # ATTENTION - 'decoder.layers.input_layernorm.weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.input_layernorm.bias': TRTLLMLayers.input_layernorm_bias, - 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, - 'decoder.layers.self_attention.linear_qkv.bias': TRTLLMLayers.attention_qkv_bias, - 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, - 'decoder.layers.self_attention.linear_proj.bias': TRTLLMLayers.attention_dense_bias, - # MLP - 'decoder.layers.pre_mlp_layernorm.weight': TRTLLMLayers.post_layernorm_weight, - 'decoder.layers.pre_mlp_layernorm.bias': TRTLLMLayers.post_layernorm_bias, - 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, - 'decoder.layers.mlp.linear_fc1.bias': TRTLLMLayers.mlp_fc_bias, - 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, - 'decoder.layers.mlp.linear_fc2.bias': TRTLLMLayers.mlp_projection_bias, - # FINAL LAYER NORM - 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, - 'decoder.final_layernorm.bias': TRTLLMLayers.final_layernorm_bias, - # OUTPUT LAYER - 'output_layer.weight': TRTLLMLayers.lm_head, -} diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/gpt_next_model.py b/megatron/core/export/trtllm/model_to_trllm_mapping/gpt_next_model.py deleted file mode 100644 index ac5f84ef1..000000000 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/gpt_next_model.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers - -# pylint: disable=line-too-long -GPT_NEXT_DICT = { - # INPUT - 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, - # ATTENTION - 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, - 'decoder.layers.self_attention.linear_qkv.layer_norm_weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.self_attention.linear_qkv.layer_norm_bias': TRTLLMLayers.input_layernorm_bias, - 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, - # MLP - 'decoder.layers.mlp.linear_fc1.layer_norm_weight': TRTLLMLayers.post_layernorm_weight, - 'decoder.layers.mlp.linear_fc1.layer_norm_bias': TRTLLMLayers.post_layernorm_bias, - 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, - 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, - # FINAL LAYER NORM - 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, - 'decoder.final_layernorm.bias': TRTLLMLayers.final_layernorm_bias, - # OUTPUT LAYER - 'output_layer.weight': TRTLLMLayers.lm_head, -} diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/llama_model.py b/megatron/core/export/trtllm/model_to_trllm_mapping/llama_model.py deleted file mode 100644 index 5fd206708..000000000 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/llama_model.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers - -# pylint: disable=line-too-long -LLAMA_DICT = { - # INPUT - 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, - 'embedding.position_embeddings.weight': TRTLLMLayers.position_embedding, - # ATTENTION - 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, - 'decoder.layers.self_attention.linear_qkv.layer_norm_weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, - # MLP - 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, - 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, - 'decoder.layers.mlp.linear_fc1.layer_norm_weight': TRTLLMLayers.post_layernorm_weight, - # FINAL LAYER NORM - 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, - # OUTPUT LAYER - 'output_layer.weight': TRTLLMLayers.lm_head, -} diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/starcoder_model.py b/megatron/core/export/trtllm/model_to_trllm_mapping/starcoder_model.py deleted file mode 100644 index dce61d26c..000000000 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/starcoder_model.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers - -# pylint: disable=line-too-long -STARCODER_DICT = { - # INPUT - 'embedding.word_embeddings.weight': TRTLLMLayers.vocab_embedding, - # ATTENTION - 'decoder.layers.input_layernorm.weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.input_layernorm.bias': TRTLLMLayers.input_layernorm_bias, - 'decoder.layers.self_attention.linear_qkv.weight': TRTLLMLayers.attention_qkv_weight, - 'decoder.layers.self_attention.linear_qkv.bias': TRTLLMLayers.attention_qkv_bias, - 'decoder.layers.self_attention.linear_qkv.layer_norm_weight': TRTLLMLayers.input_layernorm_weight, - 'decoder.layers.self_attention.linear_qkv.layer_norm_bias': TRTLLMLayers.input_layernorm_bias, - 'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight, - 'decoder.layers.self_attention.linear_proj.bias': TRTLLMLayers.attention_dense_bias, - # MLP - 'decoder.layers.mlp.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight, - 'decoder.layers.mlp.linear_fc1.bias': TRTLLMLayers.mlp_fc_bias, - 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, - 'decoder.layers.mlp.linear_fc2.bias': TRTLLMLayers.mlp_projection_bias, - 'decoder.layers.mlp.linear_fc1.layer_norm_weight': TRTLLMLayers.post_layernorm_weight, - 'decoder.layers.mlp.linear_fc1.layer_norm_bias': TRTLLMLayers.post_layernorm_bias, - # FINAL LAYER NORM - 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, - 'decoder.final_layernorm.bias': TRTLLMLayers.final_layernorm_bias, - # OUTPUT LAYER - 'output_layer.weight': TRTLLMLayers.lm_head, -} diff --git a/megatron/core/export/trtllm/trtllm_helper.py b/megatron/core/export/trtllm/trtllm_helper.py index d8bef18b3..45093b673 100644 --- a/megatron/core/export/trtllm/trtllm_helper.py +++ b/megatron/core/export/trtllm/trtllm_helper.py @@ -1,6 +1,9 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
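With the per-model mapping modules deleted above, exporters now start from the single DEFAULT_CONVERSION_DICT and overlay any overrides on top. A minimal sketch of that merge pattern (illustrative only; the override simply reuses an existing entry):

from megatron.core.export.trtllm.model_to_trllm_mapping.default_conversion_dict import (
    DEFAULT_CONVERSION_DICT,
)
from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers

# Copy the shared mapping and update it, mirroring how TRTLLMHelper below merges the
# user-supplied trtllm_conversion_dict into the default one.
conversion_dict = DEFAULT_CONVERSION_DICT.copy()
conversion_dict.update(
    {'decoder.layers.self_attention.linear_proj.weight': TRTLLMLayers.attention_dense_weight}
)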
+from typing import Union + import tensorrt_llm +import torch from tensorrt_llm.functional import non_gated_version from tensorrt_llm.layers import MoeConfig @@ -13,6 +16,7 @@ ) from megatron.core.export.trtllm.trt_model_config import TRT_MODEL_CONFIG from megatron.core.export.trtllm.trt_model_type import TRT_MODEL_TYPE_STRING +from megatron.core.export.trtllm.trtllm_layers import TRTLLMLayers # pylint: disable=line-too-long from megatron.core.export.trtllm.trtllm_weights_converter.distributed_trtllm_model_weights_converter import ( @@ -52,7 +56,7 @@ def __init__( Args: transformer_config (TransformerConfig): The transformer config model_type (ModelType): The type of the input model. Enum (megatron.core.export.model_type.ModelType) - conversion_dict (dict, optional): A conversion dictionary that will map your model layer names to trtllm equivalent layer names. Sample dictionaries are given megatron/core/export/model_mapping. NOTE: Ingore layer numbers in the model layer names. (e.g) decoder.layers.0.attention_qkv.weight will be decoder.layers.attention_qkv.weight in the mapping dictionary. Defaults to {}. + trtllm_conversion_dict (dict, optional): A conversion dictionary that will map your model layer names to trtllm equivalent layer names. Default dictionary is given megatron/core/export/model_to_trtllm_mapping. This dict is merged into the default dict. NOTE: Ignore layer numbers in the model layer names. (e.g) decoder.layers.0.attention_qkv.weight will be decoder.layers.attention_qkv.weight in the mapping dictionary. Defaults to {}. position_embedding_type (str, optional): The position embedding type. Defaults to None. max_position_embeddings (int, optional): Max posistion embeddings value. Defaults to None. rotary_percentage (int, optional): The rotary percentage if using rope embedding. Defaults to 1.0. @@ -67,7 +71,7 @@ def __init__( self.transformer_config = transformer_config self.model_type = model_type - self.trtllm_conversion_dict = DEFAULT_CONVERSION_DICT[model_type] + self.trtllm_conversion_dict = DEFAULT_CONVERSION_DICT.copy() self.trtllm_conversion_dict.update(trtllm_conversion_dict) assert position_embedding_type in [ 'learned_absolute', @@ -83,6 +87,7 @@ def __init__( self.seq_len_interpolation_factor = seq_len_interpolation_factor self.moe_renorm_mode = moe_renorm_mode self.share_embeddings_and_output_weights = share_embeddings_and_output_weights + self.weights_converter = None def _get_trtllm_config( self, @@ -91,6 +96,8 @@ def _get_trtllm_config( gpus_per_node: int, vocab_size_padded: int, dtype: DataType, + fp8_quantized: bool = False, + fp8_kvcache: bool = False, ): """Get TRTLLM Config @@ -136,7 +143,10 @@ def _get_trtllm_config( 'use_parallel_embedding': export_config.use_parallel_embedding, 'embedding_sharding_dim': 0, 'share_embedding_table': export_config.use_embedding_sharing, - 'quantization': {'quant_algo': None, 'kv_cache_quant_algo': None}, + 'quantization': { + 'quant_algo': "FP8" if fp8_quantized else None, + 'kv_cache_quant_algo': "FP8" if fp8_kvcache else None, + }, 'bias': self.transformer_config.add_bias_linear, 'apply_query_key_layer_scaling': False, 'rotary_pct': self.rotary_percentage, @@ -172,6 +182,59 @@ def _get_trtllm_config( config_cls = TRT_MODEL_CONFIG[self.model_type] return config_cls(**config) + def _load_scaling_factors(self, model_state_dict: dict) -> dict: + """Loads scaling factors from model state dictionary. 
+ + Args: + model_state_dict (dict): Model state dictionary + Returns: + dict: Maps scaling factor key, to its value and the inverse. The inverse is used for casting the quantized weights. + """ + weight_scaling_suffix = '.weights_scaling_factor' + activation_scaling_suffix = '.activation_scaling_factor' + mock_scales_dict = {} + extra_state_infix = "._extra_state" + mock_suffix = '.weight' + + for key, val in model_state_dict.items(): + if extra_state_infix in key and not key.endswith("core_attention._extra_state"): + mock_key = key.split(extra_state_infix)[0] + mock_suffix + mock_scales_dict[mock_key] = val + + mock_scales_dict = TRTLLMLayers.rename_input_layer_names_to_trtllm_layer_names( + mock_scales_dict, self.trtllm_conversion_dict, False + ) + split_gated_activation = self.activation in ["swiglu", "geglu", "fast-swiglu", "fast-geglu"] + + scales = {} + for key, val in mock_scales_dict.items(): + if val is None: + continue + + val.seek(0) + extra_states = torch.load(val) + + activation_scaling_factor_key = key.replace(mock_suffix, activation_scaling_suffix) + weight_scaling_factor_key = key.replace(mock_suffix, weight_scaling_suffix) + + activation_scales = { + 'trt_llm_scale': extra_states['scale_inv_fwd'][0].view(1), + 'weight_multiplier': extra_states['scale_fwd'][0].view(1), + } + + weight_scales = { + 'trt_llm_scale': extra_states['scale_inv_fwd'][1].view(1), + 'weight_multiplier': extra_states['scale_fwd'][1].view(1), + } + + scales[activation_scaling_factor_key] = activation_scales + scales[weight_scaling_factor_key] = weight_scales + if split_gated_activation and ".mlp.fc" in key: + scales[activation_scaling_factor_key.replace("fc", "gate")] = activation_scales + scales[weight_scaling_factor_key.replace("fc", "gate")] = weight_scales + + return scales + # pylint: disable=line-too-long def get_trtllm_pretrained_config_and_model_weights( self, @@ -182,6 +245,8 @@ def get_trtllm_pretrained_config_and_model_weights( vocab_size: int = None, gpus_per_node: int = None, state_dict_split_by_layer_numbers: bool = True, + fp8_quantized: bool = False, + fp8_kvcache: bool = False, ): """Get TRTLLM Config and Converted Model Weights @@ -192,8 +257,7 @@ def get_trtllm_pretrained_config_and_model_weights( Same thing happens with the pretrained config Args: - model_state_dict (dict, optional): The input model state dictionary (Entire model state loaded on CPU). Used only when on device conversion is set to False. Defaults to None. - False, or the model state dict of each GPU in the case of on_device conversion) + model_state_dict (dict): The input model state dictionary (Entire model state loaded on CPU) or the model state dict of each GPU in the case of on_device conversion) export_config (ExportConfig): The export config used to define inference tp size, pp size etc. Used only for on device conversion. dtype (DataType): The data type of model precision on_device_distributed_conversion (bool, optional): Convert on gpus in distributed setting. This assumes that the model state dict is sharded according to required inference model parallelism and that each gpu gets its part of the model state dict . Defaults to False. @@ -204,22 +268,34 @@ def get_trtllm_pretrained_config_and_model_weights( Returns: Two lists . First list of trtllm converted model weights(Either on device, or a list of weights for each gpu) and the trtllm_model_configs. 
""" + assert model_state_dict is not None, "Model state dict is not set" + + scales = self._load_scaling_factors(model_state_dict) if fp8_quantized else {} + model_state_dict = {k: v for k, v in model_state_dict.items() if 'extra_state' not in k} + if on_device_distributed_conversion: - assert (vocab_size is not None, "Need to pass in vocab_size for on device") - assert ( - self.model_type in [ModelType.gpt, ModelType.gptnext, ModelType.llama], - "On device conversion only supported for model types gptnext and llama", - ) + assert vocab_size is not None, "Need to pass in vocab_size for on device" + supported_model = self.model_type in [ModelType.gpt, ModelType.gptnext, ModelType.llama] assert ( - export_config is None, - "Export config is inferred based on the parallel state. If you want to set inference tp 2, then load the model with this TP2 setting and just pass in the model state dict. ", + supported_model + ), "On device conversion only supported for model types gptnext and llama" + assert export_config is None, ( + "Export config is inferred based on the parallel state. " + "If you want to set inference tp 2, then load the model with this TP2 setting and just pass in the model state dict." ) + assert ( gpus_per_node is not None ), "Need to pass in gpus_per_node for on device conversion" trtllm_model_weights_on_device, trtllm_model_config = ( self._get_trtllm_pretrained_config_and_model_weights_in_distributed_setting( - model_state_dict, dtype, vocab_size, gpus_per_node + model_state_dict, + dtype, + vocab_size, + gpus_per_node, + scales, + fp8_quantized, + fp8_kvcache, ) ) return [trtllm_model_weights_on_device], [trtllm_model_config] @@ -238,13 +314,48 @@ def get_trtllm_pretrained_config_and_model_weights( dtype, gpus_per_node, state_dict_split_by_layer_numbers, + scales, + fp8_quantized, + fp8_kvcache, ) ) return trtllm_model_weights_list, trtllm_model_config_list + def _add_scales_to_converter( + self, + converter: Union[ + SingleDeviceTRTLLMModelWeightsConverter, DistributedTRTLLMModelWeightsConverter + ], + scales: dict, + fp8_kvcache: bool, + ): + """Adds scaling factors to the distributed and single device converters. + + Args: + converter (ModelWeightConverter): Converter, holding the TRT-LLM model weights. 
+ scales (dict): Dictionary holding TRT-LLM scaling factors + fp8_kvcache (bool): If true, creates scaling factors (equal to 1.0) for kv_cache quantization + """ + trt_scales = {key: scale['trt_llm_scale'] for key, scale in scales.items()} + kv_scales = {} + if fp8_kvcache: + for key in converter.trtllm_model_weights: + if '.attention.qkv.weight' in key: + kv_key = key.split('.qkv')[0] + '.kv_cache_scaling_factor' + kv_scales[kv_key] = torch.tensor([1.0], dtype=torch.float32) + + converter.trtllm_model_weights |= trt_scales | kv_scales + def _get_trtllm_pretrained_config_and_model_weights_in_distributed_setting( - self, model_state_dict: dict, dtype: DataType, vocab_size: int, gpus_per_node: int + self, + model_state_dict: dict, + dtype: DataType, + vocab_size: int, + gpus_per_node: int, + scales: dict, + fp8_quantized: bool, + fp8_kvcache: bool, ): """Get the TRTLLM Pretrained config and model weights list in a distributed setting @@ -257,26 +368,30 @@ def _get_trtllm_pretrained_config_and_model_weights_in_distributed_setting( dtype (DataType): The data type or model precision vocab_size (int): Tokenizer vocab size gpus_per_node (int): The number of gpus per node - + scales (dict): Dictionary with fp8 scaling factors + fp8_quantized (bool): True for fp8 checkpoint export + fp8_kvcache (bool): True for fp8 KV-cache quantization Returns: Two lists . List of trtllm converted model weights and trtllm model configs (One for each gpu). """ - distributed_trtllm_model_weights_converter = DistributedTRTLLMModelWeightsConverter( + self.weights_converter = DistributedTRTLLMModelWeightsConverter( transformer_config=self.transformer_config, dtype=dtype, multi_query_mode=self.multi_query_mode, activation=self.activation, + scales=scales, ) - distributed_trtllm_model_weights_converter.convert( + self.weights_converter.convert( model_state_dict=model_state_dict, trtllm_conversion_dict=self.trtllm_conversion_dict, tokenizer_vocab_size=vocab_size, ) + self._add_scales_to_converter(self.weights_converter, scales, fp8_kvcache) export_config = ExportConfig( - inference_pp_size=distributed_trtllm_model_weights_converter.inference_pp_size, - inference_tp_size=distributed_trtllm_model_weights_converter.inference_tp_size, + inference_pp_size=self.weights_converter.inference_pp_size, + inference_tp_size=self.weights_converter.inference_tp_size, use_parallel_embedding=True, use_embedding_sharing=self.share_embeddings_and_output_weights, ) @@ -289,12 +404,13 @@ def _get_trtllm_pretrained_config_and_model_weights_in_distributed_setting( gpus_per_node=gpus_per_node, vocab_size_padded=vocab_size, dtype=dtype, + fp8_quantized=fp8_quantized, + fp8_kvcache=fp8_kvcache, ) model_parallel_rank = ( - distributed_trtllm_model_weights_converter.pp_rank - * distributed_trtllm_model_weights_converter.inference_tp_size - + distributed_trtllm_model_weights_converter.tp_rank + self.weights_converter.pp_rank * self.weights_converter.inference_tp_size + + self.weights_converter.tp_rank ) trtllm_model_config.mapping = tensorrt_llm.Mapping( @@ -304,15 +420,18 @@ def _get_trtllm_pretrained_config_and_model_weights_in_distributed_setting( pp_size=export_config.inference_pp_size, ) - return distributed_trtllm_model_weights_converter.trtllm_model_weights, trtllm_model_config + return self.weights_converter.trtllm_model_weights, trtllm_model_config def _get_trtllm_pretrained_config_and_model_weights_list_on_single_device( self, export_config: ExportConfig, model_state_dict: dict, dtype: DataType, - gpus_per_node=None, - 
state_dict_split_by_layer_numbers=True, + gpus_per_node, + state_dict_split_by_layer_numbers, + scales: dict, + fp8_quantized: bool, + fp8_kvcache: bool, ): """Get the TRTLLM Pretrained config and model weights list (one per gpu rank) on single device (CPU/GPU) @@ -324,6 +443,9 @@ def _get_trtllm_pretrained_config_and_model_weights_list_on_single_device( dtype (DataType): The data type or model precision gpus_per_node (int, optional): Number of gpus per node state_dict_split_by_layer_numbers (bool, optional): Are the model layers split by layer numbers in state dict. For example : mlp.fc1.weight can be represented like mlp.fc1.weight of shape [num_layers, hidden_dim, ffn_hidden_dim]} or it can be like mlp.fc1.layers.0.weight of shape [hidden_dim, ffn_hidden_dim], then mlp.fc1.layers.1.weight ... for all layers. If you use represenation 2 set this to True. Defaults to True + scales (dict): Dictionary with fp8 scaling factors + fp8_quantized (bool): True for fp8 checkpoint export + fp8_kvcache (bool): True for fp8 KV-cache quantization Returns: Two lists . List of trtllm converted model weights and trtllm model configs (One for each gpu). @@ -331,21 +453,24 @@ def _get_trtllm_pretrained_config_and_model_weights_list_on_single_device( trtllm_model_configs_list = [] trtllm_model_weights_list = [] - single_device_trtllm_model_weights_converter = SingleDeviceTRTLLMModelWeightsConverter( + self.weights_converter = SingleDeviceTRTLLMModelWeightsConverter( export_config=export_config, transformer_config=self.transformer_config, dtype=dtype, activation=self.activation, multi_query_mode=self.multi_query_mode, + scales=scales, ) # Convert the input model state dict to trtllm model weights dictionary - single_device_trtllm_model_weights_converter.convert( + self.weights_converter.convert( model_state_dict=model_state_dict, trtllm_conversion_dict=self.trtllm_conversion_dict, state_dict_split_by_layer_numbers=state_dict_split_by_layer_numbers, ) - vocab_size_padded = single_device_trtllm_model_weights_converter.get_padded_vocab_size() + self._add_scales_to_converter(self.weights_converter, scales, fp8_kvcache) + + vocab_size_padded = self.weights_converter.get_padded_vocab_size() world_size = export_config.inference_tp_size * export_config.inference_pp_size gpus_per_node = gpus_per_node or export_config.inference_tp_size @@ -364,15 +489,15 @@ def _get_trtllm_pretrained_config_and_model_weights_list_on_single_device( gpus_per_node=gpus_per_node, vocab_size_padded=vocab_size_padded, dtype=dtype, + fp8_quantized=fp8_quantized, + fp8_kvcache=fp8_kvcache, ) trtllm_model_config.mapping = mapping trtllm_model_configs_list.append(trtllm_model_config) # Get the model weights for each rank and append it to the trtllm_model_weights_list - trtllm_model_weights_per_gpu = ( - single_device_trtllm_model_weights_converter.get_local_model_weights_per_gpu( - mapping, trtllm_model_config - ) + trtllm_model_weights_per_gpu = self.weights_converter.get_local_model_weights_per_gpu( + mapping, trtllm_model_config ) trtllm_model_weights_list.append(trtllm_model_weights_per_gpu) @@ -434,7 +559,7 @@ def build_and_save_engine( gemm_plugin (str, optional): Gemma plugin to use. Defaults to "auto". 
""" - TRTLLMEngineBuilder.build_and_save_engine( + engine = TRTLLMEngineBuilder.build_and_save_engine( engine_dir, trtllm_model_weights, trtllm_model_config, @@ -459,3 +584,5 @@ def build_and_save_engine( gpt_attention_plugin, gemm_plugin, ) + + return engine diff --git a/megatron/core/export/trtllm/trtllm_weights_converter/distributed_trtllm_model_weights_converter.py b/megatron/core/export/trtllm/trtllm_weights_converter/distributed_trtllm_model_weights_converter.py index 035e23a16..401988d78 100644 --- a/megatron/core/export/trtllm/trtllm_weights_converter/distributed_trtllm_model_weights_converter.py +++ b/megatron/core/export/trtllm/trtllm_weights_converter/distributed_trtllm_model_weights_converter.py @@ -1,5 +1,7 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from typing import Optional + import torch from tqdm import tqdm @@ -31,6 +33,7 @@ def __init__( dtype: DataType, multi_query_mode: bool = False, activation: str = "gelu", + scales: Optional[dict] = None, ): """Constructor for the TRTLLMModelWeightsConverterGPU class @@ -41,11 +44,15 @@ def __init__( dtype (DataType): The data type or model precision multi_query_mode (bool, optional): Defaults to False. activation (str, optional): Defaults to "gelu". + scales (dict, optional): Dictionary with fp8 scaling factors. """ + if scales is None: + scales = {} self.transformer_config = transformer_config self.trtllm_model_weights = {} self.storage_type = str_dtype_to_torch(dtype) self.activation = activation + self.scales = scales num_kv_heads = self.transformer_config.num_query_groups if num_kv_heads == 0: if multi_query_mode: @@ -67,7 +74,13 @@ def __init__( def _add_to_trtllm_model_weights(self, val: torch.Tensor, layer_name: str): assert torch.is_tensor(val), f"Expected a tensor for {layer_name} but got {type(val)}" - val = val.to(self.storage_type) + scale_key = '.'.join(layer_name.split('.')[:-1]) + '.weights_scaling_factor' + storage = self.storage_type + if scale_key in self.scales and layer_name.endswith("weight"): + storage = torch.float8_e4m3fn + val = val * self.scales[scale_key]['weight_multiplier'].to(val.device) + + val = val.to(storage) val = val.detach().contiguous() if val.ndim >= 2: val = torch.transpose(val.reshape(val.shape[0], -1), 0, 1) @@ -75,7 +88,7 @@ def _add_to_trtllm_model_weights(self, val: torch.Tensor, layer_name: str): self.trtllm_model_weights[layer_name] = torch.empty( val.size(), dtype=val.dtype, layout=val.layout, device="cpu", pin_memory=True ) - self.trtllm_model_weights[layer_name] = val + self.trtllm_model_weights[layer_name].copy_(val, non_blocking=True) def _convert_transformer_layer(self, layer_name: str, val: torch.Tensor): """Convert Transformer layers to TRTLLM weights @@ -232,6 +245,8 @@ def convert( # Convert the non transformer layers for layer_name in NON_TRANSFORMER_LAYERS_NAMES: + if layer_name not in model_state_dict: + continue if ( layer_name in TRTLLMLayers.vocab_embedding.value or layer_name in TRTLLMLayers.lm_head.value @@ -248,6 +263,13 @@ def convert( self.tp_rank ] model_state_dict[layer_name] = req_position_embedding.T + if layer_name == TRTLLMLayers.final_layernorm_weight.value: + # Same as layernorm1p in NeMo + if ( + self.transformer_config.layernorm_zero_centered_gamma + and self.transformer_config.normalization == "LayerNorm" + ): + model_state_dict[layer_name] = model_state_dict[layer_name] + 1.0 self._convert_non_transformer_layer( model_state_dict=model_state_dict, layer_name=layer_name ) diff --git 
a/megatron/core/export/trtllm/trtllm_weights_converter/single_device_trtllm_model_weights_converter.py b/megatron/core/export/trtllm/trtllm_weights_converter/single_device_trtllm_model_weights_converter.py index c7a98972d..7e669fc1c 100644 --- a/megatron/core/export/trtllm/trtllm_weights_converter/single_device_trtllm_model_weights_converter.py +++ b/megatron/core/export/trtllm/trtllm_weights_converter/single_device_trtllm_model_weights_converter.py @@ -1,6 +1,7 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. import re +from typing import Optional import torch from tqdm import tqdm @@ -39,6 +40,7 @@ def __init__( dtype: DataType, multi_query_mode: bool = False, activation: str = "gelu", + scales: Optional[dict] = None, ): """Constructor for the TRTLLMModelWeightsConverterCPU class @@ -50,12 +52,17 @@ def __init__( dtype (DataType): The data type or model precision multi_query_mode (bool, optional): Defaults to False. activation (str, optional): Defaults to "gelu". + scales (dict, optional): Dictionary with fp8 scaling factors. """ + if scales is None: + scales = {} + self.export_config = export_config self.transformer_config = transformer_config self.trtllm_model_weights = {} self.storage_type = str_dtype_to_torch(dtype) self.activation = activation + self.scales = scales num_kv_heads = self.transformer_config.num_query_groups if num_kv_heads == 0: if multi_query_mode: @@ -78,6 +85,25 @@ def _convert_non_transformer_layer(self, model_state_dict: dict, layer_name: str val = val.to(self.storage_type).detach().contiguous() self.trtllm_model_weights[layer_name] = val + def _cast_value(self, val: torch.Tensor, layer_name: str) -> torch.Tensor: + """Casts weights to the expected datatype. + When appropriate scaling factor is found inside self.scales, the weight gets scaled before the cast. 
+ + Args: + val (torch.Tensor): Model weight + layer_name (str): Layer name, used for determining the scaling factor dictionary key + Returns: + torch.Tensor: The casted weight + """ + storage = self.storage_type + + scale_key = '.'.join(layer_name.split('.')[:-1]) + '.weights_scaling_factor' + if scale_key in self.scales and layer_name.endswith("weight"): + storage = torch.float8_e4m3fn + val = val * self.scales[scale_key]['weight_multiplier'].to(val.device) + + return val.to(storage) + def _convert_transformer_layer(self, layer_name: str, val: torch.Tensor): """Convert Transformer layers to TRTLLM weights @@ -101,7 +127,7 @@ def _add_to_trtllm_model_weights(val: torch.Tensor, layer_name: str, split_type= if split_type == 'expert_split': for split_num, split_val in enumerate(val): self.trtllm_model_weights[f'{layer_name}.{split_num}.bin'] = ( - split_val.to(self.storage_type).detach().contiguous() + self._cast_value(split_val, layer_name).detach().contiguous() ) elif split_type == 'tensor_split': for split_num, split_val in enumerate(val): @@ -109,13 +135,14 @@ def _add_to_trtllm_model_weights(val: torch.Tensor, layer_name: str, split_type= split_val = torch.transpose(split_val.reshape(split_val.shape[0], -1), 1, 0) self.trtllm_model_weights[f'{layer_name}.{split_num}.bin'] = ( - split_val.to(self.storage_type).detach().contiguous() + self._cast_value(split_val, layer_name).detach().contiguous() ) else: if val.ndim >= 2: val = torch.transpose(val.reshape(val.shape[0], -1), 1, 0) + self.trtllm_model_weights[layer_name] = ( - val.to(self.storage_type).detach().contiguous() + self._cast_value(val, layer_name).detach().contiguous() ) if val.ndim == 2: @@ -301,6 +328,13 @@ def convert( pad_width = vocab_size_padded - vocab_size val = torch.nn.functional.pad(val, (0, 0, 0, pad_width), value=0) model_state_dict[layer_name] = val + if layer_name == TRTLLMLayers.final_layernorm_weight.value: + # Same as layernorm1p in NeMo + if ( + self.transformer_config.layernorm_zero_centered_gamma + and self.transformer_config.normalization == "LayerNorm" + ): + model_state_dict[layer_name] = model_state_dict[layer_name] + 1.0 self._convert_non_transformer_layer( model_state_dict=model_state_dict, layer_name=layer_name diff --git a/megatron/core/extensions/transformer_engine.py b/megatron/core/extensions/transformer_engine.py index bf5159c75..f64862c3c 100644 --- a/megatron/core/extensions/transformer_engine.py +++ b/megatron/core/extensions/transformer_engine.py @@ -1,7 +1,9 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
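Both converters above share the same scale-then-cast recipe for FP8 export: when a matching `.weights_scaling_factor` entry exists in `scales`, the weight is multiplied by the checkpoint's forward scaling factor and stored as float8_e4m3fn, while the inverse scale is exported separately as the TRT-LLM scale. A standalone sketch of that recipe (illustrative values; assumes a PyTorch build with float8 support):

import torch

def scale_and_cast(val: torch.Tensor, weight_multiplier: torch.Tensor) -> torch.Tensor:
    # Multiply by the 'weight_multiplier' taken from the TE extra_state, then cast to FP8;
    # the matching 'trt_llm_scale' (the inverse factor) lets TRT-LLM dequantize at runtime.
    return (val * weight_multiplier.to(val.device)).to(torch.float8_e4m3fn)

weight = torch.randn(16, 16, dtype=torch.bfloat16)
fp8_weight = scale_and_cast(weight, torch.tensor([1.0]))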
import dataclasses +import io import os +import pickle import warnings from typing import Callable @@ -11,13 +13,19 @@ from torch import Tensor from torch.nn.parameter import Parameter -from megatron.core import ModelParallelConfig, parallel_state +from megatron.core import ModelParallelConfig from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding from megatron.core.packed_seq_params import PackedSeqParams from megatron.core.parallel_state import ( get_context_parallel_global_ranks, get_context_parallel_group, - get_tensor_and_expert_parallel_world_size, + get_expert_data_parallel_rank, + get_expert_model_parallel_rank, + get_expert_model_parallel_world_size, + get_expert_tensor_parallel_group, + get_expert_tensor_parallel_rank, + get_expert_tensor_parallel_world_size, + get_hierarchical_context_parallel_groups, get_tensor_model_parallel_group, get_tensor_model_parallel_rank, get_tensor_model_parallel_world_size, @@ -159,19 +167,23 @@ def __init__( extra_kwargs["ub_name"] = tp_comm_buffer_name self.expert_parallel = self.config.expert_model_parallel_size > 1 - if is_expert and self.expert_parallel: + if is_expert: rng_tracker_name = get_expert_parallel_rng_tracker_name() else: rng_tracker_name = None if is_te_min_version("1.7.0"): extra_kwargs["rng_tracker_name"] = rng_tracker_name - # Disable communications in TE when using SP or EP by making TE agnostic of model parallel. - tp_size = self.config.tensor_model_parallel_size - tp_group = get_tensor_model_parallel_group(check_initialized=False) - if is_expert and (self.config.sequence_parallel or self.expert_parallel): - if self.config.moe_extended_tp: - tp_size = get_tensor_and_expert_parallel_world_size() + # Disable communications in TE when using TP or EP by making TE agnostic of model parallel. 
+ if is_expert: + tp_group = get_expert_tensor_parallel_group(check_initialized=False) + tp_size = get_expert_tensor_parallel_world_size() + else: + tp_group = get_tensor_model_parallel_group(check_initialized=False) + tp_size = get_tensor_model_parallel_world_size() + explicit_expert_comm = is_expert and (tp_size > 1 or self.expert_parallel) + + if explicit_expert_comm: if parallel_mode == "column": output_size = divide(output_size, tp_size) elif parallel_mode == "row": @@ -336,7 +348,7 @@ def __init__( input_size, output_size_per_partition, 0, - init_method, + init_method=condition_init_method(config, init_method), stride=1, return_master_weight=False, rank=rank, @@ -415,9 +427,13 @@ def __init__( tp_comm_buffer_name=tp_comm_buffer_name, ) - world_size = get_tensor_model_parallel_world_size() - rank = get_tensor_model_parallel_rank() if config.use_cpu_initialization: + if is_expert: + world_size = get_expert_tensor_parallel_world_size() + rank = get_expert_tensor_parallel_rank() + else: + world_size = get_tensor_model_parallel_world_size() + rank = get_tensor_model_parallel_rank() output_size_per_partition = divide(output_size, world_size) _ = _initialize_affine_weight_cpu( self.weight, @@ -425,7 +441,7 @@ def __init__( input_size, output_size_per_partition, 0, - init_method, + init_method=condition_init_method(config, init_method), stride=1, return_master_weight=False, rank=rank, @@ -489,9 +505,13 @@ def __init__( is_expert=is_expert, tp_comm_buffer_name=tp_comm_buffer_name, ) - world_size = get_tensor_model_parallel_world_size() - rank = get_tensor_model_parallel_rank() if config.use_cpu_initialization: + if is_expert: + world_size = get_expert_tensor_parallel_world_size() + rank = get_expert_tensor_parallel_rank() + else: + world_size = get_tensor_model_parallel_world_size() + rank = get_tensor_model_parallel_rank() input_size_per_partition = divide(input_size, world_size) self.master_weight = _initialize_affine_weight_cpu( self.weight, @@ -499,7 +519,7 @@ def __init__( input_size, input_size_per_partition, 1, - init_method, + init_method=condition_init_method(config, init_method), stride=1, return_master_weight=False, params_dtype=config.params_dtype, @@ -545,6 +565,7 @@ def __init__( softmax_scale: float = None, k_channels: int = None, v_channels: int = None, + cp_comm_type: str = "p2p", ): self.config = config self.te_forward_mask_type = False @@ -578,8 +599,12 @@ def __init__( if is_te_min_version("0.12.0", check_equality=False): self.te_forward_mask_type = True - # Only Transformer-Engine version >= 1.0.0 supports context parallelism - if is_te_min_version("1.0.0"): + # This check is important as CP config can be disabled while having a valid CP group + # Example - Disabling CP for encoder while a valid CP group exists for decoder + if self.config.context_parallel_size > 1: + assert is_te_min_version( + "1.0.0" + ), "Only Transformer-Engine version >= 1.0.0 supports context parallelism!" if getattr(TEDotProductAttention, "cp_stream") is None: TEDotProductAttention.cp_stream = torch.cuda.Stream() extra_kwargs["cp_group"] = get_context_parallel_group(check_initialized=False) @@ -587,10 +612,20 @@ def __init__( check_initialized=False ) extra_kwargs["cp_stream"] = TEDotProductAttention.cp_stream - else: - assert ( - self.config.context_parallel_size == 1 - ), "Only Transformer-Engine version >= 1.0.0 supports context parallelism!" 
+ if is_te_min_version("1.10.0"): + if cp_comm_type is None: + extra_kwargs["cp_comm_type"] = "p2p" + elif cp_comm_type == "a2a+p2p": + assert is_te_min_version("1.12.0"), ( + f"Transformer-Engine v{get_te_version()} must be >= 1.12.0 to support" + "hierarchical cp commucation." + ) + extra_kwargs["cp_comm_type"] = "a2a+p2p" + extra_kwargs["cp_group"] = get_hierarchical_context_parallel_groups( + check_initialized=False + ) + else: + extra_kwargs["cp_comm_type"] = cp_comm_type if self.config.deterministic_mode: if int(os.getenv("NVTE_ALLOW_NONDETERMINISTIC_ALGO", "1")) != 0: @@ -643,17 +678,13 @@ def forward( value: Tensor, attention_mask: Tensor, attn_mask_type: AttnMaskType, + attention_bias: Tensor = None, packed_seq_params: PackedSeqParams = None, ): """Forward.""" packed_seq_kwargs = ( dataclasses.asdict(packed_seq_params) if packed_seq_params is not None else {} ) - # overwrite self.qkv_format depending on self.config.apply_rope_fusion, which can be set - # after init - if self.config.apply_rope_fusion and is_te_min_version("0.13.0", check_equality=False): - self.qkv_format = 'bshd' - qkv_format = packed_seq_kwargs.get('qkv_format', self.qkv_format) if get_te_version() < PkgVersion("1.3.0"): @@ -670,16 +701,15 @@ def forward( packed_seq_kwargs.pop("cu_seqlens_q_padded", None) packed_seq_kwargs.pop("cu_seqlens_kv_padded", None) - if self.config.apply_rope_fusion and qkv_format == 'bshd': - query, key, value = [x.transpose(0, 1).contiguous() for x in (query, key, value)] - # In PyTorch, the following two tensors are in fact the same: - # Tensor with shape (1, S, H, D) and stride (S*H*D, H*D, D, 1) - # Tensor with shape (1, S, H, D) and stride (H*D, H*D, D, 1) - # Stride for a dimension that is 1 has no meaning, so tensors created two different ways - # can have same shape but different strides. - # We unify them to the first one to pass the stride check in TE - if value.shape == key.shape and value.shape[0] == 1 and value.stride() != key.stride(): - value = value.as_strided(value.shape, key.stride()) + attention_bias_kwargs = {} + if attention_bias is not None: + assert is_te_min_version("1.2.0"), ( + f"Transformer-Engine v{get_te_version()} must be >= 1.2.0 to support" + "`attention_bias`." + ) + attention_bias_kwargs = dict( + core_attention_bias_type='post_scale_bias', core_attention_bias=attention_bias + ) if self.te_forward_mask_type: if qkv_format == 'thd' and is_te_min_version("1.7.0"): @@ -696,15 +726,15 @@ def forward( value, attention_mask, attn_mask_type=attn_mask_type.name, + **attention_bias_kwargs, **packed_seq_kwargs, ) else: - core_attn_out = super().forward(query, key, value, attention_mask, **packed_seq_kwargs) + core_attn_out = super().forward( + query, key, value, attention_mask, **attention_bias_kwargs, **packed_seq_kwargs + ) - if self.config.apply_rope_fusion and qkv_format == 'bshd': - return core_attn_out.transpose(0, 1) - else: - return core_attn_out + return core_attn_out if is_te_min_version("1.9.0.dev0"): @@ -747,19 +777,19 @@ def __init__( extra_kwargs["ub_name"] = tp_comm_buffer_name self.expert_parallel = self.config.expert_model_parallel_size > 1 - if self.expert_parallel: + if is_expert: extra_kwargs["rng_tracker_name"] = get_expert_parallel_rng_tracker_name() - # For MoE models, the comms between TP and EP group is explicitly handled by - # MoE token dispatcher. So we disable comms by making TE agnostic of model parallel. 
- self.explicit_expert_comm = is_expert and ( - config.tensor_model_parallel_size > 1 or self.expert_parallel - ) - tp_group = get_tensor_model_parallel_group(check_initialized=False) - if self.explicit_expert_comm and config.moe_extended_tp: - tp_size = parallel_state.get_tensor_and_expert_parallel_world_size() + # The comms between TP and EP group is explicitly handled by MoE token dispatcher. + # So we disable comms by making TE agnostic of model parallel. + if is_expert: + tp_group = get_expert_tensor_parallel_group(check_initialized=False) + tp_size = get_expert_tensor_parallel_world_size() else: - tp_size = parallel_state.get_tensor_model_parallel_world_size() + tp_group = get_tensor_model_parallel_group(check_initialized=False) + tp_size = get_tensor_model_parallel_world_size() + self.explicit_expert_comm = is_expert and (tp_size > 1 or self.expert_parallel) + if self.explicit_expert_comm: if parallel_mode == "column": output_size = divide(output_size, tp_size) @@ -790,6 +820,61 @@ def __init__( for param in self.parameters(): setattr(param, 'allreduce', not (is_expert and self.expert_parallel)) + def merge_extra_states( + self, + state_dict, + prefix, + local_metadata, + strict, + missing_keys, + unexpected_keys, + error_msgs, + ): + """ + Merge multiple "_extra_state" into one. + """ + self.init_fp8_metadata(num_gemms=self.num_gemms) + fp8_checkpoint = self.fp8_meta["fp8_checkpoint"] or self.fp8 or self.fp8_calibration + + try: + state_list = [ + state_dict.pop(f"{prefix}_extra_state{i}") for i in range(1, self.num_gemms) + ] + except KeyError: + # "_extra_state{i}" only exists for dist-ckpt. Return for torch native ckpt. + return + + if not fp8_checkpoint: + return + state_list = [state_dict.pop(f"{prefix}_extra_state")] + state_list + state_list = [self._decode_extra_state(state) for state in state_list] + extra_fp8_variables = state_list[0]['extra_fp8_variables'] + extra_fp8_variables['num_gemms'] = self.num_gemms + extra_state = { + "scale_fwd": torch.cat( + [state['scale_fwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "scale_inv_fwd": torch.cat( + [state['scale_inv_fwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "amax_history_fwd": torch.cat( + [state['amax_history_fwd'].view(-1, 1) for state in state_list], dim=1 + ).view(self.fp8_meta["recipe"].amax_history_len, -1), + "scale_bwd": torch.cat( + [state['scale_bwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "scale_inv_bwd": torch.cat( + [state['scale_inv_bwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "amax_history_bwd": torch.cat( + [state['amax_history_bwd'].view(-1, 1) for state in state_list], dim=1 + ).view(self.fp8_meta["recipe"].amax_history_len, -1), + "extra_fp8_variables": extra_fp8_variables, + } + state_dict[f"{prefix}_extra_state"] = self._encode_extra_state(extra_state) + + self._register_load_state_dict_pre_hook(merge_extra_states, with_module=True) + def forward(self, x, m_splits): """Forward.""" _is_first_microbatch = ( @@ -805,6 +890,47 @@ def forward(self, x, m_splits): return out return out, None + def _encode_extra_state(self, state): + state_serialized = io.BytesIO() + torch.save(state, state_serialized) + return state_serialized + + def _decode_extra_state(self, state): + if isinstance(state, torch.Tensor): + return pickle.loads(state.detach().cpu().numpy().tobytes()) + elif isinstance(state, io.BytesIO): + state.seek(0) + return torch.load(state, map_location="cuda") + else: + raise RuntimeError("Unsupported checkpoint format.") + 
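The `_encode_extra_state`/`_decode_extra_state` helpers above round-trip the FP8 metadata through an in-memory buffer with torch.save/torch.load; a minimal standalone illustration of the same idea (names are local to this sketch, not part of the patch):

import io

import torch

def encode_state(state: dict) -> io.BytesIO:
    buffer = io.BytesIO()
    torch.save(state, buffer)  # serialize the fp8 metadata dict into the buffer
    return buffer

def decode_state(buffer: io.BytesIO) -> dict:
    buffer.seek(0)  # rewind before loading, as _decode_extra_state does
    return torch.load(buffer)

state = {"scale_fwd": torch.ones(3)}
assert torch.equal(decode_state(encode_state(state))["scale_fwd"], torch.ones(3))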
+ def _split_extra_state(self, state): + fp8_checkpoint = self.fp8_meta["fp8_checkpoint"] or self.fp8 or self.fp8_calibration + + if not fp8_checkpoint: + return [state] * self.num_gemms + + state = self._decode_extra_state(state) + extra_states = [] + extra_fp8_variables = state['extra_fp8_variables'] + extra_fp8_variables['num_gemms'] = 1 + for gemm_idx in range(self.num_gemms): + tmp_state = { + "scale_fwd": state['scale_fwd'].view(3, -1)[:, gemm_idx], + "scale_inv_fwd": state['scale_inv_fwd'].view(3, -1)[:, gemm_idx], + "amax_history_fwd": state['amax_history_fwd'].view( + self.fp8_meta["recipe"].amax_history_len, 3, -1 + )[:, :, gemm_idx], + "scale_bwd": state['scale_bwd'].view(2, -1)[:, gemm_idx], + "scale_inv_bwd": state['scale_inv_bwd'].view(2, -1)[:, gemm_idx], + "amax_history_bwd": state['amax_history_bwd'].view( + self.fp8_meta["recipe"].amax_history_len, 2, -1 + )[:, :, gemm_idx], + "extra_fp8_variables": extra_fp8_variables, + } + extra_states.append(self._encode_extra_state(tmp_state)) + return extra_states + def _sharded_state_dict_grouped( self, tp_axis_map, prefix='', sharded_offsets=(), metadata=None ): @@ -813,17 +939,14 @@ def _sharded_state_dict_grouped( """ sharded_state_dict = {} full_state_dict = self.state_dict(prefix='', keep_vars=True) - num_global_experts = ( - parallel_state.get_expert_model_parallel_world_size() * self.num_gemms - ) - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_gemms - ) + num_global_experts = get_expert_model_parallel_world_size() * self.num_gemms + local_expert_indices_offset = get_expert_model_parallel_rank() * self.num_gemms ep_axis = len(sharded_offsets) + extra_states = self._split_extra_state(full_state_dict['_extra_state']) for gemm_idx in range(self.num_gemms): state_dict = { f'{gemm_idx}.weight': full_state_dict[f'weight{gemm_idx}'], - f'{gemm_idx}._extra_state': full_state_dict['_extra_state'], + f'{gemm_idx}._extra_state': extra_states[gemm_idx], } if self.use_bias: state_dict[f'{gemm_idx}.bias'] = full_state_dict[f'bias{gemm_idx}'] @@ -841,8 +964,6 @@ def _sharded_state_dict_grouped( sharded_state_dict.update( { f'{prefix}weight{gemm_idx}': sub_sd[f'{gemm_idx}.weight'], - # TODO: TE's GroupedLinear only has one _extra_state for all experts. - # We need sharding or build/merge fn to handle _extra_state correctly. f'{prefix}_extra_state{"" if gemm_idx == 0 else gemm_idx}': sub_sd[ f'{gemm_idx}._extra_state' ], @@ -856,10 +977,7 @@ def _sharded_state_dict_grouped( assert ( len(replica_id) == 3 ), f'Expected replica_id for {k} to be in (PP, TP, DP) format, got: {replica_id}' - sh_ten.replica_id = ( - *replica_id[:2], - parallel_state.get_data_modulo_expert_parallel_rank(), - ) + sh_ten.replica_id = (*replica_id[:2], get_expert_data_parallel_rank()) return sharded_state_dict class TEColumnParallelGroupedLinear(TEGroupedLinear): @@ -1085,3 +1203,39 @@ def get_cpu_offload_context( except ImportError: get_cpu_offload_context = None + +try: + + from transformer_engine.pytorch.attention import FusedRoPEFunc + + def fused_apply_rotary_pos_emb(t: torch.Tensor, freqs: torch.Tensor) -> torch.Tensor: + """Apply rotary positional embedding to input tensor T in `sbhd` format.""" + return FusedRoPEFunc.apply(t, freqs, "sbhd") + + def fused_apply_rotary_pos_emb_thd( + t: torch.Tensor, + cu_seqlens: torch.Tensor, + freqs: torch.Tensor, + cp_size: int = 1, + cp_rank: int = 0, + ) -> torch.Tensor: + """ + Apply rotary positional embedding to input tensor T in `thd` format with CP support. 
+ """ + if is_te_min_version("1.11.0", check_equality=False): + return FusedRoPEFunc.apply(t, freqs, "thd", cu_seqlens, cp_size, cp_rank) + else: + return FusedRoPEFunc.apply(t, freqs, "thd", cu_seqlens) + +except ImportError: + + pass + +try: + + from transformer_engine.pytorch import Fp8Padding, Fp8Unpadding # pylint: disable=unused-import + +except ImportError: + + Fp8Padding = None + Fp8Unpadding = None diff --git a/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py index b7f58efcf..647c4d191 100644 --- a/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py +++ b/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py @@ -1,13 +1,11 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. import abc import math -from argparse import Namespace from typing import Iterable, List, Union import torch from megatron.core import parallel_state, tensor_parallel -from megatron.core.inference.common_inference_params import CommonInferenceParams from megatron.core.inference.communication_utils import ( recv_from_prev_pipeline_rank_, send_to_next_pipeline_rank, @@ -19,7 +17,13 @@ from megatron.core.models.gpt.gpt_model import GPTModel +# pylint: disable=line-too-long class AbstractModelInferenceWrapper(abc.ABC): + """Abstract inference wrapper + + Extend this to create a version for your model. + """ + def __init__( self, model: Union['LegacyGPTModel', GPTModel], @@ -31,7 +35,7 @@ def __init__( Args: model (Union[GPTModel, LegacyGPTModel]): The actual GPT model (MCore or MLM) - args (Namespace): The commadline arguments that were passed + inference_wrapper_config (InferenceWrapperConfig): Has info like hidden size, vocab size etc. """ assert not isinstance( model, Iterable diff --git a/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py index 87b1d2df7..166ed5e06 100644 --- a/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py +++ b/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py @@ -1,5 +1,4 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-from argparse import Namespace from typing import List, Tuple import torch @@ -7,20 +6,26 @@ from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( AbstractModelInferenceWrapper, ) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) from megatron.core.models.gpt import GPTModel +# pylint: disable=line-too-long class GPTInferenceWrapper(AbstractModelInferenceWrapper): - def __init__(self, model: GPTModel, args: Namespace): + """Inference wrapper for GPT model""" + + def __init__(self, model: GPTModel, inference_wrapper_config: InferenceWrapperConfig): """Constructor for the model inference wrapper The wrapper prepares the model for inference, provides the required input data, and runs the forward pass Args: model (GPTModel): The GPT model (MCore or legacy) - args (Namespace): The command line arguments that were passed + inference_wrapper_config (InferenceWrapperConfig): Has info like hidden size, vocab size etc """ - super().__init__(model, args) + super().__init__(model, inference_wrapper_config) def prep_model_for_inference(self, prompts_tokens: torch.Tensor): """A utility function for preparing model for inference diff --git a/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py b/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py index e22550e7e..14ca0f6fe 100644 --- a/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py +++ b/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py @@ -18,10 +18,12 @@ class InferenceWrapperConfig: """Can be torch.float or torch.half if --fp16 is used, or torch.bfloat16 if --bf16 is used""" inference_batch_times_seqlen_threshold: int - """if batch-size times sequence-length is smaller than this threshold then we will not use pipelining, otherwise we will.""" + """if (batch-size * sequence-length) is smaller than this threshold then we will not pipeline + the batch.""" padded_vocab_size: int - """The final padded vocab size (Padded to make it divisible by --make-vocab-size-divisible-by value)""" + """The final padded vocab size (Padded to make it divisible by + --make-vocab-size-divisible-by value)""" fp32_residual_connection: bool = False """Move residual connections to fp32. Obtained from arguments.py""" @@ -29,12 +31,14 @@ class InferenceWrapperConfig: def add_attributes(self, attribute_value_pair: dict): """Utility to add more attributes to inference params - Use this method to pass in a custom dictonary to add more config to the instance you created. Use as follows + Use this method to pass in a custom dictionary to add more configs to the instance created. + Use as follows: c = InferenceWrapperConfig c.add_attributes({'precision':'fp32'}) Args: - attribute_value_pair (dict): A dictionary containing attributes as the key names and their values as the values. + attribute_value_pair (dict): A dictionary containing attributes as the key names and + corresponding values. 
""" for key, value in attribute_value_pair.items(): setattr(self, key, value) diff --git a/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py index 10e1da481..2e5f8466d 100644 --- a/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py +++ b/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py @@ -1,5 +1,4 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from argparse import Namespace from collections import deque from typing import Any, List, Tuple @@ -11,9 +10,13 @@ from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( AbstractModelInferenceWrapper, ) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) from megatron.core.models.T5 import T5Model +# pylint: disable=line-too-long class T5InferenceWrapper(AbstractModelInferenceWrapper): """Constructor for the model inference wrapper @@ -22,11 +25,19 @@ class T5InferenceWrapper(AbstractModelInferenceWrapper): Args: model (T5Model): The T5 model (MCore or legacy) - args (Namespace): The command line arguments that were passed + inference_wrapper_config (InferenceWrapperConfig): The command line arguments that were passed + use_local (bool): Whether the T5 model's transformer impl + is local (vs transformer_engine) """ - def __init__(self, model: T5Model, args: Namespace): - super().__init__(model, args) + def __init__( + self, + model: T5Model, + inference_wrapper_config: InferenceWrapperConfig, + use_local: bool = False, + ): + super().__init__(model, inference_wrapper_config) + self.use_local = use_local def prep_model_for_inference( self, prompts_tokens: torch.Tensor, encoder_prompts: List[str] = None, tokenizer: Any = None @@ -45,12 +56,18 @@ def prep_model_for_inference( super().prep_model_for_inference(prompts_tokens=prompts_tokens) + # get max_sequence_length + if hasattr(self.model, "module"): # if self.model is Float16Module + max_sequence_length = self.model.module.max_sequence_length + else: + max_sequence_length = self.model.max_sequence_length + encoder_prompts_tokens_list = [ self.tokenize_encoder_prompt(encoder_prompt, tokenizer) for encoder_prompt in encoder_prompts ] self.batch_encoder_prompts_tokens = self.pad_encoder_prompts_tokens( - encoder_prompts_tokens_list, self.model.max_sequence_length, tokenizer + encoder_prompts_tokens_list, max_sequence_length, tokenizer ) # create batch mask for encoder_prompt (self.batch_input_tokens) and @@ -59,32 +76,13 @@ def prep_model_for_inference( encoder_prompts_tokens = self.batch_encoder_prompts_tokens.cpu().numpy() self.batch_mask_encoder = [] self.batch_mask_decoder = [] - self.batch_mask_encoder_decoder = [] for i in range(len(self.prompts_tokens)): - self.batch_mask_encoder.append( - T5MaskedWordPieceDataset._make_attention_mask( - encoder_prompts_tokens[i], encoder_prompts_tokens[i] - ) - ) - self.batch_mask_decoder.append( - T5MaskedWordPieceDataset._make_attention_mask( - decoder_prompts_tokens[i], decoder_prompts_tokens[i] - ) - * T5MaskedWordPieceDataset._make_history_mask(decoder_prompts_tokens[i]) - ) - self.batch_mask_encoder_decoder.append( - T5MaskedWordPieceDataset._make_attention_mask( - decoder_prompts_tokens[i], encoder_prompts_tokens[i] - ) - ) + mask_encoder = encoder_prompts_tokens[i] == tokenizer.pad + mask_decoder = decoder_prompts_tokens[i] == tokenizer.pad + 
self.batch_mask_encoder.append(mask_encoder) + self.batch_mask_decoder.append(mask_decoder) self.batch_mask_encoder = torch.tensor(numpy.array(self.batch_mask_encoder)).cuda() self.batch_mask_decoder = torch.tensor(numpy.array(self.batch_mask_decoder)).cuda() - self.batch_mask_encoder_decoder = torch.tensor( - numpy.array(self.batch_mask_encoder_decoder) - ).cuda() - self.batch_mask_encoder = self.batch_mask_encoder < 0.5 - self.batch_mask_decoder = self.batch_mask_decoder < 0.5 - self.batch_mask_encoder_decoder = self.batch_mask_encoder_decoder < 0.5 def tokenize_encoder_prompt( self, encoder_prompt: str, tokenizer @@ -112,6 +110,7 @@ def tokenize_encoder_prompt( if masks_count > 0: sentinel = sentinels.popleft() encoder_prompt_tokens.extend([sentinel]) + masks_count -= 1 return encoder_prompt_tokens @@ -156,13 +155,24 @@ def get_batch_for_context_window( List: A list of inputs that will be used by your model in the forward step """ - # rerun encoder every step # T5 inference not yet support kv_cache encoder_tokens2use = self.batch_encoder_prompts_tokens decoder_tokens2use = self.prompts_tokens[:, :context_end_position] encoder_mask2use = self.batch_mask_encoder - decoder_mask2use = self.batch_mask_decoder[:, :context_end_position, :context_end_position] - encoder_decoder_mask2use = self.batch_mask_encoder_decoder[:, :context_end_position, :] + decoder_mask2use = self.batch_mask_decoder[:, :context_end_position] + + # Configure attention mask based on different conditions + # (e.g., transformer-impl, TE versions, TE backends) + [encoder_mask2use, decoder_mask2use, encoder_decoder_mask2use] = ( + T5MaskedWordPieceDataset.config_attention_mask( + encoder_tokens2use, + decoder_tokens2use, + encoder_mask2use, + decoder_mask2use, + self.use_local, + ) + ) + data_at_step_idx = [ encoder_tokens2use, decoder_tokens2use, diff --git a/megatron/core/inference/modelopt_support/gpt/model_specs.py b/megatron/core/inference/modelopt_support/gpt/model_specs.py index ba1ab8993..4d422bc2f 100644 --- a/megatron/core/inference/modelopt_support/gpt/model_specs.py +++ b/megatron/core/inference/modelopt_support/gpt/model_specs.py @@ -2,18 +2,21 @@ from megatron.core.extensions.transformer_engine import TEDotProductAttention, TENorm from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.models.gpt.gpt_layer_specs import _get_mlp_module_spec from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules from megatron.core.transformer.enums import AttnMaskType from megatron.core.transformer.identity_op import IdentityOp -from megatron.core.transformer.mlp import MLP, MLPSubmodules from megatron.core.transformer.spec_utils import ModuleSpec from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules # Use this spec for ModelOpt PTQ and TensorRT-LLM export def get_gpt_layer_modelopt_spec( - remap_te_layernorm: bool = False, qk_layernorm: bool = False + num_experts: int = None, + moe_grouped_gemm: bool = False, + remap_te_layernorm: bool = False, + qk_layernorm: bool = False, ) -> ModuleSpec: """Mix the native spec with TENorm. @@ -21,12 +24,20 @@ def get_gpt_layer_modelopt_spec( is using TENorm from Transformer-Engine. The issue is that FusedLayerNorm from apex has stopped supporting RMSNorm needed by llama. 
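[Editor's note] In the T5 inference-wrapper hunk above, the precomputed [s, s] attention-mask matrices are replaced with per-token padding masks (True where a position is a pad token); the attention backend expands them as needed, which pairs with the AttnMaskType.padding change in t5_spec.py further below. A rough sketch of that relationship, assuming the convention that True marks positions to be masked out:

import torch

pad_id = 0
tokens = torch.tensor([[5, 7, 9, 0, 0]])      # [batch, seq], right-padded
pad_mask = tokens == pad_id                   # [batch, seq], True at pad positions

# A dense self-attention mask can be recovered from the per-token mask:
# a (query, key) pair is masked whenever the key position is padding.
dense_mask = pad_mask[:, None, None, :].expand(-1, 1, tokens.size(1), -1)
print(dense_mask.shape)  # torch.Size([1, 1, 5, 5])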
""" + mlp = _get_mlp_module_spec( + use_te=False, num_experts=num_experts, moe_grouped_gemm=moe_grouped_gemm, fp8=False + ) sharded_state_dict_keys_map = {} if remap_te_layernorm: - sharded_state_dict_keys_map = { - 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', - 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', - } + if num_experts: + sharded_state_dict_keys_map = { + 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_' + } + else: + sharded_state_dict_keys_map = { + 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', + 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', + } return ModuleSpec( module=TransformerLayer, submodules=TransformerLayerSubmodules( @@ -44,12 +55,7 @@ def get_gpt_layer_modelopt_spec( ), self_attn_bda=get_bias_dropout_add, pre_mlp_layernorm=TENorm, - mlp=ModuleSpec( - module=MLP, - submodules=MLPSubmodules( - linear_fc1=ColumnParallelLinear, linear_fc2=RowParallelLinear - ), - ), + mlp=mlp, mlp_bda=get_bias_dropout_add, # Map TE-layernorm-fusion keys back sharded_state_dict_keys_map=sharded_state_dict_keys_map, diff --git a/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py b/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py index 0667af837..110308993 100644 --- a/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py +++ b/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py @@ -306,7 +306,7 @@ def generate_all_output_tokens_static_batch( context_length = context_end_position - context_start_position logits = broadcast_from_last_pipeline_stage( [batch_size, context_length, self.tokenizer.vocab_size], - dtype=torch.float32, + dtype=self.inference_wrapped_model.inference_wrapper_config.params_dtype, tensor=logits, ) diff --git a/megatron/core/jit.py b/megatron/core/jit.py index 8bb18d393..5b1dfff3e 100644 --- a/megatron/core/jit.py +++ b/megatron/core/jit.py @@ -2,10 +2,9 @@ import torch -TORCH_MAJOR = int(torch.__version__.split(".")[0]) -TORCH_MINOR = int(torch.__version__.split(".")[1]) +from megatron.core.utils import is_torch_min_version jit_fuser = torch.jit.script # nvFuser is deprecated in PyTorch JIT starting from 2.2 -if (TORCH_MAJOR > 2) or (TORCH_MAJOR == 2 and TORCH_MINOR >= 2): +if is_torch_min_version("2.2.0a0"): jit_fuser = torch.compile diff --git a/megatron/core/model_parallel_config.py b/megatron/core/model_parallel_config.py index f2751673e..46a03f6d6 100644 --- a/megatron/core/model_parallel_config.py +++ b/megatron/core/model_parallel_config.py @@ -39,14 +39,23 @@ class ModelParallelConfig: context_parallel_size: int = 1 """Splits network input along sequence dimension across GPU ranks.""" + hierarchical_context_parallel_sizes: Optional[list[int]] = None + """Degrees of the hierarchical context parallelism. Users should provide a list to specify + the sizes for different levels. Taking the a2a+p2p cp comm type as example, it contains + groups of two levels, so the first value of the list indicates the group size of the a2a + communication type, and the second value indicates the group size of the p2p communication + type. + """ + expert_model_parallel_size: int = 1 """Distributes Moe Experts across sub data parallel dimension.""" + expert_tensor_parallel_size: Optional[int] = None + """Intra-layer tensor model parallelsm for expert layer. 
Splits tensors across GPU ranks.""" + moe_extended_tp: bool = False - """Alternative parallelization strategy for expert parallelism. Instead of distributing experts - across expert_model_parallel_size, each expert is sharded along extendended tensor parallel - domain (tensor_model_paralle_size * expert_model_parallel_size). It avoids the load balancing - problem with MOE training. + """NOTE: Deprecated from MCore v0.10. This flag is ignored. + Its functionality is replaced by expert_tensor_parallel_size. """ ################### @@ -75,33 +84,33 @@ class ModelParallelConfig: params_dtype: torch.dtype = torch.float32 """dtype used when intializing the weights.""" - timers: Callable = None + timers: Optional[Callable] = None """Timers object to call for various timing functions. See megatron.core.timers.Timers""" - finalize_model_grads_func: Callable = None + finalize_model_grads_func: Optional[Callable] = None """Function that finalizes gradients on all workers. Could include ensuring that grads are all-reduced across data parallelism, pipeline parallelism, and sequence parallelism dimensions. """ - grad_scale_func: Callable = None + grad_scale_func: Optional[Callable] = None """If using loss scaling, this function should take the loss and return the scaled loss. If None, no function is called on the loss. """ - no_sync_func: Callable = None + no_sync_func: Optional[Callable] = None """Function that creates a context that suppresses asynchronous data-parallel communication. If the model is an instance of core.distributed.DistributedDataParallel, the default is to use core.distributed.DistributedDataParallel.no_sync. """ - grad_sync_func: Callable = None + grad_sync_func: Optional[Callable] = None """Function that launches asynchronous gradient reductions (e.g. distributed optimizer gradient reduce-scatters). The function should take one argument: an iterable of parameters whose gradients are to be synchronized. """ - param_sync_func: Callable = None + param_sync_func: Optional[Callable] = None """Function that launches asynchronous parameter synchronizations (e.g. distributed optimizer parameter all-gathers). The function should take one argument: an iterable of parameters to be synchronized. @@ -114,7 +123,7 @@ class ModelParallelConfig: enable_autocast: bool = False """If true runs the forward step function inside torch.autocast context.""" - autocast_dtype: torch.dtype = None + autocast_dtype: Optional[torch.dtype] = None """dtype to pass to torch.amp.autocast when enabled. If None, is set to pipeline_dtype.""" num_microbatches_with_partial_activation_checkpoints: Optional[int] = None @@ -272,6 +281,27 @@ class ModelParallelConfig: encoder and decoder (e.g., T5). Ignored if None. """ + overlap_p2p_comm_warmup_flush: bool = False + """If true, overlap communication and computation in warm up and flush phase. + Only valid when overlap_p2p_comm is True and batch_p2p_comm is False. + Defaults to False. + """ + + microbatch_group_size_per_vp_stage: Optional[int] = None + """This value specifies the number of micro-batches that are executed + at a time for a given virtual stage (both forward and backward). + Default (in __post_init__() method below) to pipeline_parallel_size + which specifies a depth-first schedule. 
+ Example: for PP=2 VP=2, when microbatch_group_size_per_vp_stage=2, + num_microbatches = 4, we have + rank 0 | 0 1 0 1 2 3 2 3 + rank 1 | 0 1 0 1 2 3 2 3 + When microbatch_group_size_per_vp_stage=3, num_microbatches = 5, + we have + rank 0 | 0 1 2 0 1 2 3 4 3 4 + rank 1 | 0 1 2 0 1 2 3 4 3 4 + """ + ################### # CPU Offloading ################### @@ -281,7 +311,7 @@ class ModelParallelConfig: cpu_offloading_num_layers: int = 0 """Tells the number of transformer layers for which activations has to be offloaded.""" - _cpu_offloading_context: ContextManager = ( + _cpu_offloading_context: Optional[ContextManager] = ( None # Used for internal use only, not to be set by a user. # TODO: Need to move to the 'right' place when possible. @@ -312,6 +342,9 @@ def __post_init__(self): if self.tensor_model_parallel_size <= 1: raise ValueError("Can not use sequence paralllelism without tensor parallelism") + if self.expert_tensor_parallel_size is None: + self.expert_tensor_parallel_size = self.tensor_model_parallel_size + if self.pipeline_model_parallel_size > 1: if self.pipeline_dtype is None: raise ValueError( @@ -339,6 +372,16 @@ def __post_init__(self): if self.expert_model_parallel_size > 1 and self.tensor_model_parallel_size > 1: if self.sequence_parallel is False: raise ValueError( - "When using expert parallelism and tensor parallelism, sequence parallelism " - "must be used" + "When using expert parallelism and tensor parallelism, " + "sequence parallelism must be used" + ) + + if self.microbatch_group_size_per_vp_stage is None: + self.microbatch_group_size_per_vp_stage = self.pipeline_model_parallel_size + + if self.overlap_p2p_comm_warmup_flush: + if not self.overlap_p2p_comm or self.batch_p2p_comm: + raise ValueError( + "Pipeline parallel communication overlapping in warmup and flush is only " + "compatible with overlap_p2p_comm but not batch_p2p_comm." ) diff --git a/megatron/core/models/T5/t5_model.py b/megatron/core/models/T5/t5_model.py index bce998c6e..462fbfc69 100644 --- a/megatron/core/models/T5/t5_model.py +++ b/megatron/core/models/T5/t5_model.py @@ -1,16 +1,18 @@ # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
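[Editor's note] The microbatch_group_size_per_vp_stage docstring above gives two example schedules. The forward-pass ordering they describe can be reproduced with a small helper (illustrative only; the real interleaved scheduler is considerably more involved and also interleaves backward passes):

def microbatch_order(num_microbatches, vp_size, group_size):
    # Emit microbatch indices in the order a rank works on them during the
    # forward phase: groups of `group_size` microbatches, cycling through
    # all virtual-pipeline stages before moving on to the next group.
    order = []
    for group_start in range(0, num_microbatches, group_size):
        group = list(range(group_start, min(group_start + group_size, num_microbatches)))
        for _ in range(vp_size):
            order.extend(group)
    return order

print(microbatch_order(4, 2, 2))  # [0, 1, 0, 1, 2, 3, 2, 3]
print(microbatch_order(5, 2, 3))  # [0, 1, 2, 0, 1, 2, 3, 4, 3, 4]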
-from typing import List, Literal, Optional +from typing import List, Literal, Optional, Tuple import torch from torch import Tensor -from megatron.core import InferenceParams, tensor_parallel +from megatron.core import InferenceParams, parallel_state, tensor_parallel from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk +from megatron.core.dist_checkpointing.mapping import ShardedStateDict +from megatron.core.enums import ModelType from megatron.core.models.common.embeddings.language_model_embedding import LanguageModelEmbedding from megatron.core.models.common.embeddings.rotary_pos_embedding import RotaryEmbedding from megatron.core.models.common.language_module.language_module import LanguageModule -from megatron.core.transformer.enums import ModelType +from megatron.core.packed_seq_params import PackedSeqParams from megatron.core.transformer.module import MegatronModule from megatron.core.transformer.spec_utils import ModuleSpec from megatron.core.transformer.transformer_block import TransformerBlock @@ -176,7 +178,10 @@ def __init__( max_sequence_length=self.max_sequence_length, position_embedding_type=self.position_embedding_type, ) - self.position_embeddings = self.embedding.position_embeddings + if position_embedding_type == "learned_absolute": + self.position_embeddings = self.embedding.position_embeddings + else: + self.position_embeddings = None # Rotary Position Embeddings if self.position_embedding_type == 'rope': @@ -239,6 +244,7 @@ def forward( encoder_hidden_states: Tensor = None, output_encoder_hidden_only: bool = False, inference_params: InferenceParams = None, + packed_seq_params: PackedSeqParams = None, ) -> Tensor: """Forward pass. @@ -255,12 +261,6 @@ def forward( Tensor: loss tensor """ - (encoder_attn_mask, decoder_attn_mask, encoder_decoder_attn_mask) = ( - t5_extended_attention_mask( - [encoder_attn_mask, decoder_attn_mask, encoder_decoder_attn_mask] - ) - ) - ## Encoder forward if encoder_hidden_states is None: @@ -280,7 +280,7 @@ def forward( rotary_pos_emb = None if self.position_embedding_type == 'rope': rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( - inference_params, self.encoder, encoder_input, self.config + inference_params, self.encoder, encoder_input, self.config, packed_seq_params ) rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len) @@ -315,7 +315,7 @@ def forward( rotary_pos_emb = None if self.position_embedding_type == 'rope': rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( - inference_params, self.decoder, decoder_input, self.config + inference_params, self.encoder, encoder_input, self.config, packed_seq_params ) rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len) @@ -382,6 +382,34 @@ def shared_embedding_or_output_weight(self) -> Tensor: return self.lm_head.output_layer.weight return None + def sharded_state_dict( + self, + prefix: str = '', + sharded_offsets: Tuple[Tuple[int, int, int]] = (), + metadata: Optional[dict] = None, + ) -> ShardedStateDict: + """Sharded state dict implementation handling duplication of encoder and decoder layers. + + Some layers (output, embedding) are shared between the encoder and decoder. + This method sets the replica_id for them to ensure there is only one + layer instance with replica_id (0, 0, 0). + + Args: + prefix (str): Module name prefix. + sharded_offsets (tuple): PP related offsets, expected to be empty at this module level. + metadata (Optional[Dict]): metadata controlling sharded state dict creation. 
+ + Returns: + ShardedStateDict: sharded state dict for the T5Model + """ + sharded_sd = super().sharded_state_dict(prefix, sharded_offsets, metadata) + if not parallel_state.is_inside_encoder(): + for k, sh_ten in sharded_sd.items(): + if not k.startswith(f'{prefix}decoder'): + # Bump replica_id of all the layers shared with the encoder (output, embedding) + sh_ten.replica_id = (sh_ten.replica_id[0] + 1, *sh_ten.replica_id[1:]) + return sharded_sd + def t5_extended_attention_mask(attention_mask_list: List[Tensor]) -> List[Tensor]: """Creates the extended attention mask diff --git a/megatron/core/models/T5/t5_spec.py b/megatron/core/models/T5/t5_spec.py index ecdcdbc26..8370b07df 100644 --- a/megatron/core/models/T5/t5_spec.py +++ b/megatron/core/models/T5/t5_spec.py @@ -38,10 +38,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm def encoder_model_with_transformer_engine_default_spec() -> ModuleSpec: @@ -52,7 +52,7 @@ def encoder_model_with_transformer_engine_default_spec() -> ModuleSpec: submodules=TransformerLayerSubmodules( self_attention=ModuleSpec( module=SelfAttention, - params={"attn_mask_type": AttnMaskType.arbitrary}, + params={"attn_mask_type": AttnMaskType.padding}, submodules=SelfAttentionSubmodules( linear_qkv=TELayerNormColumnParallelLinear, core_attention=TEDotProductAttention, @@ -94,7 +94,7 @@ def decoder_model_with_transformer_engine_default_spec() -> ModuleSpec: pre_cross_attn_layernorm=TENorm, cross_attention=ModuleSpec( module=CrossAttention, - params={"attn_mask_type": AttnMaskType.arbitrary}, + params={"attn_mask_type": AttnMaskType.padding}, submodules=CrossAttentionSubmodules( linear_q=TEColumnParallelLinear, linear_kv=TEColumnParallelLinear, diff --git a/megatron/core/models/bert/bert_layer_specs.py b/megatron/core/models/bert/bert_layer_specs.py index cd51c124c..80893d54a 100644 --- a/megatron/core/models/bert/bert_layer_specs.py +++ b/megatron/core/models/bert/bert_layer_specs.py @@ -30,10 +30,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. 
Falling back to Torch Norm') + LNImpl = WrappedTorchNorm # Use this spec to use lower level Transformer Engine modules (required for fp8 training) bert_layer_with_transformer_engine_spec = ModuleSpec( diff --git a/megatron/core/models/bert/bert_lm_head.py b/megatron/core/models/bert/bert_lm_head.py index fd26ebd16..9002eab97 100644 --- a/megatron/core/models/bert/bert_lm_head.py +++ b/megatron/core/models/bert/bert_lm_head.py @@ -2,24 +2,18 @@ import torch from torch import Tensor +from megatron.core.fusions.fused_layer_norm import HAVE_FUSED_LAYER_NORM, FusedLayerNorm from megatron.core.transformer.module import MegatronModule from megatron.core.transformer.transformer_config import TransformerConfig from megatron.core.transformer.utils import get_linear_layer -try: - import apex - - from megatron.core.fusions.fused_layer_norm import FusedLayerNorm - - HAVE_APEX = True +if HAVE_FUSED_LAYER_NORM: LNImpl = FusedLayerNorm -except ImportError: +else: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm - - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. Falling back to Torch Norm') + from megatron.core.transformer.torch_norm import WrappedTorchNorm as LNImpl class BertLMHead(MegatronModule): @@ -48,6 +42,8 @@ def __init__(self, hidden_size: int, config: TransformerConfig): self.gelu = torch.nn.functional.gelu def forward(self, hidden_states: Tensor) -> Tensor: + """forward pass""" + hidden_states = self.dense(hidden_states) hidden_states = self.gelu(hidden_states) hidden_states = self.layer_norm(hidden_states) diff --git a/megatron/core/models/common/embeddings/language_model_embedding.py b/megatron/core/models/common/embeddings/language_model_embedding.py index bc1a2de9c..2c7fec656 100644 --- a/megatron/core/models/common/embeddings/language_model_embedding.py +++ b/megatron/core/models/common/embeddings/language_model_embedding.py @@ -20,7 +20,9 @@ class LanguageModelEmbedding(MegatronModule): is used for positional embedding add_position_embedding (bool): Add a position embedding. embedding_dropout_prob (float): dropout probability for embeddings - num_tokentypes (int): Set to 0 without binary head, and 2 with a binary head . Defaults to 0. + num_tokentypes (int): Set to 0 without binary head, and 2 with a binary head. Defaults to 0. + scatter_to_sequence_parallel (bool): Set to False to disable scatter of embedding + across sequence parallel region. Defaults to True. """ def __init__( @@ -30,6 +32,7 @@ def __init__( max_sequence_length: int, position_embedding_type: Literal['learned_absolute', 'rope', 'none'] = 'learned_absolute', num_tokentypes: int = 0, + scatter_to_sequence_parallel: bool = True, ): super().__init__(config=config) @@ -38,10 +41,12 @@ def __init__( self.max_sequence_length: int = max_sequence_length self.add_position_embedding: bool = position_embedding_type == 'learned_absolute' self.num_tokentypes = num_tokentypes + self.scatter_to_sequence_parallel = scatter_to_sequence_parallel self.reduce_scatter_embeddings = ( (not self.add_position_embedding) and self.num_tokentypes <= 0 and self.config.sequence_parallel + and self.scatter_to_sequence_parallel ) # Word embeddings (parallel). 
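[Editor's note] The new scatter_to_sequence_parallel flag above lets callers (such as the LLaVA changes later in this patch) keep full-sequence embeddings and defer sharding. Conceptually, scattering to the sequence-parallel region gives each tensor-parallel rank its contiguous slice along the sequence dimension; the real op in megatron.core.tensor_parallel is a differentiable, communication-aware version of this rough sketch:

import torch

def conceptual_sequence_parallel_scatter(embeddings, tp_rank, tp_size):
    # embeddings: [seq, batch, hidden]; each rank keeps seq / tp_size rows.
    seq_len = embeddings.size(0)
    assert seq_len % tp_size == 0, "sequence length must be divisible by TP size"
    chunk = seq_len // tp_size
    return embeddings[tp_rank * chunk : (tp_rank + 1) * chunk]

emb = torch.randn(8, 2, 16)
print(conceptual_sequence_parallel_scatter(emb, tp_rank=1, tp_size=4).shape)  # (2, 2, 16)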
@@ -92,7 +97,8 @@ def forward(self, input_ids: Tensor, position_ids: Tensor, tokentype_ids: int = Args: input_ids (Tensor): The input tokens position_ids (Tensor): The position id's used to calculate position embeddings - tokentype_ids (int): The token type ids. Used when args.bert_binary_head is set to True. Defaults to None + tokentype_ids (int): The token type ids. Used when args.bert_binary_head is + set to True. Defaults to None Returns: Tensor: The output embeddings @@ -122,12 +128,12 @@ def forward(self, input_ids: Tensor, position_ids: Tensor, tokentype_ids: int = # Dropout. if self.config.sequence_parallel: - if not self.reduce_scatter_embeddings: + if not self.reduce_scatter_embeddings and self.scatter_to_sequence_parallel: embeddings = tensor_parallel.scatter_to_sequence_parallel_region(embeddings) # `scatter_to_sequence_parallel_region` returns a view, which prevents # the original tensor from being garbage collected. Clone to facilitate GC. # Has a small runtime cost (~0.5%). - if self.config.clone_scatter_output_in_embedding: + if self.config.clone_scatter_output_in_embedding and self.scatter_to_sequence_parallel: embeddings = embeddings.clone() with tensor_parallel.get_cuda_rng_tracker().fork(): embeddings = self.embedding_dropout(embeddings) diff --git a/megatron/core/models/common/embeddings/rope_utils.py b/megatron/core/models/common/embeddings/rope_utils.py index accb25196..f1d7ad48d 100644 --- a/megatron/core/models/common/embeddings/rope_utils.py +++ b/megatron/core/models/common/embeddings/rope_utils.py @@ -13,18 +13,36 @@ from torch import Tensor from megatron.core import parallel_state +from megatron.core.utils import is_te_min_version logger = logging.getLogger(__name__) try: - from apex.transformer.functional import ( + from megatron.core.extensions.transformer_engine import ( fused_apply_rotary_pos_emb, fused_apply_rotary_pos_emb_thd, ) HAVE_APPLY_ROPE_FUSION = True except ImportError: - HAVE_APPLY_ROPE_FUSION = False + try: + from apex.transformer.functional import ( + fused_apply_rotary_pos_emb, + fused_apply_rotary_pos_emb_thd, + ) + + HAVE_APPLY_ROPE_FUSION = True + except ImportError: + HAVE_APPLY_ROPE_FUSION = False + + +try: + from flash_attn.layers.rotary import apply_rotary_emb as apply_rotary_emb_flash +except ImportError: + apply_rotary_emb_flash = None + + +__all__ = ['apply_rotary_emb_flash'] def get_pos_emb_on_this_cp_rank(pos_emb: Tensor, seq_dim: int) -> Tensor: @@ -103,6 +121,20 @@ def _apply_rotary_pos_emb_bshd( return torch.cat((t, t_pass), dim=-1) +def _get_thd_freqs_on_this_cp_rank(cp_rank: int, cp_size: int, x: Tensor, freqs: Tensor) -> Tensor: + if cp_size > 1: + cp_seg = x.size(0) // 2 + full_seqlen = cp_size * x.size(0) + return torch.cat( + [ + freqs[cp_rank * cp_seg : (cp_rank + 1) * cp_seg], + freqs[full_seqlen - (cp_rank + 1) * cp_seg : full_seqlen - cp_rank * cp_seg], + ] + ) + else: + return freqs[: x.size(0)] + + def _apply_rotary_pos_emb_thd( t: Tensor, cu_seqlens: Tensor, @@ -123,12 +155,16 @@ def _apply_rotary_pos_emb_thd( Tensor: Shape [t, h, d]. The input tensor after applying RoPE. 
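[Editor's note] The _get_thd_freqs_on_this_cp_rank helper above reflects the load-balanced sharding used by context parallelism: each CP rank holds one chunk from the front of the full sequence and one mirrored chunk from the back. A small numeric check of which frequency rows a rank picks up (toy values; cp_size=2 with a per-rank chunk of 4 tokens):

import torch

def thd_freq_positions(cp_rank, cp_size, per_rank_len, freqs):
    # Mirrors _get_thd_freqs_on_this_cp_rank for a single packed sequence.
    if cp_size > 1:
        cp_seg = per_rank_len // 2
        full_seqlen = cp_size * per_rank_len
        return torch.cat(
            [
                freqs[cp_rank * cp_seg : (cp_rank + 1) * cp_seg],
                freqs[full_seqlen - (cp_rank + 1) * cp_seg : full_seqlen - cp_rank * cp_seg],
            ]
        )
    return freqs[:per_rank_len]

freqs = torch.arange(8)  # stand-in for per-position RoPE frequencies
print(thd_freq_positions(0, 2, 4, freqs))  # tensor([0, 1, 6, 7])
print(thd_freq_positions(1, 2, 4, freqs))  # tensor([2, 3, 4, 5])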
""" + cp_size = parallel_state.get_context_parallel_world_size() + cp_rank = parallel_state.get_context_parallel_rank() + cu_seqlens = cu_seqlens // cp_size seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist() + return torch.cat( [ _apply_rotary_pos_emb_bshd( x.unsqueeze(1), - freqs[: x.size(0)], + _get_thd_freqs_on_this_cp_rank(cp_rank, cp_size, x, freqs), rotary_interleaved=rotary_interleaved, multi_latent_attention=multi_latent_attention, mscale=mscale, @@ -149,28 +185,24 @@ def apply_rotary_pos_emb( Reroute to the appropriate apply_rotary_pos_emb function depending on fused/unfused kernels, or bshd (conventional) / thd (packed seq) format """ - if config.apply_rope_fusion and not HAVE_APPLY_ROPE_FUSION: - # setting apply_rope_fusion in config to False - # so that subsequent queries to this config also return False - config.apply_rope_fusion = False - if not getattr(apply_rotary_pos_emb, "printed_fused_warning", False): - logger.warning( - "Setting apply_rope_fusion to false because its implementation" - " is not included in Apex. Try upgrading to the latest version" - ) - apply_rotary_pos_emb.printed_fused_warning = True - - if getattr(config, "multi_latent_attention", False) and config.rotary_interleaved: - logger.warning( - "rotary_interleaved is not supported with multi_latent_attention, setting it to False" - ) - config.rotary_interleaved = False if config.apply_rope_fusion: if cu_seqlens is None: - return fused_apply_rotary_pos_emb(t, freqs, transpose_output_memory=True) + return fused_apply_rotary_pos_emb(t, freqs) else: - return fused_apply_rotary_pos_emb_thd(t, cu_seqlens, freqs) + cp_size = parallel_state.get_context_parallel_world_size() + if cp_size > 1: + if not is_te_min_version("1.11.0", check_equality=False): + raise ValueError("Only TE >= 1.12 supports RoPE fusion for THD format with CP.") + return fused_apply_rotary_pos_emb_thd( + t, + cu_seqlens, + freqs, + cp_size=cp_size, + cp_rank=parallel_state.get_context_parallel_rank(), + ) + else: + return fused_apply_rotary_pos_emb_thd(t, cu_seqlens, freqs) else: if cu_seqlens is None: return _apply_rotary_pos_emb_bshd( @@ -189,3 +221,38 @@ def apply_rotary_pos_emb( multi_latent_attention=config.multi_latent_attention, mscale=mscale, ) + + +def apply_rotary_pos_emb_with_cos_sin( + t: Tensor, cos: Tensor, sin: Tensor, rotary_interleaved: bool = False +) -> Tensor: + """ + This function applies rotary positional embedding to the target tensor t + using precomputed cos and sin of size (seq_len, d_rot / 2) + """ + cos = cos.to(t.dtype) + sin = sin.to(t.dtype) + + if apply_rotary_emb_flash is None: + # Combine cos and sin into freqs + freqs = torch.stack([cos, sin], dim=-1).flatten(start_dim=-2) + + # Expand freqs to match t's shape + while freqs.dim() < t.dim(): + freqs = freqs.unsqueeze(1) + freqs = freqs.expand(t.shape[:-1] + (-1,)) + + y = _apply_rotary_pos_emb_bshd( + t, + freqs, + rotary_interleaved=rotary_interleaved, + multi_latent_attention=False, + mscale=1.0, + ) + else: + # Use Flash Attention's optimized kernel for rotary embedding + t = t.permute(1, 0, 2, 3) + y = apply_rotary_emb_flash(t, cos, sin, rotary_interleaved) + y = y.permute(1, 0, 2, 3) + + return y diff --git a/megatron/core/models/common/embeddings/rotary_pos_embedding.py b/megatron/core/models/common/embeddings/rotary_pos_embedding.py index 5232faec6..c2837c6fa 100644 --- a/megatron/core/models/common/embeddings/rotary_pos_embedding.py +++ b/megatron/core/models/common/embeddings/rotary_pos_embedding.py @@ -7,9 +7,12 @@ if TYPE_CHECKING: from 
megatron.core.transformer.transformer_config import TransformerConfig from megatron.core.transformer.transformer_block import TransformerBlock + from megatron.core.inference_params import InferenceParams + from megatron.core.packed_seq_params import PackedSeqParams import logging import math +from functools import lru_cache import torch from torch import Tensor, nn @@ -109,12 +112,37 @@ def _apply_scaling( return inv_freq_llama - def forward(self, max_seq_len: int, offset: int = 0) -> Tensor: + def get_freqs_non_repeated(self, max_seq_len: int, offset: int = 0) -> Tensor: + """Generates matrix of frequencies based on positions in the sequence, + used to create positional encodings""" + seq = ( + torch.arange(max_seq_len, device=self.inv_freq.device, dtype=self.inv_freq.dtype) + + offset + ) + + if self.seq_len_interpolation_factor is not None: + seq *= 1 / self.seq_len_interpolation_factor + + freqs = torch.outer(seq, self.inv_freq) # [seq len, dim] + + return freqs + + def get_cos_sin(self, max_seq_len: int, offset: int = 0) -> (Tensor, Tensor): + """Cosine and sine values for RoPE are precomputed for all positions up to the maximum + sequence length""" + freqs = self.get_freqs_non_repeated(max_seq_len, offset) + cos = torch.cos(freqs) + sin = torch.sin(freqs) + return cos, sin + + @lru_cache(maxsize=32) + def forward(self, max_seq_len: int, offset: int = 0, packed_seq: bool = False) -> Tensor: """Forward pass of RoPE embedding. Args: max_seq_len (int): Maximum size of sequence - offset (int, optional): _description_. Defaults to 0. + offset (int, optional): RoPE offset. Defaults to 0. + packed_seq (bool, optional): Whether to use packed sequence. Defaults to False. Returns: Tensor: Embeddings after applying RoPE. @@ -122,15 +150,8 @@ def forward(self, max_seq_len: int, offset: int = 0) -> Tensor: if self.inv_freq.device.type == 'cpu': # move `inv_freq` to GPU once at the first micro-batch forward pass self.inv_freq = self.inv_freq.to(device=torch.cuda.current_device()) - seq = ( - torch.arange(max_seq_len, device=self.inv_freq.device, dtype=self.inv_freq.dtype) - + offset - ) - - if self.seq_len_interpolation_factor is not None: - seq *= 1 / self.seq_len_interpolation_factor - freqs = torch.outer(seq, self.inv_freq) + freqs = self.get_freqs_non_repeated(max_seq_len, offset) # first part even vector components, second part odd vector components, # 2 * dim in dimension size if not self.rotary_interleaved: @@ -141,7 +162,7 @@ def forward(self, max_seq_len: int, offset: int = 0) -> Tensor: ) # emb [seq_length, .., dim] emb = emb[:, None, None, :] - if parallel_state.get_context_parallel_world_size() > 1: + if parallel_state.get_context_parallel_world_size() > 1 and not packed_seq: # slice rotary_pos_emb along sequence dimension and select the parition of the current # CP rank emb = get_pos_emb_on_this_cp_rank(emb, 0) @@ -153,10 +174,11 @@ def _load_from_state_dict(self, state_dict, prefix, *args, **kwargs): def get_rotary_seq_len( self, - inference_params, + inference_params: InferenceParams, transformer: TransformerBlock, transformer_input: Tensor, transformer_config: TransformerConfig, + packed_seq_params: PackedSeqParams, ) -> float: """Function to get the rotary sequence length. 
@@ -166,11 +188,16 @@ def get_rotary_seq_len( by the model transformer_input (Tensor): Input tensor to the transformer transformer_config (TransformerConfig): Transformer config used by the model + packed_seq_params (PackedSeqParams): Packed sequence params Returns: float: The rotary sequence length """ - if inference_params is not None: + if packed_seq_params is not None: + # max_seqlen are the max sequence length in the packed sequence before being divived + # by the tp and cp size. + return max(packed_seq_params.max_seqlen_q, packed_seq_params.max_seqlen_kv) + elif inference_params is not None: rotary_seq_len = inference_params.max_sequence_length else: if transformer.input_tensor is not None: diff --git a/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py b/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py index 14d147ea3..3ab155dcd 100644 --- a/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py +++ b/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py @@ -4,6 +4,7 @@ import logging import math +from functools import lru_cache import torch from torch import Tensor @@ -82,8 +83,17 @@ def __init__( use_cpu_initialization, ) + @lru_cache(maxsize=32) def forward(self, max_seq_len: int, offset: int = 0) -> Tensor: + """Forward pass of Yarn Rotary Embedding. + Args: + max_seq_len (int): Maximum size of sequence + offset (int, optional): RoPE offset. Defaults to 0. + + Returns: + Tensor: Embeddings after applying Yarn RoPE. + """ assert ( not self.rotary_interleaved ), "Yarn RoPE does not support interleaved rotary embeddings" diff --git a/megatron/core/models/gpt/gpt_layer_specs.py b/megatron/core/models/gpt/gpt_layer_specs.py index 1db68dc88..374161757 100755 --- a/megatron/core/models/gpt/gpt_layer_specs.py +++ b/megatron/core/models/gpt/gpt_layer_specs.py @@ -17,6 +17,7 @@ ) from megatron.core.transformer.spec_utils import ModuleSpec from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules +from megatron.core.utils import is_te_min_version try: from megatron.core.extensions.transformer_engine import ( @@ -43,10 +44,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn('Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn('Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm def get_gpt_layer_with_transformer_engine_spec( @@ -99,6 +100,12 @@ def get_gpt_layer_with_transformer_engine_spec( ), ) else: + + # TENorm significantly harms convergence when used + # for QKLayerNorm if TE Version < 1.9; + # we instead use the Apex implementation. + qk_norm = TENorm if is_te_min_version("1.9.0") else FusedLayerNorm + return ModuleSpec( module=TransformerLayer, submodules=TransformerLayerSubmodules( @@ -109,10 +116,8 @@ def get_gpt_layer_with_transformer_engine_spec( linear_qkv=TELayerNormColumnParallelLinear, core_attention=TEDotProductAttention, linear_proj=TERowParallelLinear, - # TENorm significantly harms convergence when used - # for QKLayerNorm; we instead use the Apex implementation. 
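[Editor's note] The is_te_min_version gate above (and the is_torch_min_version switch in the jit.py hunk earlier) replaces manual major/minor integer parsing with proper version comparison, imported from megatron.core.utils. A minimal sketch of the idea using packaging, not the actual MCore helper:

import torch
from packaging.version import Version as PkgVersion

def torch_is_at_least(version: str) -> bool:
    # Version-aware comparison handles pre-releases ("2.2.0a0") and
    # multi-digit minors ("2.10" vs "2.9"), unlike int(major), int(minor) parsing.
    return PkgVersion(torch.__version__) >= PkgVersion(version)

print(torch_is_at_least("2.2.0a0"))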
- q_layernorm=FusedLayerNorm if qk_layernorm else IdentityOp, - k_layernorm=FusedLayerNorm if qk_layernorm else IdentityOp, + q_layernorm=qk_norm if qk_layernorm else IdentityOp, + k_layernorm=qk_norm if qk_layernorm else IdentityOp, ), ), self_attn_bda=get_bias_dropout_add, diff --git a/megatron/core/models/gpt/gpt_model.py b/megatron/core/models/gpt/gpt_model.py index bd52f8968..be8cdce11 100644 --- a/megatron/core/models/gpt/gpt_model.py +++ b/megatron/core/models/gpt/gpt_model.py @@ -50,6 +50,9 @@ class GPTModel(LanguageModule): Base period for rotary position embeddings. Ignored unless position_embedding_type is 'rope'. Defaults to 10000. + scatter_embedding_sequence_parallel (bool, optional): + Whether embeddings should be scattered across sequence parallel + region or not. Defaults to True. seq_len_interpolation_factor (Optional[float], optional): scale of linearly interpolating RoPE for longer sequences. The value must be a float larger than 1.0. Defaults to None. @@ -70,6 +73,7 @@ def __init__( rotary_percent: float = 1.0, rotary_base: int = 10000, rope_scaling: bool = False, + scatter_embedding_sequence_parallel: bool = True, seq_len_interpolation_factor: Optional[float] = None, ) -> None: super().__init__(config=config) @@ -103,6 +107,7 @@ def __init__( vocab_size=self.vocab_size, max_sequence_length=self.max_sequence_length, position_embedding_type=position_embedding_type, + scatter_to_sequence_parallel=scatter_embedding_sequence_parallel, ) if self.position_embedding_type == 'rope' and not self.config.multi_latent_attention: @@ -216,11 +221,23 @@ def forward( # Rotary positional embeddings (embedding is None for PP intermediate devices) rotary_pos_emb = None + rotary_pos_cos = None + rotary_pos_sin = None if self.position_embedding_type == 'rope' and not self.config.multi_latent_attention: - rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( - inference_params, self.decoder, decoder_input, self.config - ) - rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len) + if not self.training and self.config.flash_decode: + # Flash decoding uses precomputed cos and sin for RoPE + rotary_pos_cos, rotary_pos_sin = self.rotary_pos_emb.get_cos_sin( + inference_params.max_sequence_length + ) + else: + rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( + inference_params, self.decoder, decoder_input, self.config, packed_seq_params + ) + rotary_pos_emb = self.rotary_pos_emb( + rotary_seq_len, + packed_seq=packed_seq_params is not None + and packed_seq_params.qkv_format == 'thd', + ) # Run decoder. 
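[Editor's note] The flash-decode branch above relies on the new get_cos_sin helper: cos and sin are computed once for all positions up to the inference max sequence length and reused at every decode step, instead of rebuilding the full rotary embedding tensor. A hedged sketch of that precompute, with the inv_freq construction assumed to follow the usual RoPE recipe:

import torch

def precompute_rope_cos_sin(max_seq_len, dim, base=10000.0, device="cpu"):
    # Standard RoPE frequencies over half the rotary dimension.
    inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, device=device).float() / dim))
    positions = torch.arange(max_seq_len, device=device).float()
    freqs = torch.outer(positions, inv_freq)          # [max_seq_len, dim // 2]
    return torch.cos(freqs), torch.sin(freqs)

cos, sin = precompute_rope_cos_sin(max_seq_len=2048, dim=128)
step = 17                             # current decode position
cos_t, sin_t = cos[step], sin[step]   # reused each step; the table is never recomputed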
hidden_states = self.decoder( @@ -228,6 +245,8 @@ def forward( attention_mask=attention_mask, inference_params=inference_params, rotary_pos_emb=rotary_pos_emb, + rotary_pos_cos=rotary_pos_cos, + rotary_pos_sin=rotary_pos_sin, packed_seq_params=packed_seq_params, **(extra_block_kwargs or {}), ) diff --git a/megatron/core/models/multimodal/llava_model.py b/megatron/core/models/multimodal/llava_model.py index 074cfaae9..dafe37745 100644 --- a/megatron/core/models/multimodal/llava_model.py +++ b/megatron/core/models/multimodal/llava_model.py @@ -6,17 +6,36 @@ import torch -from megatron.core import InferenceParams +from megatron.core import InferenceParams, tensor_parallel from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk from megatron.core.models.gpt import GPTModel from megatron.core.models.vision.clip_vit_model import CLIPViTModel, get_num_image_embeddings from megatron.core.models.vision.multimodal_projector import MultimodalProjector +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.parallel_state import get_context_parallel_group, get_context_parallel_world_size from megatron.core.transformer import MegatronModule from megatron.core.transformer.spec_utils import ModuleSpec from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import log_single_rank + +try: + import transformer_engine # pylint: disable=unused-import + from transformer_engine.pytorch.distributed import gather_along_first_dim + + from megatron.core.extensions.transformer_engine import TEDotProductAttention + from megatron.core.utils import is_te_min_version + + HAVE_TE = True +except: + HAVE_TE = False + if get_context_parallel_world_size() > 1: + raise RuntimeError("ContextParallelism requires TransformerEngine support, but not found.") + -IMAGE_TOKEN_INDEX = -200 # ID for images in the input sequence. IGNORE_INDEX = -100 # ID for labels that should be ignored. +# Image token index can be tokenizer dependent so the default value does not work in all cases. +DEFAULT_IMAGE_TOKEN_INDEX = -200 +IMAGE_TOKEN = "" # Note: This is under development and may be missing features. @@ -50,6 +69,10 @@ class LLaVAModel(MegatronModule): img_w (int): Input image width. patch_dim (int): The size of each image patch side. language_rotary_base (int): RoPE base. + language_rope_scaling (bool): Toggle RoPE scaling. + image_token_index (int): Token ID for image token such as . + pixel_shuffle (bool): Enable pixel shuffle. + tile_tags (list): Optional tile tags. """ def __init__( @@ -77,15 +100,19 @@ def __init__( patch_dim: int = 14, language_rotary_base: int = 10000, language_rope_scaling: bool = False, + image_token_index: int = DEFAULT_IMAGE_TOKEN_INDEX, + pixel_shuffle: bool = False, + tile_tags: Optional[list] = None, ) -> None: super().__init__(config=language_transformer_config) if has_config_logger_enabled(language_transformer_config): log_config_to_disk(language_transformer_config, locals(), prefix=type(self).__name__) - logging.getLogger(__name__).warning( - "LLaVA model is under active development. " - "It may be missing features and its methods may change." + log_single_rank( + logging.getLogger(__name__), + logging.WARNING, + "LLaVA is work in progress. 
Features are missing and methods can change.", ) self.pre_process = pre_process @@ -98,6 +125,21 @@ def __init__( self.vision_projection = None self.language_model = None + self.sequence_parallel_lm = language_transformer_config.sequence_parallel + self.tp_comm_overlap_lm = language_transformer_config.tp_comm_overlap + self.context_parallel_lm = language_transformer_config.context_parallel_size + if self.sequence_parallel_lm or self.context_parallel_lm > 1: + assert ( + language_transformer_layer_spec.submodules.self_attention.submodules.core_attention + == TEDotProductAttention + and HAVE_TE + ), "Sequence/Context Parallelism is supported only with TE DotProductAttention." + if self.context_parallel_lm > 1: + assert is_te_min_version( + "1.10.0" + ), "Context Parallelism in LLaVA requires TE v1.10 or higher" + self.tensor_model_parallel_size_lm = language_transformer_config.tensor_model_parallel_size + # This attribute is needed to check if an all-reduce is required # on the word embeddings inside `finalize_model_grads._allreduce_word_embedding_grads`. self.share_embeddings_and_output_weights = False @@ -114,6 +156,7 @@ def __init__( post_process=self.post_process, rotary_base=language_rotary_base, rope_scaling=language_rope_scaling, + scatter_embedding_sequence_parallel=False, ) self.share_embeddings_and_output_weights = ( self.language_model.share_embeddings_and_output_weights @@ -145,12 +188,16 @@ def __init__( model_subtype=vision_transformer_config.vision_model_type, add_class_token=add_class_token, ) + + vision_projection_input_size = vision_transformer_config.hidden_size + vision_projection_input_size *= 4 if pixel_shuffle else 1 + # Map (intermediate) vision model outputs to the language model input dimension. self.vision_projection = MultimodalProjector( vision_projection_config, vision_projection_layer_spec, vision_projection_type, - vision_transformer_config.hidden_size, # input size to the projection. + vision_projection_input_size, ) # Ignore missing weights for the vision projection during checkpoint loading. # This should be disabled by default but can be enabled if your checkpoint contains @@ -172,8 +219,14 @@ def __init__( vision_transformer_config.vision_model_type, drop_vision_class_token, class_token_len, + pixel_shuffle, + tile_tags is not None, # Tile tags enabled/disabled. ) + self.image_token_index = image_token_index + self._pixel_shuffle = pixel_shuffle + self._tile_tags = tile_tags + def shared_embedding_or_output_weight(self): """This is a convenience method to surface the language model's word embeddings, which is necessary for `finalize_model_grads._allreduce_word_embedding_grads`.""" @@ -230,8 +283,10 @@ def _preprocess_data( loss_mask, labels, use_inference_kv_cache, + inference_params, image_token_index, num_image_tiles, + image_token_mask=None, ): """Preprocess input data before input to language model. @@ -273,7 +328,7 @@ def _preprocess_data( # No pre- or postprocessing needed. # With pipeline parallel > 2, this means a chunk in the middle of the model. if not self.pre_process and not self.post_process: - return language_embeddings, loss_mask, labels + return None, None, None # If using the inference KV cache, the image tokens are already computed. 
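[Editor's note] pixel_shuffle above trades spatial resolution of the vision tokens for channel width, which is why the projection input size is multiplied by 4. One common formulation (InternVL-style; the actual helper used by this patch is defined elsewhere, so treat the details as an assumption):

import torch

def pixel_shuffle_tokens(x: torch.Tensor, scale: float = 0.5) -> torch.Tensor:
    # x: [num_tiles, seq, hidden] where seq is a square grid, e.g. 1024 = 32 * 32.
    n, s, h = x.shape
    w = int(s ** 0.5)
    x = x.view(n, w, w, h)
    # Fold 2x2 spatial neighborhoods into the channel dimension.
    x = x.view(n, w, int(w * scale), int(h / scale))
    x = x.permute(0, 2, 1, 3).contiguous()
    x = x.view(n, int(w * scale), int(w * scale), int(h / (scale * scale)))
    return x.view(n, int(s * scale * scale), int(h / (scale * scale)))

tokens = torch.randn(2, 1024, 1024)           # [num_tiles, img_seq_len, h_vision]
print(pixel_shuffle_tokens(tokens).shape)     # torch.Size([2, 256, 4096])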
if use_inference_kv_cache: @@ -281,6 +336,9 @@ def _preprocess_data( img_seq_len = self._img_seq_len batch_size, text_seq_len = input_ids.shape + # input_ids seq len is expected to be sharded by CP size + if self.context_parallel_lm: + text_seq_len *= self.context_parallel_lm has_labels = labels is not None if has_labels: @@ -290,7 +348,12 @@ def _preprocess_data( # Create indices for new text and label positions. with torch.no_grad(): - image_token_mask = input_ids == image_token_index + if image_token_mask is None: + assert ( + self.context_parallel_lm <= 1 + ), "image_token_mask cannot be inferred from input_ids if using \ + Context Parallelism. Please provide in forward_step" + image_token_mask = input_ids == image_token_index num_images_per_sample = torch.sum(image_token_mask, dim=-1) # Number of tiles per sample. @@ -308,10 +371,11 @@ def _preprocess_data( if ( self._language_is_pipeline_parallel and max_seq_len < self._language_max_sequence_length + and inference_params is None ): max_seq_len = self._language_max_sequence_length - batch_indices, non_image_indices = torch.where(input_ids != image_token_index) + batch_indices, non_image_indices = torch.where(image_token_mask != True) # New position ids for the text tokens, shifted by the image sequence length. # E.g. for input_ids = [-200, 1, 2, 3] and img_seq_len = 576, we get @@ -375,7 +439,7 @@ def _preprocess_data( # Create the final labels and loss mask (if this is the last language model stage). final_labels, final_loss_mask = None, None - if has_labels: + if self.post_process and has_labels: final_labels = torch.full( (batch_size, max_seq_len), IGNORE_INDEX, dtype=labels.dtype, device=labels.device ) @@ -415,29 +479,237 @@ def _preprocess_data( final_loss_mask[valid_batch_image_indices, valid_before_image_indices] = 0 - if final_embedding is not None and has_labels: + if final_embedding is not None and final_labels is not None: assert ( final_embedding.shape[:2] == final_labels.shape == final_loss_mask.shape ), "unexpected shapes after data preprocessing" if final_embedding is not None: - final_embedding = final_embedding.transpose(1, 0).contiguous() - - # Truncate if exceeding the language model's max sequence length. - truncate_embedding = ( - final_embedding is not None - and final_embedding.shape[0] > self._language_max_sequence_length + # Truncate if exceeding the language model's max sequence length. + if final_embedding.shape[1] > self._language_max_sequence_length: + final_embedding = final_embedding[:, : self._language_max_sequence_length] + # Transpose to [s,b,h] if not using CP because CP Sharding expects seq in dim=1 + if self.context_parallel_lm == 1: + final_embedding = final_embedding.transpose(1, 0).contiguous() + + truncate_labels = ( + final_labels is not None and final_labels.shape[1] > self._language_max_sequence_length ) - if truncate_embedding: - final_embedding = final_embedding[: self._language_max_sequence_length] - - truncate_labels = has_labels and final_labels.shape[1] > self._language_max_sequence_length if truncate_labels: final_labels = final_labels[:, : self._language_max_sequence_length] final_loss_mask = final_loss_mask[:, : self._language_max_sequence_length] return final_embedding, final_labels, final_loss_mask + def _process_embedding_token_parallel( + self, combined_embeddings, new_labels, new_loss_mask, packed_seq_params + ): + """Processes the input data for model parallelism support. 
+ + When using sequence parallelism (SP) or context parallelism (CP), the sequence is sharded + across different GPUs. This function helps ensure that the sharding is done correctly by + 1. Calculates `padding_factor` which determines based on how many chunks we expect to shard + the sequence + 2. Calculates and pads the inputs to necessary length to ensure equal sized chunks + 3. Creates/Modifies PackedSeqParams which helps mask padded tokens during calculations + 4. Performs any layout changes if necessary + 5. Distributes the sequence across GPUs for SP and CP + + Context Parallelism is a feature that helps improve memory efficiency for + long sequence training by distributing sequence across CP ranks. + It requires token length to be divisible by (CP size *2) to ensure proper load balance. + Please refer to `get_batch_on_this_cp_rank` function for more details. + + Sequence Parallelism is a feature that helps improve memory efficiency for + long sequence training by distributing sequence across TP ranks. + It requires token length to be divisible by TP size. + + Returns: + combined_embeddings (torch.Tensor): image and text embeddings combined and distributed. + new_labels (torch.Tensor): Distributed labels for image and text positions. + new_loss_mask (torch.Tensor): Distributed loss mask. + packed_seq_params (PackedSeqParams): Dict with padded token information. + + """ + # combined_embeddings - `s,b,h` if not using CP, `b,s,h` if using CP + batch_size = ( + combined_embeddings.shape[0] + if self.context_parallel_lm > 1 + else combined_embeddings.shape[1] + ) + seq_dim = 1 if self.context_parallel_lm > 1 else 0 + + padding_mask_type = 'padding' in str( + self.language_model.transformer_layer_spec.submodules.self_attention.params.get( + 'attn_mask_type', '' + ) + ) + if self.sequence_parallel_lm and self.tp_comm_overlap_lm: + assert ( + combined_embeddings.shape[seq_dim] == self._language_max_sequence_length + ) or padding_mask_type, f"TP Comm overlap either requires Vision+Text token length \ + == language_max_sequence_length or mask type to be set to padding/padding_causal" + + if padding_mask_type: + # Calculate the padded sequence length needed to support SP and CP + # SP and CP are used to distributed the sequence across GPUs to improve + # memory efficiency and enable very long context training. + # To distribute workload equally, we need to ensure that the sequence is + # divisible by the appropriate padding factor calculated below. + padding_factor = None + padded_seq_len = None + mp_padding_needed = 0 + if self.context_parallel_lm > 1 and self.sequence_parallel_lm: + padding_factor = self.tensor_model_parallel_size_lm * self.context_parallel_lm * 2 + elif self.context_parallel_lm > 1: + padding_factor = self.context_parallel_lm * 2 + elif self.sequence_parallel_lm: + padding_factor = self.tensor_model_parallel_size_lm + + padded_seq_len = int( + (combined_embeddings.shape[seq_dim] + (padding_factor - 1)) + // padding_factor + * padding_factor + ) + + assert ( + padded_seq_len <= self._language_max_sequence_length + ), f"Sequence length after padding {padded_seq_len} for SP/CP has exceeded \ + language_max_sequence_length. Ensure language_max_sequence_length is \ + divisible by SP/CP factor: {padding_factor}" + + if self.sequence_parallel_lm and self.tp_comm_overlap_lm: + # TP Comm overlap initializes the user buffer shape used for communication + # at the beginning of training run and the same shape is expected to be + # used throughout the training. 
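[Editor's note] The padding rules above boil down to rounding the combined image plus text sequence length up to a multiple that both SP and CP can split evenly; a compact restatement of the factor selection:

def mp_padded_seq_len(seq_len, tp_size=1, cp_size=1, sequence_parallel=False):
    # CP needs seq divisible by 2 * cp_size (load-balanced halves);
    # SP additionally needs divisibility by tp_size.
    if cp_size > 1 and sequence_parallel:
        factor = tp_size * cp_size * 2
    elif cp_size > 1:
        factor = cp_size * 2
    elif sequence_parallel:
        factor = tp_size
    else:
        return seq_len
    return (seq_len + factor - 1) // factor * factor

print(mp_padded_seq_len(1000, tp_size=4, cp_size=2, sequence_parallel=True))  # 1008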
+ # Pad to language_max_sequence_length to use TP Comm overlap. + assert ( + self._language_max_sequence_length % padding_factor == 0 + ), f"TP Comm overlap uses language_max_sequence_length \ + which needs to be divisible by SP/CP factor {padding_factor}" + padded_seq_len = self._language_max_sequence_length + + assert ( + packed_seq_params is not None + ), "Please provide PackedSeqParams dict when using SP or CP with padding" + valid_seqlens = packed_seq_params.cu_seqlens_q[1:] - packed_seq_params.cu_seqlens_q[:-1] + valid_seq_len = max(valid_seqlens) + assert ( + padded_seq_len >= valid_seq_len + ), f"Padded Seq Len calculated for model parallelism: {padded_seq_len} \ + is shorter than expected valid token len {valid_seq_len} provided." + + mp_padding_needed = padded_seq_len - combined_embeddings.shape[seq_dim] + if mp_padding_needed > 0: + new_labels = torch.nn.functional.pad( + new_labels, (0, mp_padding_needed), value=IGNORE_INDEX + ) + new_loss_mask = torch.nn.functional.pad(new_loss_mask, (0, mp_padding_needed)) + if self.context_parallel_lm > 1: + combined_embeddings = torch.nn.functional.pad( + combined_embeddings, (0, 0, 0, mp_padding_needed) + ) + else: + combined_embeddings = torch.nn.functional.pad( + combined_embeddings, (0, 0, 0, 0, 0, mp_padding_needed) + ) + + # Update PackedSeqParams if padding needed beyond user provided PackedSeqParams + packed_seq_params.max_seqlen_q = padded_seq_len + packed_seq_params.max_seqlen_kv = padded_seq_len + cu_seqlens_padded = None + # We need cu_seqlens_q_padded/cu_seqlens_kv_padded when doing + # CP+Padding to support accurate Attention with THD format. + if self.context_parallel_lm > 1: + cu_seqlens_padded = torch.arange( + 0, + (batch_size + 1) * (padded_seq_len), + step=(padded_seq_len), + dtype=torch.int32, + device=combined_embeddings.device, + ) + packed_seq_params.cu_seqlens_q_padded = cu_seqlens_padded + packed_seq_params.cu_seqlens_kv_padded = cu_seqlens_padded + packed_seq_params.qkv_format = 'thd' + else: + packed_seq_params.qkv_format = 'sbhd' + + if self.context_parallel_lm > 1: + # Distribute sequence across CP ranks + from megatron.training.utils import get_batch_on_this_cp_rank + + batch = get_batch_on_this_cp_rank( + { + "combined_embeddings": combined_embeddings, + "new_labels": new_labels, + "new_loss_mask": new_loss_mask, + } + ) + + combined_embeddings = batch["combined_embeddings"] # [B, S/CP, H] + new_labels = batch["new_labels"] + new_loss_mask = batch["new_loss_mask"] + + if getattr(packed_seq_params, 'qkv_format', None) == 'thd': + # If PackedSeqParams requires THD format, + # reshape embedding from [B,S,H] to [T,1,H] where T=B*S + combined_embeddings = ( + combined_embeddings.contiguous() + .view(combined_embeddings.shape[0] * combined_embeddings.shape[1], -1) + .unsqueeze(1) + ) + new_labels = new_labels.view(new_labels.shape[0] * new_labels.shape[1]).unsqueeze(0) + new_loss_mask = new_loss_mask.view( + new_loss_mask.shape[0] * new_loss_mask.shape[1] + ).unsqueeze(0) + else: + combined_embeddings = combined_embeddings.transpose( + 1, 0 + ).contiguous() # [B,S/CP,H] -> [S/CP,B,H] + + if self.sequence_parallel_lm: + combined_embeddings = tensor_parallel.scatter_to_sequence_parallel_region( + combined_embeddings + ) # [S/(CP*TP),B,H] + + return combined_embeddings, new_labels, new_loss_mask, packed_seq_params + + def _apply_tile_tagging(self, image_embeddings, num_image_tiles): + """Apply tile tagging. + + The image embeddings of multiple tiles are prepended with tile tags such as . 
+ This implements the method used in NVLM https://arxiv.org/pdf/2409.11402. + + Args: + image_embeddings (torch.Tensor): [img_seq_len, num_tiles, h_language]. + num_image_tiles (torch.Tensor): Number of tiles for each input image [num_images]. + + Returns: + torch.Tensor: Tile tags prepended to image embeddings. + [tile_seq_len (=5) + img_seq_len, num_tiles, h_language] + """ + assert ( + num_image_tiles.shape[0] == 1 and len(num_image_tiles) == 1 + ), "multiple input images are not supported yet." + + num_tiles = num_image_tiles[0].item() + tile_tags = self._tile_tags[: num_tiles - 1] + [self._tile_tags[-1]] + + # [num_tiles, tile_seq_len (=5)] + tile_tag_input_ids = torch.tensor( + tile_tags, dtype=torch.int64, device=num_image_tiles.device + ) + + # [tile_seq_len, num_tiles, h_language] + tile_tag_embeds = self.language_model.embedding(tile_tag_input_ids, position_ids=None) + + # [num_tiles, dim] should be the same + assert tile_tag_embeds.shape[1:] == image_embeddings.shape[1:] + + image_embeddings = torch.cat([tile_tag_embeds, image_embeddings]) + + return image_embeddings # [tile_seq_len + img_seq_len, num_tiles, h_language] + def forward( self, images: torch.Tensor, @@ -448,8 +720,10 @@ def forward( loss_mask: Optional[torch.Tensor] = None, inference_params: Optional[InferenceParams] = None, num_image_tiles: Optional[List[int]] = None, - image_token_index: Optional[int] = IMAGE_TOKEN_INDEX, + image_token_index: Optional[int] = None, runtime_gather_output: Optional[bool] = None, + image_token_mask: Optional[torch.Tensor] = None, + packed_seq_params: Optional[PackedSeqParams] = None, ) -> torch.Tensor: """Forward function of the LLaVA model. @@ -460,14 +734,21 @@ def forward( input_ids (torch.Tensor): input text ids [batch, text_seq_len]. position_ids (torch.Tensor): input text position ids [batch, text_seq_len]. attention_mask (torch.Tensor): Language model attention mask - [batch, 1, combined_seq_len, combined_seq_len]. + [batch, 1, 1, combined_seq_len]. NOTE: attention_mask is typically None and + attn_mask_type in layer specs determines the attention mask used. labels (torch.Tensor): Optional target text labels [batch, combined_seq_len]. loss_mask (torch.Tensor): Text loss mask [batch, text_seq_len]. inference_params (InferenceParams): Inference-time parameters including KV cache. num_image_tiles (list of int): Number of tiles per image. Default 1 tile per image. - image_token_index (int): ID for input images. + image_token_index (int): ID for input images. Default None means `image_token_index` + arg in the constructor will be used. runtime_gather_output (bool): Gather output at runtime. Default None means `parallel_output` arg in the constructor will be used. + image_token_mask (torch.Tensor): Tensor indicating the location of + the image token index in input_ids. + packed_seq_params (PackedSeqParams): 1) If using sequence packing, must contain + subsample length information. 2) If using SP/CP with padding mask type, + must contain padded token information. Returns: output (torch.Tensor): Loss of shape [b, s] if labels are provided, @@ -478,7 +759,7 @@ def forward( inference_params is not None and "image_tokens_count" in inference_params.key_value_memory_dict ) - has_images = images.shape[0] > 0 + has_images = images is not None and images.shape[0] > 0 # If running inference, we can skip image token computation # if they were computed already earlier for this sample.
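A related standalone sketch: when every sequence in a batch is padded to the same length, the padded cumulative sequence lengths built for THD-format attention a few hunks above are simply multiples of that length (batch size and sequence length here are hypothetical):

import torch

batch_size, padded_seq_len = 4, 16  # hypothetical values
cu_seqlens_padded = torch.arange(
    0, (batch_size + 1) * padded_seq_len, step=padded_seq_len, dtype=torch.int32
)
# One boundary per sequence plus the leading zero.
assert cu_seqlens_padded.tolist() == [0, 16, 32, 48, 64]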
@@ -493,6 +774,12 @@ def forward( image_embeddings = self.vision_model(images) # [num_tiles, img_seq_len, h_vision] if self._drop_vision_class_token: image_embeddings = image_embeddings[:, self.vision_model.class_token_len :, :] + + if self._pixel_shuffle: + image_embeddings = pixel_shuffle( + image_embeddings + ) # [num_tiles, img_seq_len_shuffled, h_vision_shuffled] + # contiguous() required as `permute` can sparsify the tensor and this breaks pipelining image_embeddings = image_embeddings.permute( 1, 0, 2 @@ -503,6 +790,10 @@ def forward( image_embeddings ) # [img_seq_len, num_tiles, h_language] + # Apply tile tagging if enabled and an image token is present. + if self._tile_tags is not None and torch.any(input_ids == self.image_token_index): + image_embeddings = self._apply_tile_tagging(image_embeddings, num_image_tiles) + # TODO: Support batched inference. # In inference, the language model KV cache will be updated for image token positions. # Store the image tokens sequence length to be used as an offset to the KV cache later. @@ -519,13 +810,19 @@ def forward( language_embeddings = None if self.pre_process: input_ids_text = input_ids.clone() - input_ids_text[input_ids_text == image_token_index] = 0 + input_ids_text[input_ids_text == self.image_token_index] = 0 # Note: This adds absolute position embedding but not RoPE. # Each image is counted as one position. # RoPE is added in language_model forward. Each image embedding is one position. language_embeddings = self.language_model.embedding( input_ids=input_ids_text, position_ids=position_ids ) # [text_seq_len, b, h_language] + # Gather the language embeddings back. We need the full embedding to insert + # image embeddings and then scatter again to avoid load imbalance. + if self.context_parallel_lm > 1: + cp_group = get_context_parallel_group() + language_embeddings, _ = gather_along_first_dim(language_embeddings, cp_group) + language_embeddings = language_embeddings.transpose( 1, 0 ).contiguous() # [b, text_seq_len, h_language] @@ -534,7 +831,6 @@ def forward( if num_image_tiles is None: num_image_tiles = torch.ones(images.shape[0], dtype=torch.int, device=input_ids.device) - # Preprocess input, labels and loss mask. 
combined_embeddings, new_labels, new_loss_mask = self._preprocess_data( image_embeddings, language_embeddings, @@ -542,10 +838,19 @@ def forward( loss_mask, labels, use_inference_kv_cache, - image_token_index, + inference_params, + image_token_index if image_token_index is not None else self.image_token_index, num_image_tiles, + image_token_mask, ) # [combined_seq_len, b, h_language], [b, combined_seq_len], [b, combined_seq_len] + if self.context_parallel_lm > 1 or self.sequence_parallel_lm: + combined_embeddings, new_labels, new_loss_mask, packed_seq_params = ( + self._process_embedding_token_parallel( + combined_embeddings, new_labels, new_loss_mask, packed_seq_params + ) + ) + output = self.language_model( input_ids=None, position_ids=None, @@ -554,11 +859,9 @@ def forward( labels=new_labels, inference_params=inference_params, runtime_gather_output=runtime_gather_output, + packed_seq_params=packed_seq_params, ) - if labels is None or loss_mask is None: - return output - return output, new_loss_mask @@ -584,3 +887,37 @@ def _load_state_dict_hook_ignore_param_names( f"{param_name} being removed from incompatible_keys.missing_keys in LlavaModel" ) incompatible_keys.missing_keys.remove(param_name) + + +# pylint: disable-next=line-too-long +# Based on https://github.com/OpenGVLab/InternVL/blob/c7c5af1a8930b4862afe8ed14672307082ef61fa/internvl_chat/internvl/model/internvl_chat/modeling_internvl_chat.py#L218 +# Copyright (c) 2023 OpenGVLab. +def pixel_shuffle(x, scale_factor=0.5, version=2): + """Pixel shuffle based on InternVL but adapted for our use case. + + Args: + x (torch.Tensor): Vision model outputs [num_tiles, img_seq_len, h_vision] + version (int): Implementation version. + + Returns: + Shuffled vision model outputs [num_tiles, (sq ** 2) * (scale ** 2), h_vision / (scale ** 2)] + """ + h = w = int(x.shape[1] ** 0.5) # sq + x = x.reshape(x.shape[0], h, w, -1) # [num_tiles, sq, sq, h_vision] + + n, w, h, c = x.size() + # N, W, H, C --> N, W, H * scale, C // scale + x = x.view(n, w, int(h * scale_factor), int(c / scale_factor)) + # N, W, H * scale, C // scale --> N, H * scale, W, C // scale + x = x.permute(0, 2, 1, 3).contiguous() + # N, H * scale, W, C // scale --> N, H * scale, W * scale, C // (scale ** 2) + x = x.view( + n, int(h * scale_factor), int(w * scale_factor), int(c / (scale_factor * scale_factor)) + ) + + if version == 2: + x = x.permute(0, 2, 1, 3).contiguous() + + x = x.reshape(x.shape[0], -1, x.shape[-1]) + + return x diff --git a/megatron/core/models/multimodal/llava_spec.py b/megatron/core/models/multimodal/llava_spec.py index 40e58d0bf..09831c6e2 100644 --- a/megatron/core/models/multimodal/llava_spec.py +++ b/megatron/core/models/multimodal/llava_spec.py @@ -25,10 +25,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. 
Falling back to Torch Norm') + LNImpl = WrappedTorchNorm def decoder_model_with_transformer_engine_default_spec( diff --git a/megatron/core/models/retro/decoder_spec.py b/megatron/core/models/retro/decoder_spec.py index 2ad234b96..f431798f1 100644 --- a/megatron/core/models/retro/decoder_spec.py +++ b/megatron/core/models/retro/decoder_spec.py @@ -34,10 +34,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm try: from megatron.core.extensions.transformer_engine import ( diff --git a/megatron/core/models/retro/encoder_spec.py b/megatron/core/models/retro/encoder_spec.py index b8a969bd8..944d52f03 100644 --- a/megatron/core/models/retro/encoder_spec.py +++ b/megatron/core/models/retro/encoder_spec.py @@ -42,10 +42,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm def get_retro_encoder_layer_te_spec() -> ModuleSpec: diff --git a/megatron/core/models/vision/clip_vit_model.py b/megatron/core/models/vision/clip_vit_model.py index 53c3fedde..2fdc77a4f 100644 --- a/megatron/core/models/vision/clip_vit_model.py +++ b/megatron/core/models/vision/clip_vit_model.py @@ -5,13 +5,21 @@ import torch from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk -from megatron.core.extensions.transformer_engine import TENorm from megatron.core.models.common.vision_module.vision_module import VisionModule from megatron.core.transformer.enums import ModelType from megatron.core.transformer.spec_utils import ModuleSpec, build_module from megatron.core.transformer.transformer_block import TransformerBlock from megatron.core.transformer.transformer_config import TransformerConfig +try: + import transformer_engine # pylint: disable=unused-import + + from megatron.core.extensions.transformer_engine import TENorm + + NORM_IMPL = TENorm +except: + NORM_IMPL = torch.nn.LayerNorm + # Note: This is under development and is missing features like position embedding interpolation. class CLIPViTModel(VisionModule): @@ -32,8 +40,8 @@ def __init__( self, transformer_config: TransformerConfig, transformer_layer_spec: ModuleSpec, - ln_pre_impl: Union[ModuleSpec, type] = TENorm, - ln_post_impl: Union[ModuleSpec, type] = TENorm, + ln_pre_impl: Union[ModuleSpec, type] = NORM_IMPL, + ln_post_impl: Union[ModuleSpec, type] = NORM_IMPL, add_class_token: bool = True, class_token_len: int = 1, patch_dim: int = 14, @@ -43,7 +51,7 @@ def __init__( ) -> None: error_msg = f"CLIPViTModel model subtype {model_subtype} is not supported." - assert model_subtype in ["clip", "siglip"], error_msg + assert model_subtype in ["clip", "siglip", "internvit"], error_msg if model_subtype == "siglip": assert class_token_len == 0, "SigLIP does not support class tokens." 
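To make the shape contract of the pixel_shuffle helper introduced above concrete, a small usage sketch, assuming that function is in scope (tile count, patch grid, and hidden size are hypothetical):

import torch

num_tiles, sq, h_vision = 2, 32, 1024          # e.g. a 32x32 patch grid per tile
x = torch.randn(num_tiles, sq * sq, h_vision)  # [num_tiles, img_seq_len, h_vision]
y = pixel_shuffle(x, scale_factor=0.5)
# The sequence length shrinks 4x (by scale_factor**2) and the hidden
# dimension grows by the same factor.
assert y.shape == (num_tiles, 256, 4096)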
@@ -82,7 +90,7 @@ def __init__( ) conv_bias = False padding = 0 - if model_subtype == "siglip": + elif model_subtype == "siglip": self.ln_post = build_module( ln_post_impl, config=transformer_config, @@ -91,6 +99,11 @@ def __init__( ) conv_bias = True padding = "valid" + elif model_subtype == "internvit": + conv_bias = True + padding = 0 + else: + raise ValueError(f"unsupported vision model type {model_subtype}") self.conv1 = torch.nn.Conv2d( in_channels=3, @@ -174,17 +187,33 @@ def forward( def get_num_image_embeddings( - img_h, img_w, patch_dim, vision_model_type, disable_vision_class_token, class_token_len + img_h, + img_w, + patch_dim, + vision_model_type, + disable_vision_class_token, + class_token_len, + pixel_shuffle=False, + use_tile_tags=False, ): """Get the number of image embeddings per image tile.""" if vision_model_type == "siglip": keep_class_token = False - elif vision_model_type == "clip": + elif vision_model_type in ("clip", "internvit"): keep_class_token = not disable_vision_class_token + else: + raise ValueError(f"unsupported vision model: {vision_model_type}") num_patches_per_dim_h = img_h // patch_dim num_patches_per_dim_w = img_w // patch_dim num_patches = num_patches_per_dim_h * num_patches_per_dim_w num_image_embeddings_per_tile = num_patches + (class_token_len if keep_class_token else 0) + if pixel_shuffle: + num_image_embeddings_per_tile = int(num_image_embeddings_per_tile * (0.5**2)) + + if use_tile_tags: + # The length of tile tags tokenized. Currently, the same across tokenizers used. + num_image_embeddings_per_tile += 5 + return num_image_embeddings_per_tile diff --git a/megatron/core/models/vision/multimodal_projector.py b/megatron/core/models/vision/multimodal_projector.py index 18e62c68a..12071cadd 100644 --- a/megatron/core/models/vision/multimodal_projector.py +++ b/megatron/core/models/vision/multimodal_projector.py @@ -1,8 +1,7 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from megatron.core import tensor_parallel from megatron.core.transformer.mlp import MLP, MLPSubmodules from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.spec_utils import build_module from megatron.core.transformer.transformer_config import TransformerConfig from megatron.core.utils import make_viewless_tensor @@ -51,6 +50,14 @@ def __init__( raise Exception(f"Unsupported multimodal projection type {self.projector_type}") def forward(self, hidden_states): + """Run multimodal projector. + + Args: + hidden_states (torch.Tensor): Input. + + Returns: + torch.Tensor: The projected output. + """ # Run encoder. encoder_output, encoder_output_bias = self.encoder(hidden_states) diff --git a/megatron/core/models/vision/vit_layer_specs.py b/megatron/core/models/vision/vit_layer_specs.py index da9066b00..5b39efe79 100644 --- a/megatron/core/models/vision/vit_layer_specs.py +++ b/megatron/core/models/vision/vit_layer_specs.py @@ -25,10 +25,10 @@ except ImportError: import warnings - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - warnings.warn(f'Apex is not installed. Falling back to Torch LayerNorm') - LNImpl = WrappedTorchLayerNorm + warnings.warn(f'Apex is not installed. 
Falling back to Torch Norm') + LNImpl = WrappedTorchNorm # Use this spec to use lower level Transformer Engine modules (required for fp8 training) diff --git a/megatron/core/num_microbatches_calculator.py b/megatron/core/num_microbatches_calculator.py index 5850e512c..ae7cea92e 100644 --- a/megatron/core/num_microbatches_calculator.py +++ b/megatron/core/num_microbatches_calculator.py @@ -51,6 +51,16 @@ def update_num_microbatches( _GLOBAL_NUM_MICROBATCHES_CALCULATOR.update(consumed_samples, consistency_check, verbose) +def unset_num_microbatches_calculator(): + """Unset microbatches calculator. + + Useful for multiple runs. See `tests/unit_tests/ckpt_converter/test_ckpt_converter.py` + for an example. + """ + global _GLOBAL_NUM_MICROBATCHES_CALCULATOR + _GLOBAL_NUM_MICROBATCHES_CALCULATOR = None + + def init_num_microbatches_calculator( rank: int, rampup_batch_size: Optional[List[int]], diff --git a/megatron/core/optimizer/__init__.py b/megatron/core/optimizer/__init__.py index 4a83564ce..0d3ec5a48 100644 --- a/megatron/core/optimizer/__init__.py +++ b/megatron/core/optimizer/__init__.py @@ -241,6 +241,7 @@ def _get_megatron_optimizer_based_on_param_groups( data_parallel_group: Optional[torch.distributed.ProcessGroup] = None, data_parallel_group_gloo: Optional[torch.distributed.ProcessGroup] = None, data_parallel_group_idx: Optional[int] = None, + distributed_optimizer_instance_id: Optional[int] = 0, ) -> MegatronOptimizer: """Get Megatron optimizer based on parameter groups. @@ -255,6 +256,8 @@ def _get_megatron_optimizer_based_on_param_groups( group for distributed optimizer. Defaults to None. data_parallel_group_idx (int, optional): data-parallel group index for distributed optimizer. Defaults to None. + distributed_optimizer_instance_id (int, optional): Distributed optimizer instance. Defaults + 0. Returns: Instance of MegatronOptimizer. @@ -325,14 +328,15 @@ def init_state_fn(opt): data_parallel_group=data_parallel_group, data_parallel_group_gloo=data_parallel_group_gloo, data_parallel_group_idx=data_parallel_group_idx, + distributed_optimizer_instance_id=distributed_optimizer_instance_id, ) else: optimizer = Float16OptimizerWithFloat16Params(*optimizer_args) - setattr(optimizer, 'model_parallel_group', model_parallel_group) + setattr(optimizer, 'grad_stats_parallel_group', model_parallel_group) else: # FP32 optimizer. 
optimizer = FP32Optimizer(optimizer, config, init_state_fn) - setattr(optimizer, 'model_parallel_group', model_parallel_group) + setattr(optimizer, 'grad_stats_parallel_group', model_parallel_group) return optimizer @@ -373,6 +377,17 @@ def get_megatron_optimizer( overlap_param_gather_with_optimizer_step_flags = [False] model_parallel_rank = torch.distributed.get_rank(mpu.get_model_parallel_group()) + if torch.distributed.get_world_size( + mpu.get_data_parallel_group(with_context_parallel=True, partial_data_parallel=False) + ) > torch.distributed.get_world_size( + mpu.get_data_parallel_group(with_context_parallel=True, partial_data_parallel=True) + ): + distributed_optimizer_instance_id = torch.distributed.get_rank( + mpu.get_inter_partial_data_parallel_group() + ) + else: + distributed_optimizer_instance_id = 0 + optimizers = [] model_chunk_offset = 0 for dense_model_chunks, overlap_param_gather_with_optimizer_step in zip( @@ -399,11 +414,14 @@ def get_megatron_optimizer( param_groups=param_groups, per_model_buffers=buffers, model_parallel_group=mpu.get_model_parallel_group(), - data_parallel_group=mpu.get_data_parallel_group(with_context_parallel=True), + data_parallel_group=mpu.get_data_parallel_group( + with_context_parallel=True, partial_data_parallel=True + ), data_parallel_group_gloo=mpu.get_data_parallel_group_gloo( - with_context_parallel=True + with_context_parallel=True, partial_data_parallel=True ), data_parallel_group_idx=model_parallel_rank, + distributed_optimizer_instance_id=distributed_optimizer_instance_id, ) ) model_chunk_offset += 1 @@ -419,23 +437,19 @@ def get_megatron_optimizer( buffer_name='expert_parallel_buffers', ) if len(moe_param_groups) > 0: - model_parallel_world_size = torch.distributed.get_world_size(mpu.get_model_parallel_group()) - expert_parallel_rank = mpu.get_expert_model_parallel_rank() + model_parallel_rank = torch.distributed.get_rank( + mpu.get_expert_tensor_model_pipeline_parallel_group() + ) optimizers.append( _get_megatron_optimizer_based_on_param_groups( config, model_chunks=model_chunks, param_groups=moe_param_groups, per_model_buffers=moe_buffers, - model_parallel_group=mpu.get_model_parallel_group(with_expert_parallel=True), - data_parallel_group=mpu.get_data_modulo_expert_parallel_group( - with_context_parallel=True - ), - data_parallel_group_gloo=mpu.get_data_modulo_expert_parallel_group_gloo( - with_context_parallel=True - ), - data_parallel_group_idx=expert_parallel_rank * model_parallel_world_size - + model_parallel_rank, + model_parallel_group=mpu.get_expert_tensor_model_pipeline_parallel_group(), + data_parallel_group=mpu.get_expert_data_parallel_group(), + data_parallel_group_gloo=mpu.get_expert_data_parallel_group_gloo(), + data_parallel_group_idx=model_parallel_rank, ) ) diff --git a/megatron/core/optimizer/clip_grads.py b/megatron/core/optimizer/clip_grads.py index 708ccd019..5c3a6578f 100644 --- a/megatron/core/optimizer/clip_grads.py +++ b/megatron/core/optimizer/clip_grads.py @@ -2,7 +2,6 @@ """Gradient clipping.""" -import os from typing import List, Optional, Union import torch @@ -46,12 +45,13 @@ from ..tensor_parallel import param_is_not_tensor_parallel_duplicate from ..transformer.module import param_is_not_shared +from ..utils import get_data_parallel_group_if_dtensor, to_local_if_dtensor def get_grad_norm_fp32( grads_for_norm: Union[List[torch.Tensor], torch.Tensor], norm_type: Union[int, float] = 2, - model_parallel_group: Optional[torch.distributed.ProcessGroup] = None, + grad_stats_parallel_group: 
Optional[torch.distributed.ProcessGroup] = None, ) -> float: """Calculate the norm of gradients in fp32. @@ -63,8 +63,9 @@ def get_grad_norm_fp32( Tensor that will be used for calculating the grad norm. norm_type (float or int): type of the used p-norm. Can be ``'inf'`` for infinity norm. - model_parallel_group (group): given the nature of the distributed - optimizer, this is passed as an argument. + grad_stats_parallel_group (group): Process group for reducing the grad norms. This is + generally the model-parallel group for non-distributed optimizers, and the entire + world for the distributed optimizer. Returns: Total norm of the parameters (viewed as a single vector). @@ -73,6 +74,12 @@ def get_grad_norm_fp32( if isinstance(grads_for_norm, torch.Tensor): grads_for_norm = [grads_for_norm] + data_parallel_group = None + for grad in grads_for_norm: + data_parallel_group = get_data_parallel_group_if_dtensor(grad, data_parallel_group) + + grads_for_norm = [to_local_if_dtensor(grad) for grad in grads_for_norm] + # Norm parameters. norm_type = float(norm_type) total_norm = 0.0 @@ -81,9 +88,13 @@ def get_grad_norm_fp32( if norm_type == inf: total_norm = max(grad.abs().max() for grad in grads_for_norm) total_norm_cuda = torch.tensor([float(total_norm)], dtype=torch.float, device='cuda') - # Take max across all model-parallel GPUs. + # Take max across all data-parallel GPUs if using FSDP and then all model-parallel GPUs. + if data_parallel_group: + torch.distributed.all_reduce( + total_norm_cuda, op=torch.distributed.ReduceOp.MAX, group=data_parallel_group + ) torch.distributed.all_reduce( - total_norm_cuda, op=torch.distributed.ReduceOp.MAX, group=model_parallel_group + total_norm_cuda, op=torch.distributed.ReduceOp.MAX, group=grad_stats_parallel_group ) total_norm = total_norm_cuda[0].item() @@ -111,9 +122,13 @@ def get_grad_norm_fp32( grad_norm = torch.norm(grad, norm_type) total_norm += grad_norm**norm_type - # Sum across all model-parallel GPUs. + # Sum across all data-parallel GPUs if using FSDP and then all model-parallel GPUs. + if data_parallel_group: + torch.distributed.all_reduce( + total_norm, op=torch.distributed.ReduceOp.SUM, group=data_parallel_group + ) torch.distributed.all_reduce( - total_norm, op=torch.distributed.ReduceOp.SUM, group=model_parallel_group + total_norm, op=torch.distributed.ReduceOp.SUM, group=grad_stats_parallel_group ) total_norm = total_norm.item() ** (1.0 / norm_type) @@ -136,11 +151,13 @@ def clip_grad_by_total_norm_fp32( total_norm (float): total norm of the gradients. """ # Grads. + params = [] grads = [] for param in parameters: if param.grad is not None: assert param.grad.type() == 'torch.cuda.FloatTensor' - grads.append(param.grad.detach()) + params.append(param) + grads.append(to_local_if_dtensor(param.grad).detach()) # Scale. clip_coeff = max_norm / (total_norm + 1.0e-6) @@ -153,7 +170,7 @@ def clip_grad_by_total_norm_fp32( def count_zeros_fp32( parameters: Union[List[torch.Tensor], torch.Tensor], - model_parallel_group: torch.distributed.ProcessGroup, + grad_stats_parallel_group: torch.distributed.ProcessGroup, ) -> float: """Counts the number of zeros in gradients associated with the passed-in list of parameters. @@ -162,8 +179,9 @@ def count_zeros_fp32( parameters (Iterable[Tensor] or Tensor): an iterable of Tensors or a single Tensor that will have the number of zeros in its corresponding gradient counted. - model_parallel_group (torch.distributed.ProcessGroup, optional): model-parallel - group over which grad norm needs to be aggregated. 
+ grad_stats_parallel_group (group): Process group for reducing the num_zeros count. This is + generally the model-parallel group for non-distributed optimizers, and the entire + world for the distributed optimizer. """ if isinstance(parameters, torch.Tensor): @@ -174,18 +192,27 @@ def count_zeros_fp32( # - parameter should not be shared # - should not be a replica due to tensor model parallelism total_num_zeros = torch.tensor([0.0], dtype=torch.float, device='cuda') + data_parallel_group = None for param in parameters: grad_not_none = param.grad is not None is_not_shared = param_is_not_shared(param) is_not_tp_duplicate = param_is_not_tensor_parallel_duplicate(param) if grad_not_none and is_not_shared and is_not_tp_duplicate: - grad = param.grad.detach() + data_parallel_group = get_data_parallel_group_if_dtensor( + param.grad, data_parallel_group + ) + grad = to_local_if_dtensor(param.grad).detach() num_zeros = grad.numel() - torch.count_nonzero(grad) total_num_zeros = num_zeros + total_num_zeros + # Sum across all data-parallel GPUs if using FSDP. + if data_parallel_group: + torch.distributed.all_reduce( + total_num_zeros, op=torch.distributed.ReduceOp.SUM, group=data_parallel_group + ) # Sum across all model-parallel GPUs. torch.distributed.all_reduce( - total_num_zeros, op=torch.distributed.ReduceOp.SUM, group=model_parallel_group + total_num_zeros, op=torch.distributed.ReduceOp.SUM, group=grad_stats_parallel_group ) total_num_zeros = total_num_zeros.item() diff --git a/megatron/core/optimizer/distrib_optimizer.py b/megatron/core/optimizer/distrib_optimizer.py index dfa8d5197..c952f4ce7 100644 --- a/megatron/core/optimizer/distrib_optimizer.py +++ b/megatron/core/optimizer/distrib_optimizer.py @@ -4,7 +4,6 @@ import itertools -import warnings from dataclasses import replace from logging import getLogger from typing import Callable, Dict, List, Optional, Tuple @@ -18,7 +17,7 @@ try: from apex.optimizers import FusedAdam as Adam except ImportError: - from torch.optim import Adam + from torch.optim import AdamW as Adam HAVE_APEX_OR_TE = False @@ -427,6 +426,7 @@ def __init__( data_parallel_group: torch.distributed.ProcessGroup, data_parallel_group_gloo: torch.distributed.ProcessGroup, data_parallel_group_idx: int, + distributed_optimizer_instance_id: int, ): """ Distributed optimizer, for all data types (fp16, bf16, and fp32). @@ -446,7 +446,7 @@ def __init__( always require a grad scaler. init_state_fn (Callable, optional): function to initialize state in the optimizer. model_chunks (List[MegatronModule]): list of model chunks. - per_model_buffers (Dict[int, List[ParamAndGradBuffer]]): the implementation of the + per_model_buffers (Dict[int, List[_ParamAndGradBuffer]]): the implementation of the distributed optimizer is centered on using a contiguous buffer for communicating grads & params between the model state and the optimizer state. You can find a more detailed description in @@ -457,15 +457,12 @@ def __init__( (used in checkpoint loading and saving). data_parallel_group_idx (int): index in data-parallel group (used by distributed checkpointing logic). + distributed_optimizer_instance_id (int): index of the Distributed Optimizer instance. """ if has_config_logger_enabled(config): log_config_to_disk(config, locals(), prefix=type(self).__name__) - assert ( - HAVE_APEX_OR_TE - ), f'Please install Apex or Transformer Engine to use DistributedOptimizer.' 
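A quick standalone check of the norm accumulation used by get_grad_norm_fp32 above: each shard contributes norm(grad)**p, the contributions are summed (by all-reduce over the relevant groups in the real code), and the p-th root gives the global norm. The tensors below are stand-ins and no process groups are involved:

import torch

norm_type = 2.0
shards = [torch.randn(10), torch.randn(7)]  # stand-ins for gradient shards on different ranks
total = sum(torch.norm(g, norm_type) ** norm_type for g in shards)
global_norm = total ** (1.0 / norm_type)
assert torch.allclose(global_norm, torch.norm(torch.cat(shards), norm_type))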
- super().__init__(optimizer, config, grad_scaler, init_state_fn) self.model_chunks = model_chunks self.ddp_config = self.model_chunks[0].ddp_config @@ -483,6 +480,7 @@ def __init__( self.data_parallel_group = data_parallel_group self.data_parallel_group_gloo = data_parallel_group_gloo self.data_parallel_group_idx = data_parallel_group_idx + self.distributed_optimizer_instance_id = distributed_optimizer_instance_id self.gbuf_idx_to_model_idx_map = {} gbuf_idx = 0 @@ -539,28 +537,6 @@ def __init__( self.optimizer.param_groups = [g["orig_group"] for g in self.opt_group_ranges] self.optimizer.load_state_dict(self.optimizer.state_dict()) - def enable_pre_hook(self): - """ - Enable forward pre-hook needed for param all-gather overlap with forward compute. - """ - warnings.warn( - "`DistributedOptimizer.enable_pre_hook` will be deprecated in a future release. " - "Use `DistributedDataParallel.enable_forward_pre_hook` directly." - ) - for model_chunk in self.model_chunks: - model_chunk.enable_forward_pre_hook() - - def disable_pre_hook(self): - """ - Disable forward pre-hook needed for param all-gather overlap with forward compute. - """ - warnings.warn( - "`DistributedOptimizer.disable_pre_hook` will be deprecated in a future release. " - "Use `DistributedDataParallel.disable_forward_pre_hook` directly." - ) - for model_chunk in self.model_chunks: - model_chunk.disable_forward_pre_hook() - def _get_model_param_range_map(self, param: torch.nn.Parameter): """ Given a model param, get the index sub-range of the param that this @@ -571,10 +547,10 @@ def _get_model_param_range_map(self, param: torch.nn.Parameter): param_range_map = gbuf_range_map["param_map"][param] return param_range_map - def get_model_parallel_group(self) -> torch.distributed.ProcessGroup: + def get_grad_stats_parallel_group(self) -> torch.distributed.ProcessGroup: """ - With the distributed optimizer, the model parallel group is the - entire world. + With the distributed optimizer, gradient statistics (num_zeros & norm) are reduced over + all ranks (versus only the model-parallel ranks with the non-distributed optimizer). """ return None @@ -969,10 +945,14 @@ def sharded_param_state_dp_zero( if is_loading: param_state_data = None else: - # Gather on rank 0 - param_state_data = self.get_parameter_state_dp_zero() + if self.distributed_optimizer_instance_id == 0: + # Gather on rank 0 + param_state_data = self.get_parameter_state_dp_zero() - if torch.distributed.get_rank(self.data_parallel_group) == 0: + if ( + torch.distributed.get_rank(self.data_parallel_group) == 0 + and self.distributed_optimizer_instance_id == 0 + ): # Fixed TPxPP. Save on DP rank 0 only param_state = ShardedObject( f'optimizer.distributed.dp_group_idx_{self.data_parallel_group_idx}.param_state', @@ -1148,7 +1128,10 @@ def sharded_param_state_fs_model_space( assert ( len(sharded_metadata.replica_id) == 3 ), f'Expected replica_id format (PP, TP, DP), got: {sharded_metadata}' - replica_id = (*sharded_metadata.replica_id[:2], 0) + replica_id = ( + *sharded_metadata.replica_id[:2], + self.distributed_optimizer_instance_id, + ) # Instantiate ShardedTensor (or ShardedTensorFactory) for optimizer # params. @@ -1481,11 +1464,11 @@ def load_parameter_state_from_dp_zero(self, state_dict, *, update_legacy_format= def split_state_dict_if_needed(self, state_dict): """ When "--fp8-param-gather" is disabled, weights and biases are stored in the same - `ParamAndGradBuffer`. So, when saving a checkpoint, the optimizer's main parameters are + `_ParamAndGradBuffer`. 
So, when saving a checkpoint, the optimizer's main parameters are saved in a single continuous tensor (this also applies to "exp_avg" and "exp_avg_sq"). However, when "--fp8-param-gather" is enabled, weights(in fp8 dtype) and biases(in bf16/fp16 - dtype) are stored in separate `ParamAndGradBuffer`. Therefore, when we enabled + dtype) are stored in separate `_ParamAndGradBuffer`. Therefore, when we enabled "--fp8-param-gather", and want to load a checkpoint saved without "--fp8-param-gather", we need to split the weights(fp8) and biases(bf16/fp16) in the static_dict into two separate tensors. @@ -1561,7 +1544,7 @@ def split_state_dict_if_needed(self, state_dict): non_fp8_idx = len(non_fp8_buffer.params) - 1 offsets, fp8_offsets, non_fp8_offsets = [0], [0], [0] - # Because the parameters in `ParamAndGradBuffer` are traversed in reverse order, the + # Because the parameters in `_ParamAndGradBuffer` are traversed in reverse order, the # flag here also needs to be traversed in reverse order. for fp8_flag in fp8_flags[::-1]: if fp8_flag: diff --git a/megatron/core/optimizer/optimizer.py b/megatron/core/optimizer/optimizer.py index 4d2b1af78..c48bb580d 100644 --- a/megatron/core/optimizer/optimizer.py +++ b/megatron/core/optimizer/optimizer.py @@ -4,7 +4,6 @@ import copy import math -import warnings from abc import ABC, abstractmethod from itertools import chain from logging import getLogger @@ -13,23 +12,27 @@ import torch try: - from transformer_engine.pytorch.optimizers import multi_tensor_applier -except ImportError: - try: - from apex.multi_tensor_apply import multi_tensor_applier - except ImportError: - from megatron.core.utils import local_multi_tensor_applier + from transformer_engine.pytorch.optimizers import multi_tensor_applier, multi_tensor_scale - multi_tensor_applier = local_multi_tensor_applier + multi_tensor_scale_impl = multi_tensor_scale +except ImportError: try: import amp_C + from apex.multi_tensor_apply import multi_tensor_applier - l2_norm_impl = amp_C.multi_tensor_l2norm multi_tensor_scale_impl = amp_C.multi_tensor_scale except ImportError: - from megatron.core.utils import local_multi_tensor_l2_norm, local_multi_tensor_scale + import warnings + + warnings.warn( + 'Transformer Engine and Apex are not installed. ' + 'Falling back to local implementations of ' + 'multi_tensor_applier and multi_tensor_scale' + ) + + from megatron.core.utils import local_multi_tensor_applier, local_multi_tensor_scale - l2_norm_impl = local_multi_tensor_l2_norm + multi_tensor_applier = local_multi_tensor_applier multi_tensor_scale_impl = local_multi_tensor_scale from .. import parallel_state, tensor_parallel @@ -75,7 +78,7 @@ def _multi_tensor_copy_this_to_that( is not provided, we default back to simple loop copy to be compatible with bfloat16. """ - if overflow_buf: + if overflow_buf is not None: overflow_buf.fill_(0) # Scaling with factor `1.0` is equivalent to copy. multi_tensor_applier(multi_tensor_scale_impl, overflow_buf, [this, that], 1.0) @@ -137,10 +140,24 @@ def get_main_grads_for_grad_norm(self) -> List[torch.Tensor]: return grads_for_norm - def get_model_parallel_group(self) -> torch.distributed.ProcessGroup: - """Default returned here, but the distributed optimizer overrides this.""" + def get_grad_stats_parallel_group(self) -> torch.distributed.ProcessGroup: + """Process group for reducing gradient statistics (num_zeros & norm). + + The two most common cases are: + - Non-distributed optimizer (default): Return the model-parallel group. 
+ - Distributed optimizer (overridden in distrib_optimizer.py): Return the entire world. + """ if hasattr(self, 'model_parallel_group'): - return self.model_parallel_group + warnings.warn( + "WARNING: `optimizer.model_parallel_group` deprecated and renamed to " + "`optimizer.grad_stats_parallel_group`. The previous name will be " + "removed in a future release." + ) + self.grad_stats_parallel_group = self.model_parallel_group + delattr(self, "model_parallel_group") + return self.grad_stats_parallel_group + if hasattr(self, 'grad_stats_parallel_group'): + return self.grad_stats_parallel_group return parallel_state.get_model_parallel_group() @abstractmethod @@ -158,7 +175,7 @@ def get_grad_norm(self): """Compute and return grad norm.""" grads_for_norm = self.get_main_grads_for_grad_norm() total_norm = get_grad_norm_fp32( - grads_for_norm, model_parallel_group=self.get_model_parallel_group() + grads_for_norm, grad_stats_parallel_group=self.get_grad_stats_parallel_group() ) return total_norm @@ -167,7 +184,7 @@ def clip_grad_norm(self, clip_grad: float) -> float: params = self.get_parameters() grads_for_norm = self.get_main_grads_for_grad_norm() grad_norm = get_grad_norm_fp32( - grads_for_norm, model_parallel_group=self.get_model_parallel_group() + grads_for_norm, grad_stats_parallel_group=self.get_grad_stats_parallel_group() ) clip_grad_by_total_norm_fp32(params, clip_grad, grad_norm) return grad_norm @@ -175,7 +192,9 @@ def clip_grad_norm(self, clip_grad: float) -> float: def count_zeros(self) -> float: """Count number of zeros in model's gradients.""" params = self.get_parameters() - return count_zeros_fp32(params, model_parallel_group=self.get_model_parallel_group()) + return count_zeros_fp32( + params, grad_stats_parallel_group=self.get_grad_stats_parallel_group() + ) @abstractmethod def zero_grad(self, set_to_none: bool = True): @@ -354,7 +373,9 @@ def _unscale_main_grads_and_check_for_nan(self): # Update across all model parallel instances. torch.distributed.all_reduce( - self.found_inf, op=torch.distributed.ReduceOp.MAX, group=self.get_model_parallel_group() + self.found_inf, + op=torch.distributed.ReduceOp.MAX, + group=self.get_grad_stats_parallel_group(), ) # Check for nan. @@ -665,7 +686,7 @@ def load_state_dict(self, state_dict): optimizer_key = 'optimizer' if optimizer_key not in state_dict: optimizer_key = 'optimizer_state_dict' - logger.info('***WARNING*** loading optimizer from ' 'an old checkpoint ...') + logger.info('***WARNING*** loading optimizer from an old checkpoint ...') if 'common_step' in state_dict[optimizer_key]['state']: common_step = state_dict[optimizer_key]['state'].pop('common_step') self._restore_common_per_param_step(state_dict[optimizer_key], common_step) @@ -674,9 +695,7 @@ def load_state_dict(self, state_dict): # Grad scaler. if 'grad_scaler' not in state_dict: if self.config.fp16: - logger.info( - '***WARNING*** found an old checkpoint, will not ' 'load grad scaler ...' 
- ) + logger.info('***WARNING*** found an old checkpoint, will not load grad scaler ...') else: if self.grad_scaler: self.grad_scaler.load_state_dict(state_dict['grad_scaler']) @@ -738,7 +757,8 @@ def prepare_grads(self) -> bool: ) for param_group in self.optimizer.param_groups: for param in param_group['params']: - param.grad = param.main_grad + if hasattr(param, 'main_grad'): + param.grad = param.main_grad if timers is not None: timers('optimizer-copy-to-main-grad').stop() @@ -964,24 +984,6 @@ def step_with_ready_grads(self) -> bool: return success - def disable_pre_hook(self): - """Disable pre-hooks for underlying distributed optimizers.""" - warnings.warn( - "`ChainedOptimizer.disable_pre_hook` will be deprecated in a future release. " - "Use `DistributedDataParallel.disable_forward_pre_hook` directly." - ) - for model_chunk in self.model_chunks: - model_chunk.disable_forward_pre_hook() - - def enable_pre_hook(self): - """Enable pre-hooks for underlying distributed optimizers.""" - warnings.warn( - "`ChainedOptimizer.enable_pre_hook` will be deprecated in a future release. " - "Use `DistributedDataParallel.enable_forward_pre_hook` directly." - ) - for model_chunk in self.model_chunks: - model_chunk.enable_forward_pre_hook() - @torch.no_grad() def step(self): """ChainedOptimizer will step all optimizers one by one.""" diff --git a/megatron/core/parallel_state.py b/megatron/core/parallel_state.py index e9043b647..823bc9072 100644 --- a/megatron/core/parallel_state.py +++ b/megatron/core/parallel_state.py @@ -20,7 +20,6 @@ # Model parallel group (both intra- and pipeline) that the current rank belongs to. _MODEL_PARALLEL_GROUP = None # Model parallel group (both intra-, pipeline, and expert) that the current rank belongs to. -_MODEL_AND_EXPERT_PARALLEL_GROUP = None # Embedding group. _EMBEDDING_GROUP = None # Position embedding group. @@ -31,14 +30,31 @@ # tensor model parallel group and data parallel group combined # used for fp8 and moe training _TENSOR_AND_DATA_PARALLEL_GROUP = None -# Expert parallel group that the current rank belongs to. -_EXPERT_MODEL_PARALLEL_GROUP = None -_TENSOR_AND_EXPERT_PARALLEL_GROUP = None -_DATA_MODULO_EXPERT_PARALLEL_GROUP = None -_DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO = None -_DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP = None -_DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO = None +### Expert-related parallel states +# Naming convention: +# _EXPERT prefix in group name means it's used for expert layer in MoE models. +# _EXPERT_MODEL denotes expert parallelism which splits number of experts across the group. +# _EXPERT_TENSOR denotes tensor parallelism of expert which splits tensor across the group. +# _EXPERT_DATA denotes data parallelism of expert which replicates weight across the group. + +# Expert model parallel group that current rank belongs to. +_EXPERT_MODEL_PARALLEL_GROUP = None +# Expert tensor parallel group that current rank belongs to. 
+_EXPERT_TENSOR_PARALLEL_GROUP = None +# Expert tensor and model combined parallel group +_EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP = None +# Expert tensor, model, pipeline combined parallel group +_EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP = None +# Expert data parallel group +_EXPERT_DATA_PARALLEL_GROUP = None +_EXPERT_DATA_PARALLEL_GROUP_GLOO = None +# Parallel state values changed on the fly +_MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE = None +_MPU_EXPERT_MODEL_PARALLEL_RANK = None +_MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE = None +_MPU_EXPERT_TENSOR_PARALLEL_RANK = None +### End of expert related parallel states _VIRTUAL_PIPELINE_MODEL_PARALLEL_RANK = None _VIRTUAL_PIPELINE_MODEL_PARALLEL_WORLD_SIZE = None @@ -49,12 +65,10 @@ # These values enable us to change the mpu sizes on the fly. _MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = None _MPU_PIPELINE_MODEL_PARALLEL_WORLD_SIZE = None -_MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE = None _MPU_DATA_PARALLEL_WORLD_SIZE = None _MPU_DATA_PARALLEL_RANK = None _MPU_TENSOR_MODEL_PARALLEL_RANK = None _MPU_PIPELINE_MODEL_PARALLEL_RANK = None -_MPU_EXPERT_MODEL_PARALLEL_RANK = None # A list of ranks that have a copy of the embedding. _EMBEDDING_GLOBAL_RANKS = None @@ -74,17 +88,28 @@ # the first local rank in the tensor model parallel group _TENSOR_MODEL_PARALLEL_GLOBAL_RANKS = None +# A list of global ranks for each model parallel group to ease calculation of +# the first local rank in the model parallel group +_MODEL_PARALLEL_GLOBAL_RANKS = None + # Context parallel group that the current rank belongs to _CONTEXT_PARALLEL_GROUP = None # A list of global ranks for each context parallel group to ease calculation of the # destination rank when exchanging KV/dKV between context parallel_ranks _CONTEXT_PARALLEL_GLOBAL_RANKS = None +# Hierarchical context parallel groups +_HIERARCHICAL_CONTEXT_PARALLEL_GROUPS = [] # Data parallel group information with context parallel combined. _DATA_PARALLEL_GROUP_WITH_CP = None _DATA_PARALLEL_GROUP_WITH_CP_GLOO = None _DATA_PARALLEL_GLOBAL_RANKS_WITH_CP = None +# Partial Data parallel group information with context parallel combined. +_INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP = None +_INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP_GLOO = None +_INTER_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP = None + # combined parallel group of TP and CP _TENSOR_AND_CONTEXT_PARALLEL_GROUP = None @@ -120,7 +145,7 @@ def get_nccl_options(pg_name, nccl_comm_cfgs): def generate_masked_orthogonal_rank_groups( world_size: int, parallel_size: List[int], mask: List[bool] ) -> List[List[int]]: - """Generate orthogonal parallel groups based on the parallel size and mask. + r"""Generate orthogonal parallel groups based on the parallel size and mask. Arguments: world_size (int): world size @@ -181,15 +206,15 @@ def inner_product(a: List[int], b: List[int]) -> int: return sum([x * y for x, y in zip(a, b)]) def decompose(index, shape, stride=None): - ''' + """ This function solve the math problem below: There is an equation: index = sum(idx[i] * stride[i]) And given the value of index, stride. Return the idx. - This function will used to get the pp/dp/pp_rank + This function will be used to get the pp/dp/pp_rank from group_index and rank_in_group. 
- ''' + """ if stride is None: stride = prefix_product(shape) idx = [(index // d) % s for s, d in zip(shape, stride)] @@ -226,19 +251,58 @@ def decompose(index, shape, stride=None): return ranks +def create_hierarchical_parallel_groups( + rank, ranks, group_size, hierarchical_group_sizes, pg_options +): + """Create hierarchical groups for one parallelism. + Taking a group size of 16 as an example, we have a total of 16 GPUs denoted by g0 ... g15. + If the hierarchical group sizes are [2,2,4], we use 2 GPUs in the first and second level + of sub-groups, and 4 GPUs in the last level of sub-groups. The present function will + create 8 level-1 sub-groups, 8 level-2 sub-groups and 4 level-3 sub-groups as: + 8 level-1 sub-groups: + [g0, g1], [g2, g3], [g4, g5], [g6, g7], [g8, g9], [g10, g11], [g12, g13], [g14, g15] + 8 level-2 sub-groups: + [g0, g2], [g1, g3], [g4, g6], [g5, g7], [g8, g10], [g9, g11], [g12, g14], [g13, g15] + 4 level-3 sub-groups: + [g0, g4, g8, g12], [g1, g5, g9, g13], [g2, g6, g10, g14], [g3, g7, g11, g15] + """ + + hierarchical_groups = [] + accumulated_group_sizes = 1 + processed_group_sizes = 1 + for hierarchical_group_size in hierarchical_group_sizes: + accumulated_group_sizes *= hierarchical_group_size + for k in range(group_size // accumulated_group_sizes): + for j in range(processed_group_sizes): + global_sub_ranks = [ + ranks[j + i * processed_group_sizes + k * accumulated_group_sizes] + for i in range(hierarchical_group_size) + ] + sub_group = torch.distributed.new_group(global_sub_ranks, pg_options=pg_options) + if rank in global_sub_ranks: + hierarchical_groups.append(sub_group) + processed_group_sizes *= hierarchical_group_size + return hierarchical_groups + + class RankGenerator(object): """A class for generating rank groups for different modes of parallelism.""" def __init__( self, tp: int, ep: int, dp: int, pp: int, cp: int, order: str, rank_offset: int = 0 ) -> None: + assert ( + ep == 1 or cp == 1 + ), "Both EP and CP > 1 is not allowed in one rank generator. \ + CP is only included in the default RankGenerator, and EP only in the expert RankGenerator." + self.tp = tp self.ep = ep self.dp = dp self.pp = pp self.cp = cp self.rank_offset = rank_offset - self.world_size = tp * dp * pp * cp + self.world_size = tp * dp * pp * cp * ep self.name_to_size = { "tp": self.tp, @@ -250,10 +314,6 @@ def __init__( self.order = order order = order.lower() - if 'ep' in order: - if 'ep-dp' not in order and 'dp-ep' not in order: - raise RuntimeError(f"The ep and dp must be adjacent in order ({self.order}).") - for name in self.name_to_size.keys(): if name not in order and self.name_to_size[name] != 1: raise RuntimeError( @@ -263,20 +323,11 @@ def __init__( elif name not in order: order = order + '-' + name - self.order_w_ep = order - self.order_wo_ep = '-'.join([token for token in order.split('-') if token != 'ep']) - self.ordered_size_wo_ep = [] - self.ordered_size_w_ep = [] + self.order = order + self.ordered_size = [] for token in order.split('-'): - if token == 'dp': - self.ordered_size_w_ep.append(self.dp // self.ep) - self.ordered_size_wo_ep.append(self.dp) - elif token == 'ep': - self.ordered_size_w_ep.append(self.ep) - else: - self.ordered_size_w_ep.append(self.name_to_size[token]) - self.ordered_size_wo_ep.append(self.name_to_size[token]) + self.ordered_size.append(self.name_to_size[token]) def get_mask(self, order: str, token: str): """Create a mask for the specified tokens based on the given order.
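The 16-GPU, [2, 2, 4] example in the docstring above can be reproduced with a standalone sketch of the rank-indexing math (no torch.distributed groups are created; the helper name is ours):

def hierarchical_subgroups(ranks, hierarchical_group_sizes):
    # Same index arithmetic as create_hierarchical_parallel_groups, returning rank lists only.
    groups = []
    accumulated, processed = 1, 1
    for size in hierarchical_group_sizes:
        accumulated *= size
        for k in range(len(ranks) // accumulated):
            for j in range(processed):
                groups.append([ranks[j + i * processed + k * accumulated] for i in range(size)])
        processed *= size
    return groups

subgroups = hierarchical_subgroups(list(range(16)), [2, 2, 4])
assert subgroups[0] == [0, 1]          # first level-1 sub-group
assert subgroups[8] == [0, 2]          # first level-2 sub-group
assert subgroups[16] == [0, 4, 8, 12]  # first level-3 sub-group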
@@ -287,13 +338,13 @@ def get_mask(self, order: str, token: str): separated by hyphens (e.g., 'tp-dp'). """ ordered_token = order.split('-') - token = token.split('-') + token_list = token.split('-') mask = [False] * len(ordered_token) - for t in token: + for t in token_list: mask[ordered_token.index(t)] = True return mask - def get_ranks(self, token, independent_ep=False): + def get_ranks(self, token): """Get rank group by input token. Args: @@ -302,22 +353,9 @@ def get_ranks(self, token, independent_ep=False): to obtain multiple parallel types, we can use a hyphen '-' to separate them. For example, if we want to obtain the TP_DP group, the token should be 'tp-dp'. - - independent_ep (bool: True): - This flag controls whether we treat EP and DP independently. - EP shares ranks with DP, if we want to get ranks related to - EP, we should set the flag. For example, get_ranks('dp', True) - will get DP modulo EP group, and get_ranks('dp', False) will - get full DP group. """ - if independent_ep: - parallel_size = self.ordered_size_w_ep - order = self.order_w_ep - else: - parallel_size = self.ordered_size_wo_ep - order = self.order_wo_ep - mask = self.get_mask(order, token) - ranks = generate_masked_orthogonal_rank_groups(self.world_size, parallel_size, mask) + mask = self.get_mask(self.order, token) + ranks = generate_masked_orthogonal_rank_groups(self.world_size, self.ordered_size, mask) if self.rank_offset > 0: for rank_group in ranks: for i in range(len(rank_group)): @@ -356,11 +394,14 @@ def initialize_model_parallel( pipeline_model_parallel_split_rank: Optional[int] = None, use_sharp: bool = False, context_parallel_size: int = 1, + hierarchical_context_parallel_sizes: Optional[List[int]] = None, expert_model_parallel_size: int = 1, + num_distributed_optimizer_instances: int = 1, + expert_tensor_parallel_size: Optional[int] = None, nccl_communicator_config_path: Optional[str] = None, distributed_timeout_minutes: int = 30, order: str = "tp-cp-ep-dp-pp", - encoder_tensor_model_parallel_size: Optional[int] = 0, + encoder_tensor_model_parallel_size: int = 0, encoder_pipeline_model_parallel_size: Optional[int] = 0, get_embedding_ranks: Optional[Callable[[List[int], Optional[int]], List[int]]] = None, get_position_embedding_ranks: Optional[Callable[[List[int], Optional[int]], List[int]]] = None, @@ -438,6 +479,13 @@ def initialize_model_parallel( The number of Mixture of Experts parallel GPUs in each expert parallel group. + num_distributed_optimizer_instances (int, default = 1): + The number of distributed optimizer replicas across the data- + parallel domain. + + expert_tensor_parallel_size (int, default = tp_size): + The number of GPUs to split individual tensors of expert. + nccl_communicator_config_path (str, default = None): Path to the yaml file of NCCL communicator configurations. `min_ctas`, `max_ctas`, and `cga_cluster_size` can be set @@ -513,7 +561,6 @@ def initialize_model_parallel( world_size: int = torch.distributed.get_world_size() if encoder_tensor_model_parallel_size > 0: - assert encoder_pipeline_model_parallel_size > 0 assert ( encoder_tensor_model_parallel_size <= tensor_model_parallel_size ), "We do not support encoders with more TP than the decoder." 
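A standalone sketch of the mask construction used by RankGenerator.get_mask above, with the default order 'tp-cp-ep-dp-pp' used elsewhere in this patch:

def build_mask(order, token):
    # Mark which axes of the parallelism hyper-rectangle the requested group spans.
    ordered_token = order.split('-')
    token_list = token.split('-')
    mask = [False] * len(ordered_token)
    for t in token_list:
        mask[ordered_token.index(t)] = True
    return mask

assert build_mask('tp-cp-ep-dp-pp', 'tp-pp') == [True, False, False, False, True]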
@@ -533,12 +580,6 @@ def initialize_model_parallel( data_parallel_size: int = world_size // total_model_size - if data_parallel_size % expert_model_parallel_size != 0: - raise RuntimeError( - f"data_parallel_size ({data_parallel_size}) is not divisible by " - "expert_model_parallel_size " - ) - encoder_world_size = encoder_model_size * data_parallel_size decoder_world_size = decoder_model_size * data_parallel_size @@ -590,7 +631,7 @@ def initialize_model_parallel( decoder_rank_generator = RankGenerator( tp=tensor_model_parallel_size, - ep=expert_model_parallel_size, + ep=1, dp=data_parallel_size, pp=pipeline_model_parallel_size, cp=context_parallel_size, @@ -598,13 +639,45 @@ def initialize_model_parallel( rank_offset=encoder_world_size, ) - def generator_wrapper(group_type, **kwargs): + # Build expert rank generator + if expert_tensor_parallel_size is None: + expert_tensor_parallel_size = tensor_model_parallel_size + expert_tensor_model_pipeline_parallel_size = ( + expert_tensor_parallel_size * expert_model_parallel_size * pipeline_model_parallel_size + ) + expert_data_parallel_size = decoder_world_size // expert_tensor_model_pipeline_parallel_size + if decoder_world_size % expert_tensor_model_pipeline_parallel_size != 0: + raise RuntimeError( + f"decoder world_size ({decoder_world_size}) is not divisible by expert_tensor_model_pipeline_parallel size ({expert_tensor_model_pipeline_parallel_size})" + ) + + # TODO: support expert specific ordering + expert_decoder_rank_generator = RankGenerator( + tp=expert_tensor_parallel_size, + ep=expert_model_parallel_size, + dp=expert_data_parallel_size, + pp=pipeline_model_parallel_size, + cp=1, + order=order, + rank_offset=encoder_world_size, + ) + + assert decoder_rank_generator.get_ranks("pp") == expert_decoder_rank_generator.get_ranks( + "pp" + ), f"Pipeline parallel groups are expected to be the same for Non-Expert and Expert part, \ + but got {decoder_rank_generator.get_ranks('pp')} and {expert_decoder_rank_generator.get_ranks('pp')}" + + def generator_wrapper(group_type, is_expert=False, **kwargs): """The `RankGenerator` class produces a hyper-rectangle for a given set of tensor, pipeline, data, expert, and context parallelism. If we have an encoder, in addition to the default decoder, we essentially instantiate two `RankGenerator` classes to construct the parallelism for each module separately, and we then have to stitch them together for the right groups. 
For now, this means pp and tp-pp.""" - d_ranks = decoder_rank_generator.get_ranks(group_type, **kwargs) + if is_expert: + d_ranks = expert_decoder_rank_generator.get_ranks(group_type, **kwargs) + else: + d_ranks = decoder_rank_generator.get_ranks(group_type, **kwargs) + if encoder_rank_generator is None: for x in d_ranks: yield x @@ -636,6 +709,9 @@ def generator_wrapper(group_type, **kwargs): global _DATA_PARALLEL_GROUP_WITH_CP global _DATA_PARALLEL_GROUP_WITH_CP_GLOO global _DATA_PARALLEL_GLOBAL_RANKS_WITH_CP + global _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP + global _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP_GLOO + global _INTER_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP assert _DATA_PARALLEL_GROUP is None, 'data parallel group is already initialized' for ranks in generator_wrapper('dp'): @@ -648,6 +724,11 @@ def generator_wrapper(group_type, **kwargs): _DATA_PARALLEL_GROUP_GLOO = group_gloo _DATA_PARALLEL_GLOBAL_RANKS = ranks + assert ( + data_parallel_size % num_distributed_optimizer_instances == 0 + ), 'Data parallel size should be divisible by partial DistOpt shard factor' + intra_partial_data_parallel_size = data_parallel_size // num_distributed_optimizer_instances + for ranks_with_cp in generator_wrapper('dp-cp'): group_with_cp = torch.distributed.new_group( ranks_with_cp, timeout=timeout, pg_options=get_nccl_options('dp_cp', nccl_comm_cfgs) @@ -655,11 +736,58 @@ def generator_wrapper(group_type, **kwargs): group_with_cp_gloo = torch.distributed.new_group( ranks_with_cp, timeout=timeout, backend="gloo" ) + if rank in ranks_with_cp: _DATA_PARALLEL_GROUP_WITH_CP = group_with_cp _DATA_PARALLEL_GROUP_WITH_CP_GLOO = group_with_cp_gloo _DATA_PARALLEL_GLOBAL_RANKS_WITH_CP = ranks_with_cp + if num_distributed_optimizer_instances > 1: + # Create groups for Partial DistOpt, one for intra-partial DP domain + # Another for inter-partial DP domain + for i in range(num_distributed_optimizer_instances): + intra_partial_data_parallel_ranks_with_cp = ranks_with_cp[ + (i * intra_partial_data_parallel_size) : ( + (i + 1) * intra_partial_data_parallel_size + ) + ] + + intra_partial_data_parallel_group_with_cp = torch.distributed.new_group( + intra_partial_data_parallel_ranks_with_cp, + timeout=timeout, + pg_options=get_nccl_options('dp_cp', nccl_comm_cfgs), + ) + intra_partial_data_parallel_group_with_cp_gloo = torch.distributed.new_group( + intra_partial_data_parallel_ranks_with_cp, timeout=timeout, backend="gloo" + ) + + if rank in intra_partial_data_parallel_ranks_with_cp: + _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP = ( + intra_partial_data_parallel_group_with_cp + ) + _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP_GLOO = ( + intra_partial_data_parallel_group_with_cp_gloo + ) + + for i in range(intra_partial_data_parallel_size): + inter_partial_data_parallel_ranks_with_cp = ranks_with_cp[ + i::intra_partial_data_parallel_size + ] + + inter_partial_data_parallel_group_with_cp = torch.distributed.new_group( + inter_partial_data_parallel_ranks_with_cp, + timeout=timeout, + pg_options=get_nccl_options('dp_cp', nccl_comm_cfgs), + ) + + if rank in inter_partial_data_parallel_ranks_with_cp: + _INTER_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP = ( + inter_partial_data_parallel_group_with_cp + ) + else: + _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP = _DATA_PARALLEL_GROUP_WITH_CP + _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP_GLOO = _DATA_PARALLEL_GROUP_WITH_CP_GLOO + # Apply SHARP to DP process groups if use_sharp: if rank == 0: @@ -691,9 +819,19 @@ def generator_wrapper(group_type, **kwargs): if rank in ranks: 
_CONTEXT_PARALLEL_GROUP = group _CONTEXT_PARALLEL_GLOBAL_RANKS = ranks + if hierarchical_context_parallel_sizes: + global _HIERARCHICAL_CONTEXT_PARALLEL_GROUPS + _HIERARCHICAL_CONTEXT_PARALLEL_GROUPS += create_hierarchical_parallel_groups( + rank, + ranks, + context_parallel_size, + hierarchical_context_parallel_sizes, + get_nccl_options('cp', nccl_comm_cfgs), + ) # Build the model-parallel groups. global _MODEL_PARALLEL_GROUP + global _MODEL_PARALLEL_GLOBAL_RANKS assert _MODEL_PARALLEL_GROUP is None, 'model parallel group is already initialized' for ranks in generator_wrapper('tp-pp'): group = torch.distributed.new_group( @@ -701,18 +839,7 @@ def generator_wrapper(group_type, **kwargs): ) if rank in ranks: _MODEL_PARALLEL_GROUP = group - - # Build the model-parallel groups with expert parallel - global _MODEL_AND_EXPERT_PARALLEL_GROUP - assert ( - _MODEL_AND_EXPERT_PARALLEL_GROUP is None - ), 'model and expert parallel group is already initialized' - for ranks in generator_wrapper('tp-ep-pp', independent_ep=True): - group = torch.distributed.new_group( - ranks, timeout=timeout, pg_options=get_nccl_options('mp_exp', nccl_comm_cfgs) - ) - if rank in ranks: - _MODEL_AND_EXPERT_PARALLEL_GROUP = group + _MODEL_PARALLEL_GLOBAL_RANKS = ranks # Build the tensor model-parallel groups. global _TENSOR_MODEL_PARALLEL_GROUP @@ -804,62 +931,68 @@ def generator_wrapper(group_type, **kwargs): if rank in ranks: _TENSOR_AND_CONTEXT_PARALLEL_GROUP = group - # Build the tensor + expert parallel groups + ### Expert-related parallel groups initialization + # Build the expert model parallel group global _EXPERT_MODEL_PARALLEL_GROUP assert _EXPERT_MODEL_PARALLEL_GROUP is None, 'Expert parallel group is already initialized' - global _TENSOR_AND_EXPERT_PARALLEL_GROUP - assert ( - _TENSOR_AND_EXPERT_PARALLEL_GROUP is None - ), 'Tensor + expert parallel group is already initialized' - global _DATA_MODULO_EXPERT_PARALLEL_GROUP - assert ( - _DATA_MODULO_EXPERT_PARALLEL_GROUP is None - ), 'Data modulo expert group is already initialized' - global _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP + for ranks in generator_wrapper('ep', is_expert=True): + group = torch.distributed.new_group( + ranks, pg_options=get_nccl_options('exp', nccl_comm_cfgs) + ) + if rank in ranks: + _EXPERT_MODEL_PARALLEL_GROUP = group + + # Build the expert tensor parallel group + global _EXPERT_TENSOR_PARALLEL_GROUP assert ( - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP is None - ), 'Data modulo expert group with context parallel is already initialized' - global _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO - global _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO + _EXPERT_TENSOR_PARALLEL_GROUP is None + ), 'Expert tensor model parallel group is already initialized' + for ranks in generator_wrapper('tp', is_expert=True): + group = torch.distributed.new_group( + ranks, timeout=timeout, pg_options=get_nccl_options('tp', nccl_comm_cfgs) + ) + if rank in ranks: + _EXPERT_TENSOR_PARALLEL_GROUP = group - for ranks in generator_wrapper('tp-ep', independent_ep=True): + # Build the tensor + expert parallel groups + global _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP + assert ( + _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP is None + ), 'Expert tensor + model parallel group is already initialized' + for ranks in generator_wrapper('tp-ep', is_expert=True): group = torch.distributed.new_group( ranks, timeout=timeout, pg_options=get_nccl_options('tp_exp', nccl_comm_cfgs) ) if rank in ranks: - _TENSOR_AND_EXPERT_PARALLEL_GROUP = group + _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP = 
group - for ranks in generator_wrapper('ep', independent_ep=True): + # Build the expert+tensor+pipeline parallel groups + global _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP + assert ( + _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP is None + ), 'The expert_tensor_model_pipeline parallel group is already initialized' + for ranks in generator_wrapper('tp-ep-pp', is_expert=True): group = torch.distributed.new_group( - ranks, pg_options=get_nccl_options('exp', nccl_comm_cfgs) + ranks, timeout=timeout, pg_options=get_nccl_options('mp', nccl_comm_cfgs) ) if rank in ranks: - _EXPERT_MODEL_PARALLEL_GROUP = group + _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP = group + + # Build the expert data parallel group + global _EXPERT_DATA_PARALLEL_GROUP + assert _EXPERT_DATA_PARALLEL_GROUP is None, 'Expert data group is already initialized' + global _EXPERT_DATA_PARALLEL_GROUP_GLOO + assert _EXPERT_DATA_PARALLEL_GROUP_GLOO is None, 'Expert data group-gloo is already initialized' - for ranks in generator_wrapper('dp', independent_ep=True): + for ranks in generator_wrapper('dp', is_expert=True): group = torch.distributed.new_group( - ranks, timeout=timeout, pg_options=get_nccl_options('dp_modulo_exp', nccl_comm_cfgs) + ranks, timeout=timeout, pg_options=get_nccl_options('dp', nccl_comm_cfgs) ) group_gloo = torch.distributed.new_group(ranks, backend="gloo") if rank in ranks: - _DATA_MODULO_EXPERT_PARALLEL_GROUP = group - _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO = group_gloo - - for ranks in generator_wrapper('dp-cp', independent_ep=True): - # Lazy initialization of the group - if get_context_parallel_world_size() > 1: - group = torch.distributed.new_group( - ranks, - timeout=timeout, - pg_options=get_nccl_options('dp_modulo_exp_cp', nccl_comm_cfgs), - ) - group_gloo = torch.distributed.new_group(ranks, backend="gloo") - else: - group = _DATA_MODULO_EXPERT_PARALLEL_GROUP - group_gloo = _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO - if rank in ranks: - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP = group - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO = group_gloo + _EXPERT_DATA_PARALLEL_GROUP = group + _EXPERT_DATA_PARALLEL_GROUP_GLOO = group_gloo + ### End of expert related parallel groups initialization # Initialize global memory buffer # This isn't really "parallel state" but there isn't another good place to @@ -894,13 +1027,8 @@ def model_parallel_is_initialized(): return True -def get_model_parallel_group(with_expert_parallel=False): +def get_model_parallel_group(): """Get the model-parallel group the caller rank belongs to.""" - if with_expert_parallel: - assert ( - _MODEL_AND_EXPERT_PARALLEL_GROUP is not None - ), 'model parallel group is not initialized' - return _MODEL_AND_EXPERT_PARALLEL_GROUP assert _MODEL_PARALLEL_GROUP is not None, 'model parallel group is not initialized' return _MODEL_PARALLEL_GROUP @@ -922,30 +1050,50 @@ def get_pipeline_model_parallel_group(): return _PIPELINE_MODEL_PARALLEL_GROUP -def get_data_parallel_group(with_context_parallel=False): +def get_data_parallel_group(with_context_parallel=False, partial_data_parallel=False): """Get the data-parallel group the caller rank belongs to.""" if with_context_parallel: + if partial_data_parallel: + assert ( + _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP is not None + ), 'Intra partial data parallel group is not initialized' + return _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP assert ( _DATA_PARALLEL_GROUP_WITH_CP is not None ), 'data parallel group with context parallel combined is not initialized' return _DATA_PARALLEL_GROUP_WITH_CP else: 
assert _DATA_PARALLEL_GROUP is not None, 'data parallel group is not initialized' + assert partial_data_parallel == False, 'Partial DP for Optimizer needs to include CP' return _DATA_PARALLEL_GROUP -def get_data_parallel_group_gloo(with_context_parallel=False): +def get_data_parallel_group_gloo(with_context_parallel=False, partial_data_parallel=False): """Get the Gloo data-parallel group the caller rank belongs to.""" if with_context_parallel: + if partial_data_parallel: + assert ( + _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP_GLOO is not None + ), 'Intra partial data parallel group is not initialized' + return _INTRA_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP_GLOO assert ( _DATA_PARALLEL_GROUP_WITH_CP_GLOO is not None ), 'data parallel group-gloo with context parallel combined is not initialized' return _DATA_PARALLEL_GROUP_WITH_CP_GLOO else: assert _DATA_PARALLEL_GROUP_GLOO is not None, 'data parallel group-gloo is not initialized' + assert partial_data_parallel == False, 'Partial DP for Optimizer needs to include CP' return _DATA_PARALLEL_GROUP_GLOO +def get_inter_partial_data_parallel_group(): + """Get the group spanning the different partial data-parallel groups.""" + assert ( + _INTER_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP is not None + ), 'Inter partial data parallel group is not initialized' + return _INTER_PARTIAL_DATA_PARALLEL_GROUP_WITH_CP + + def get_context_parallel_group(check_initialized=True): """Get the context-parallel group the caller rank belongs to.""" if check_initialized: @@ -962,6 +1110,13 @@ def get_context_parallel_global_ranks(check_initialized=True): return _CONTEXT_PARALLEL_GLOBAL_RANKS +def get_hierarchical_context_parallel_groups(check_initialized=True): + """Get the inner ring of context parallel group the caller rank belongs to.""" + if check_initialized: + assert _HIERARCHICAL_CONTEXT_PARALLEL_GROUPS is not None + return _HIERARCHICAL_CONTEXT_PARALLEL_GROUPS + + def get_embedding_group(): """Get the embedding group the caller rank belongs to.""" assert _EMBEDDING_GROUP is not None, 'embedding group is not initialized' @@ -1022,56 +1177,6 @@ def get_tensor_and_context_parallel_group(): return _TENSOR_AND_CONTEXT_PARALLEL_GROUP -def get_expert_model_parallel_group(): - """Get the expert-model-parallel group the caller rank belongs to.""" - assert ( - _EXPERT_MODEL_PARALLEL_GROUP is not None - ), 'expert model parallel group is not initialized' - return _EXPERT_MODEL_PARALLEL_GROUP - - -def get_tensor_and_expert_parallel_group(): - """Get the tensor- and expert-parallel group the caller rank belongs to.""" - assert ( - _TENSOR_AND_EXPERT_PARALLEL_GROUP is not None - ), 'tensor and expert parallel group is not initialized' - return _TENSOR_AND_EXPERT_PARALLEL_GROUP - - -def get_data_modulo_expert_parallel_group(with_context_parallel=False): - """Get the data-modulo-expert-parallel group the caller rank belongs to.""" - if with_context_parallel: - assert ( - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP is not None - ), 'data modulo expert parallel group with context parallel is not initialized' - return _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP - else: - assert ( - _DATA_MODULO_EXPERT_PARALLEL_GROUP is not None - ), 'data modulo expert parallel group is not initialized' - return _DATA_MODULO_EXPERT_PARALLEL_GROUP - - -def get_data_modulo_expert_parallel_group_gloo(with_context_parallel=False): - """Get the Gloo data-modulo-expert-parallel group the caller rank belongs to.""" - if with_context_parallel: - assert ( - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO is not 
None - ), 'data modulo expert parallel group-gloo with context parallel is not initialized' - return _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO - else: - assert ( - _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO is not None - ), 'data modulo expert parallel group-gloo is not initialized' - return _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO - - -def set_expert_model_parallel_world_size(world_size): - """Sets the expert-model-parallel world size.""" - global _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE - _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE = world_size - - def set_tensor_model_parallel_world_size(world_size): """Set the tensor-model-parallel size""" global _MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE @@ -1116,12 +1221,6 @@ def get_pipeline_model_parallel_world_size(): return torch.distributed.get_world_size(group=pp_group) -def set_expert_model_parallel_rank(rank): - """Set expert-model-parallel rank.""" - global _MPU_EXPERT_MODEL_PARALLEL_RANK - _MPU_EXPERT_MODEL_PARALLEL_RANK = rank - - def set_tensor_model_parallel_rank(rank): """Set tensor-model-parallel rank.""" global _MPU_TENSOR_MODEL_PARALLEL_RANK @@ -1255,22 +1354,30 @@ def is_pipeline_stage_after_split(rank=None): return False -def is_inside_encoder(rank=None): - """Return True if pipeline stage executes encoder block for a model - with both encoder and decoder.""" +def is_inside_encoder(rank=None) -> bool: + """Return True if pipeline stage executes encoder block. + This function implicitly assumes we have a model with both + encoder and decoder.""" if get_pipeline_model_parallel_world_size() == 1: return True if rank is None: rank = get_pipeline_model_parallel_rank() global _PIPELINE_MODEL_PARALLEL_DECODER_START - if _PIPELINE_MODEL_PARALLEL_DECODER_START is None: + # _PIPELINE_MODEL_PARALLEL_DECODER_START == None means that the + # encoder shares the first pipeline rank with the decoder + if _PIPELINE_MODEL_PARALLEL_DECODER_START is None and rank == 0: return True - if rank < _PIPELINE_MODEL_PARALLEL_DECODER_START: + # _PIPELINE_MODEL_PARALLEL_DECODER_START != None means that the + # encoder is on it's own pipeline ranks before the decoder + if ( + _PIPELINE_MODEL_PARALLEL_DECODER_START is not None + and rank < _PIPELINE_MODEL_PARALLEL_DECODER_START + ): return True return False -def is_inside_decoder(rank=None): +def is_inside_decoder(rank=None) -> bool: """Return True if pipeline stage executes decoder block for a model with both encoder and decoder.""" if get_pipeline_model_parallel_world_size() == 1: @@ -1285,6 +1392,12 @@ def is_inside_decoder(rank=None): return False +def get_pipeline_model_parallel_decoder_start() -> int: + """Return decoder start rank (if encoder pipeline parallelism is set).""" + global _PIPELINE_MODEL_PARALLEL_DECODER_START + return _PIPELINE_MODEL_PARALLEL_DECODER_START + + def is_pipeline_stage_at_split(): """Return true if pipeline stage executes decoder block and next stage executes encoder block for a model with both encoder and @@ -1320,6 +1433,13 @@ def get_tensor_model_parallel_src_rank(): return _TENSOR_MODEL_PARALLEL_GLOBAL_RANKS[0] +def get_model_parallel_src_rank(): + """Calculate the global rank corresponding to the first local rank + in the model parallel group.""" + assert _MODEL_PARALLEL_GLOBAL_RANKS is not None, "Model parallel group is not initialized" + return _MODEL_PARALLEL_GLOBAL_RANKS[0] + + def get_data_parallel_src_rank(with_context_parallel=False): """Calculate the global rank corresponding to the first local rank in the data parallel group.""" @@ -1388,14 +1508,17 @@ def 
get_pipeline_model_parallel_prev_rank(): return _PIPELINE_GLOBAL_RANKS[(rank_in_pipeline - 1) % world_size] -def get_data_parallel_world_size(with_context_parallel=False): +def get_data_parallel_world_size(with_context_parallel=False, partial_data_parallel=False): """Return world size for the data parallel group.""" global _MPU_DATA_PARALLEL_WORLD_SIZE if _MPU_DATA_PARALLEL_WORLD_SIZE is not None: return _MPU_DATA_PARALLEL_WORLD_SIZE if torch.distributed.is_available() and torch.distributed.is_initialized(): return torch.distributed.get_world_size( - group=get_data_parallel_group(with_context_parallel=with_context_parallel) + group=get_data_parallel_group( + with_context_parallel=with_context_parallel, + partial_data_parallel=partial_data_parallel, + ) ) else: return 0 @@ -1407,14 +1530,17 @@ def set_data_parallel_rank(rank): _MPU_DATA_PARALLEL_RANK = rank -def get_data_parallel_rank(with_context_parallel=False): +def get_data_parallel_rank(with_context_parallel=False, partial_data_parallel=False): """Return caller's rank in the data-parallel group.""" global _MPU_DATA_PARALLEL_RANK if _MPU_DATA_PARALLEL_RANK is not None: return _MPU_DATA_PARALLEL_RANK if torch.distributed.is_available() and torch.distributed.is_initialized(): return torch.distributed.get_rank( - group=get_data_parallel_group(with_context_parallel=with_context_parallel) + group=get_data_parallel_group( + with_context_parallel=with_context_parallel, + partial_data_parallel=partial_data_parallel, + ) ) else: return 0 @@ -1452,30 +1578,30 @@ def get_tensor_and_context_parallel_rank(): return 0 +### Expert-related parallel states functions +def get_expert_model_parallel_group(check_initialized=True): + """Get the expert-model-parallel group the caller rank belongs to.""" + if check_initialized: + assert ( + _EXPERT_MODEL_PARALLEL_GROUP is not None + ), 'expert model parallel group is not initialized' + return _EXPERT_MODEL_PARALLEL_GROUP + + def get_expert_model_parallel_world_size(): """Return world size for the expert-model-parallel group.""" if _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE is not None: return _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE if torch.distributed.is_available() and torch.distributed.is_initialized(): - tensor_and_expert_parallel_world_size = torch.distributed.get_world_size( - group=get_tensor_and_expert_parallel_group() - ) - return tensor_and_expert_parallel_world_size // get_tensor_model_parallel_world_size() + return torch.distributed.get_world_size(group=get_expert_model_parallel_group()) else: return 0 -def get_tensor_and_expert_parallel_world_size(): - """Return world size for the expert model parallel group times model parallel group. - Currently, each expert will also be distributed across TP group by default. 
- """ - if torch.distributed.is_available() and torch.distributed.is_initialized(): - tensor_and_expert_parallel_world_size = torch.distributed.get_world_size( - group=get_tensor_and_expert_parallel_group() - ) - return tensor_and_expert_parallel_world_size - else: - return 0 +def set_expert_model_parallel_world_size(world_size): + """Sets the expert-model-parallel world size.""" + global _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE + _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE = world_size def get_expert_model_parallel_rank(): @@ -1483,32 +1609,133 @@ def get_expert_model_parallel_rank(): if _MPU_EXPERT_MODEL_PARALLEL_RANK is not None: return _MPU_EXPERT_MODEL_PARALLEL_RANK if torch.distributed.is_available() and torch.distributed.is_initialized(): - tensor_and_expert_parallel_rank = torch.distributed.get_rank( - group=get_tensor_and_expert_parallel_group() - ) - return tensor_and_expert_parallel_rank // get_tensor_model_parallel_world_size() + return torch.distributed.get_rank(group=get_expert_model_parallel_group()) else: return 0 -def get_data_modulo_expert_parallel_rank(with_context_parallel=False): - """Return caller's rank in the context-parallel group.""" +def set_expert_model_parallel_rank(rank): + """Set expert-model-parallel rank.""" + global _MPU_EXPERT_MODEL_PARALLEL_RANK + _MPU_EXPERT_MODEL_PARALLEL_RANK = rank + + +def get_expert_tensor_parallel_group(check_initialized=True): + """Get the expert-tensor-parallel group the caller rank belongs to.""" + if check_initialized: + assert ( + _EXPERT_TENSOR_PARALLEL_GROUP is not None + ), 'Expert tensor parallel group is not initialized' + return _EXPERT_TENSOR_PARALLEL_GROUP + + +def get_expert_tensor_parallel_world_size(): + """Return world size for the expert tensor parallel group.""" + global _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE + if _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE is not None: + return _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE + # Use tensor parallel group world size for backward compability otherwise + if not _EXPERT_TENSOR_PARALLEL_GROUP: + return _MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE + else: + return torch.distributed.get_world_size(group=get_expert_tensor_parallel_group()) + + +def set_expert_tensor_parallel_world_size(world_size): + "Set expert tensor model parallel size" + global _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE + _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE = world_size + + +def get_expert_tensor_parallel_rank(): + """Return my rank for the expert tensor parallel group.""" + global _MPU_EXPERT_TENSOR_PARALLEL_RANK + if _MPU_EXPERT_TENSOR_PARALLEL_RANK is not None: + return _MPU_EXPERT_TENSOR_PARALLEL_RANK + # Use tensor parallel group rank for backward compability otherwise + if not _EXPERT_TENSOR_PARALLEL_GROUP: + return _MPU_TENSOR_MODEL_PARALLEL_RANK + else: + return torch.distributed.get_rank(group=get_expert_tensor_parallel_group()) + + +def set_expert_tensor_parallel_rank(rank): + "Set expert tensor model parallel rank" + global _MPU_EXPERT_TENSOR_PARALLEL_RANK + _MPU_EXPERT_TENSOR_PARALLEL_RANK = rank + + +def get_expert_tensor_and_model_parallel_group(check_initialized=True): + """Get the expert-tensor and expert-model group the caller rank belongs to.""" + if check_initialized: + assert ( + _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP is not None + ), 'Expert tensor and model parallel group is not initialized' + return _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP + + +def get_expert_tensor_and_model_parallel_world_size(): + """Return world size for the expert model parallel group times expert tensor parallel group.""" if 
torch.distributed.is_available() and torch.distributed.is_initialized(): - return torch.distributed.get_rank( - group=get_data_modulo_expert_parallel_group(with_context_parallel=with_context_parallel) + world_size = torch.distributed.get_world_size( + group=get_expert_tensor_and_model_parallel_group() ) + return world_size else: return 0 -def get_tensor_and_expert_parallel_rank(): +def get_expert_tensor_and_model_parallel_rank(): """Return caller's rank in the joint tensor- and expert-model-parallel group.""" if torch.distributed.is_available() and torch.distributed.is_initialized(): - return torch.distributed.get_rank(group=get_tensor_and_expert_parallel_group()) + return torch.distributed.get_rank(group=get_expert_tensor_and_model_parallel_group()) + else: + return 0 + + +def get_expert_tensor_model_pipeline_parallel_group(): + """Get expert tensor-model-pipeline parallel group.""" + assert ( + _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP is not None + ), 'Expert tensor-model-pipeline parallel group is not initialized' + return _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP + + +def get_expert_data_parallel_group(): + """Get expert data parallel group.""" + assert _EXPERT_DATA_PARALLEL_GROUP is not None, 'Expert data parallel group is not initialized' + return _EXPERT_DATA_PARALLEL_GROUP + + +def get_data_modulo_expert_parallel_group(): + """[Deprecated] Get expert data parallel group.""" + warnings.warn( + "get_data_modulo_expert_parallel_group is deprecated, please use " + "get_expert_data_parallel_group instead.", + DeprecationWarning, + ) + return get_expert_data_parallel_group() + + +def get_expert_data_parallel_group_gloo(): + """Get expert data parallel group-gloo.""" + assert ( + _EXPERT_DATA_PARALLEL_GROUP_GLOO is not None + ), 'Expert data parallel group-gloo is not initialized' + return _EXPERT_DATA_PARALLEL_GROUP_GLOO + + +def get_expert_data_parallel_rank(): + """Return caller's rank in the expert data parallel group.""" + if torch.distributed.is_available() and torch.distributed.is_initialized(): + return torch.distributed.get_rank(group=get_expert_data_parallel_group()) else: return 0 +### End of expert-related functions region + + def _set_global_memory_buffer(): """Initialize global buffer.""" global _GLOBAL_MEMORY_BUFFER @@ -1552,15 +1779,15 @@ def destroy_model_parallel(): global _MODEL_PARALLEL_GROUP _MODEL_PARALLEL_GROUP = None - global _MODEL_AND_EXPERT_PARALLEL_GROUP - _MODEL_AND_EXPERT_PARALLEL_GROUP = None - global _TENSOR_MODEL_PARALLEL_GROUP _TENSOR_MODEL_PARALLEL_GROUP = None global _PIPELINE_MODEL_PARALLEL_GROUP _PIPELINE_MODEL_PARALLEL_GROUP = None + global _PIPELINE_MODEL_PARALLEL_DECODER_START + _PIPELINE_MODEL_PARALLEL_DECODER_START = None + global _DATA_PARALLEL_GROUP _DATA_PARALLEL_GROUP = None @@ -1588,18 +1815,6 @@ def destroy_model_parallel(): global _TENSOR_AND_CONTEXT_PARALLEL_GROUP _TENSOR_AND_CONTEXT_PARALLEL_GROUP = None - global _EXPERT_MODEL_PARALLEL_GROUP - _EXPERT_MODEL_PARALLEL_GROUP = None - - global _TENSOR_AND_EXPERT_PARALLEL_GROUP - _TENSOR_AND_EXPERT_PARALLEL_GROUP = None - - global _DATA_MODULO_EXPERT_PARALLEL_GROUP - _DATA_MODULO_EXPERT_PARALLEL_GROUP = None - - global _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP = None - global _VIRTUAL_PIPELINE_MODEL_PARALLEL_RANK _VIRTUAL_PIPELINE_MODEL_PARALLEL_RANK = None @@ -1621,27 +1836,65 @@ def destroy_model_parallel(): global _GLOBAL_MEMORY_BUFFER _GLOBAL_MEMORY_BUFFER = None + global _DATA_PARALLEL_GROUP_GLOO + if ( + _DATA_PARALLEL_GROUP_GLOO is 
not None + and torch.distributed.distributed_c10d._world.pg_map.get(_DATA_PARALLEL_GROUP_GLOO, None) + is not None + ): + torch.distributed.destroy_process_group(_DATA_PARALLEL_GROUP_GLOO) + _DATA_PARALLEL_GROUP_GLOO = None + + global _DATA_PARALLEL_GROUP_WITH_CP_GLOO + if ( + _DATA_PARALLEL_GROUP_WITH_CP_GLOO is not None + and torch.distributed.distributed_c10d._world.pg_map.get( + _DATA_PARALLEL_GROUP_WITH_CP_GLOO, None + ) + is not None + ): + torch.distributed.destroy_process_group(_DATA_PARALLEL_GROUP_WITH_CP_GLOO) + _DATA_PARALLEL_GROUP_WITH_CP_GLOO = None + + # Destroy parallel state related to expert parallelism. + global _EXPERT_MODEL_PARALLEL_GROUP + _EXPERT_MODEL_PARALLEL_GROUP = None + global _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE _MPU_EXPERT_MODEL_PARALLEL_WORLD_SIZE = None global _MPU_EXPERT_MODEL_PARALLEL_RANK _MPU_EXPERT_MODEL_PARALLEL_RANK = None - global _DATA_PARALLEL_GROUP_GLOO - if _DATA_PARALLEL_GROUP_GLOO is not None: - torch.distributed.destroy_process_group(_DATA_PARALLEL_GROUP_GLOO) - _DATA_PARALLEL_GROUP_GLOO = None + global _EXPERT_TENSOR_PARALLEL_GROUP + _EXPERT_TENSOR_PARALLEL_GROUP = None - global _DATA_PARALLEL_GROUP_WITH_CP_GLOO - _DATA_PARALLEL_GROUP_WITH_CP_GLOO = None + global _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE + _MPU_EXPERT_TENSOR_PARALLEL_WORLD_SIZE = None + + global _MPU_EXPERT_TENSOR_PARALLEL_RANK + _MPU_EXPERT_TENSOR_PARALLEL_RANK = None + + global _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP + _EXPERT_TENSOR_AND_MODEL_PARALLEL_GROUP = None + + global _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP + _EXPERT_TENSOR_MODEL_PIPELINE_PARALLEL_GROUP = None - global _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO - if _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO is not None: - torch.distributed.destroy_process_group(_DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO) - _DATA_MODULO_EXPERT_PARALLEL_GROUP_GLOO = None + global _EXPERT_DATA_PARALLEL_GROUP + _EXPERT_DATA_PARALLEL_GROUP = None - global _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO - _DATA_MODULO_EXPERT_PARALLEL_GROUP_WITH_CP_GLOO = None + global _EXPERT_DATA_PARALLEL_GROUP_GLOO + if ( + _EXPERT_DATA_PARALLEL_GROUP_GLOO is not None + and torch.distributed.distributed_c10d._world.pg_map.get( + _EXPERT_DATA_PARALLEL_GROUP_GLOO, None + ) + is not None + ): + torch.distributed.destroy_process_group(_EXPERT_DATA_PARALLEL_GROUP_GLOO) + _EXPERT_DATA_PARALLEL_GROUP_GLOO = None + # End of expert parallelism destroy. global _MOE_LAYER_WISE_LOGGING_TRACKER _MOE_LAYER_WISE_LOGGING_TRACKER = {} diff --git a/megatron/core/pipeline_parallel/p2p_communication.py b/megatron/core/pipeline_parallel/p2p_communication.py index 3e33e7c2f..88aee8987 100644 --- a/megatron/core/pipeline_parallel/p2p_communication.py +++ b/megatron/core/pipeline_parallel/p2p_communication.py @@ -1,8 +1,6 @@ # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
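# A minimal standalone sketch (not part of the patch) of how the intra- and
# inter-partial data-parallel groups added to parallel_state.py above are laid out.
# Plain Python lists stand in for torch.distributed process groups, and the chunk
# size is derived from the rank list itself; in the patch the contiguous slice size
# is intra_partial_data_parallel_size = data_parallel_size // num_distributed_optimizer_instances.

def partition_dp_cp_ranks(ranks_with_cp, num_distributed_optimizer_instances):
    """Split one dp-cp rank list into intra-partial (contiguous) and inter-partial (strided) groups."""
    assert len(ranks_with_cp) % num_distributed_optimizer_instances == 0
    intra_size = len(ranks_with_cp) // num_distributed_optimizer_instances
    # Mirrors ranks_with_cp[i * intra_size : (i + 1) * intra_size] in the patch.
    intra_groups = [
        ranks_with_cp[i * intra_size : (i + 1) * intra_size]
        for i in range(num_distributed_optimizer_instances)
    ]
    # Mirrors ranks_with_cp[i::intra_size] in the patch.
    inter_groups = [ranks_with_cp[i::intra_size] for i in range(intra_size)]
    return intra_groups, inter_groups

# Example: a dp-cp group of 8 ranks with 2 distributed-optimizer instances.
intra, inter = partition_dp_cp_ranks(list(range(8)), 2)
assert intra == [[0, 1, 2, 3], [4, 5, 6, 7]]      # optimizer state is sharded within each instance
assert inter == [[0, 4], [1, 5], [2, 6], [3, 7]]  # the same shard is replicated across instances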
-import operator -from functools import reduce -from typing import Callable, List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union import torch @@ -166,8 +164,7 @@ def _p2p_ops( prev_pipeline_rank: int, next_pipeline_rank: int, ): - reqs = [] - rank = get_pipeline_model_parallel_rank() + reqs = {} even_send_odd_recv_group = group if get_pipeline_model_parallel_world_size() == 2: # Use the global process group for one of the two p2p communications @@ -183,50 +180,50 @@ def _p2p_ops( send_next_req = torch.distributed.isend( tensor=tensor_send_next, dst=next_pipeline_rank, group=even_send_odd_recv_group ) - reqs.append(send_next_req) + reqs["send_next"] = send_next_req if tensor_recv_prev is not None: recv_prev_req = torch.distributed.irecv( tensor=tensor_recv_prev, src=prev_pipeline_rank, group=even_recv_odd_send_group ) - reqs.append(recv_prev_req) + reqs["recv_prev"] = recv_prev_req if tensor_send_prev is not None: send_prev_req = torch.distributed.isend( tensor=tensor_send_prev, dst=prev_pipeline_rank, group=even_send_odd_recv_group ) - reqs.append(send_prev_req) + reqs["send_prev"] = send_prev_req if tensor_recv_next is not None: recv_next_req = torch.distributed.irecv( tensor=tensor_recv_next, src=next_pipeline_rank, group=even_recv_odd_send_group ) - reqs.append(recv_next_req) + reqs["recv_next"] = recv_next_req else: if tensor_recv_prev is not None: recv_prev_req = torch.distributed.irecv( tensor=tensor_recv_prev, src=prev_pipeline_rank, group=even_send_odd_recv_group ) - reqs.append(recv_prev_req) + reqs["recv_prev"] = recv_prev_req if tensor_send_next is not None: send_next_req = torch.distributed.isend( tensor=tensor_send_next, dst=next_pipeline_rank, group=even_recv_odd_send_group ) - reqs.append(send_next_req) + reqs["send_next"] = send_next_req if tensor_recv_next is not None: recv_next_req = torch.distributed.irecv( tensor=tensor_recv_next, src=next_pipeline_rank, group=even_send_odd_recv_group ) - reqs.append(recv_next_req) + reqs["recv_next"] = recv_next_req if tensor_send_prev is not None: send_prev_req = torch.distributed.isend( tensor=tensor_send_prev, dst=prev_pipeline_rank, group=even_recv_odd_send_group ) - reqs.append(send_prev_req) + reqs["send_prev"] = send_prev_req return reqs @@ -349,7 +346,10 @@ def _ring_exchange_wrapper(**kwargs): assert not isinstance(prev_rank, list) prev_rank = [prev_rank] - reqs = [] + if config.use_ring_exchange_p2p or config.batch_p2p_comm: + reqs = [] + else: + reqs = {} tensor_recv_prev_list = [] tensor_recv_next_list = [] @@ -366,20 +366,22 @@ def _ring_exchange_wrapper(**kwargs): else: tensor_recv_next = None - reqs.extend( - p2p_func( - tensor_send_prev=tensor_send_prev, - tensor_recv_prev=tensor_recv_prev, - tensor_send_next=tensor_send_next, - tensor_recv_next=tensor_recv_next, - group=group, - prev_pipeline_rank=pr, - next_pipeline_rank=nr, - ) + p2p_reqs = p2p_func( + tensor_send_prev=tensor_send_prev, + tensor_recv_prev=tensor_recv_prev, + tensor_send_next=tensor_send_next, + tensor_recv_next=tensor_recv_next, + group=group, + prev_pipeline_rank=pr, + next_pipeline_rank=nr, ) + if isinstance(p2p_reqs, list): + reqs.extend(p2p_reqs) + else: + reqs.update(p2p_reqs) if wait_on_reqs and len(reqs) > 0: - for req in reqs: + for req in reqs if isinstance(reqs, list) else reqs.values(): req.wait() reqs = None diff --git a/megatron/core/pipeline_parallel/schedules.py b/megatron/core/pipeline_parallel/schedules.py index f082dbc6d..ca18d4b2f 100644 --- a/megatron/core/pipeline_parallel/schedules.py +++ 
b/megatron/core/pipeline_parallel/schedules.py @@ -158,6 +158,7 @@ def custom_backward(output, grad_output): def set_current_microbatch(model, microbatch_id): + """Set the current microbatch.""" decoder_exists = True decoder = None try: @@ -395,6 +396,7 @@ def backward_step(input_tensor, output_tensor, output_tensor_grad, model_type, c def check_first_val_step(first_val_step, forward_only, cond): + """Check if it is the first validation step.""" if (first_val_step is not None) and forward_only: return first_val_step and cond else: @@ -498,6 +500,7 @@ def forward_backward_no_pipelining( def clear_embedding_activation_buffer(config, model): + """Clear embedding activation buffer.""" if ( parallel_state.is_pipeline_last_stage(ignore_virtual=True) @@ -519,6 +522,7 @@ def clear_embedding_activation_buffer(config, model): def finish_embedding_wgrad_compute(config, embedding_module): + """Finish embedding wgrad compute.""" if ( parallel_state.is_pipeline_last_stage(ignore_virtual=True) and config.defer_embedding_wgrad_compute @@ -553,6 +557,16 @@ def forward_backward_pipelining_with_interleaving( communication between pipeline stages as needed. Returns dictionary with losses if the last stage, empty dict otherwise.""" + + # Convention used in this function: + # num_microbatches for number of microbatches per pipeline stage; + # num_model_chunks for virtual pipeline size; + # then total_num_microbatches = num_microbatches * num_model_chunks. + # Their corresponding index variables are + # microbatch_id in [0, num_microbatches) + # model_chunk_id in [0, num_model_chunks) + # virtual_microbatch_id in [0, total_num_microbatches) + assert isinstance(model, list), "interleaved pipeline parallelism expected model chunking" assert all(isinstance(chunk, torch.nn.Module) for chunk in model), "invalid model chunking" assert isinstance( @@ -628,10 +642,26 @@ def enable_grad_sync(): pipeline_parallel_size = parallel_state.get_pipeline_model_parallel_world_size() pipeline_parallel_rank = parallel_state.get_pipeline_model_parallel_rank() - if num_microbatches % pipeline_parallel_size != 0: - msg = f'number of microbatches ({num_microbatches}) is not divisible by ' - msg += f'pipeline-model-parallel-size ({pipeline_parallel_size}) ' - msg += 'when using interleaved schedule' + if ( + config.microbatch_group_size_per_vp_stage > num_microbatches + or config.microbatch_group_size_per_vp_stage < pipeline_parallel_size + ): + msg = ( + 'The number of contiguous micro-batches in a virtual pipeline stage' + f'should range in [PP={pipeline_parallel_size} , M={num_microbatches}]' + ) + raise ValueError(msg) + + # If the final micro-batch group has fewer micro-batches than pipeline-parallel size, + # the pipeline will have dependency bubbles. + final_microbatch_group_size = num_microbatches % config.microbatch_group_size_per_vp_stage + if 0 < final_microbatch_group_size < pipeline_parallel_size: + msg = 'The remainder of M (the total micro-batches) divided by N (number of ' + msg += 'contiguous micro-batches in a virtual pipeline stage) should be 0, ' + msg += 'or larger than or equal to the pipeline-parallel size, but it is ' + msg += f'{final_microbatch_group_size}. ' + msg += 'Otherwise, it introduces dependency bubbles in the pipeline ' + msg += 'and reduces throughput.' 
raise RuntimeError(msg) model_type = get_model_type(model[0]) @@ -655,19 +685,17 @@ def enable_grad_sync(): if forward_only: num_warmup_microbatches = total_num_microbatches else: - # Run all forward passes and then all backward passes if number of - # microbatches is just the number of pipeline stages. - # Otherwise, perform (num_model_chunks-1)*pipeline_parallel_size on + # Run (num_model_chunks-1)*config.microbatch_group_size_per_vp_stage on # all workers, followed by more microbatches after depending on # stage ID (more forward passes for earlier stages, later stages can # immediately start with 1F1B). - if num_microbatches == pipeline_parallel_size: + num_warmup_microbatches = (pipeline_parallel_size - pipeline_parallel_rank - 1) * 2 + num_warmup_microbatches += ( + num_model_chunks - 1 + ) * config.microbatch_group_size_per_vp_stage + if num_warmup_microbatches >= total_num_microbatches: num_warmup_microbatches = total_num_microbatches all_warmup_microbatches = True - else: - num_warmup_microbatches = (pipeline_parallel_size - pipeline_parallel_rank - 1) * 2 - num_warmup_microbatches += (num_model_chunks - 1) * pipeline_parallel_size - num_warmup_microbatches = min(num_warmup_microbatches, total_num_microbatches) num_microbatches_remaining = total_num_microbatches - num_warmup_microbatches # Checkpoint the activations of partial Transformer layers in a number of micro-batches @@ -687,10 +715,55 @@ def enable_grad_sync(): config.param_sync_func[0](model[0].parameters()) config.param_sync_func[1](model[1].parameters()) - def get_model_chunk_id(microbatch_id, forward): + # Create a tunable schedule lookup table. + # The schedule lookup table uses the virtual_microbatch_id to find the corresponding + # microbatch_id and model_chunk_id. For example, the tunable schedule table for + # PP2 N3M5 with VP2 is constructed as below: + # virtual_microbatch_id | 0 1 2 3 4 5 6 7 8 9 + # microbatch_id | 0 1 2 0 1 2 3 4 3 4 + # model_chunk_id | 0 0 0 1 1 1 0 0 1 1 + schedule_table = [] + for min_microbatch_id_in_group in range( + 0, num_microbatches, config.microbatch_group_size_per_vp_stage + ): + if ( + min_microbatch_id_in_group + config.microbatch_group_size_per_vp_stage + >= num_microbatches + ): + # Construct schedule for the last microbatch group + schedule_table.extend( + [ + (microbatch_id, model_chunk_id) + for model_chunk_id in range(len(model)) + for microbatch_id in range(min_microbatch_id_in_group, num_microbatches) + ] + ) + else: + # Construct schedule for other microbatch groups + schedule_table.extend( + [ + (microbatch_id, model_chunk_id) + for model_chunk_id in range(len(model)) + for microbatch_id in range( + min_microbatch_id_in_group, + min_microbatch_id_in_group + config.microbatch_group_size_per_vp_stage, + ) + ] + ) + + # Decouple individual lookup table for microbatch_id and model_chunk_id. + # For example, the micro-batch table for PP2 N3M5 with VP2 is + # virtual_microbatch_id | 0 1 2 3 4 5 6 7 8 9 + # microbatch_id | 0 1 2 0 1 2 3 4 3 4 + # Similarly, the model chunk table is + # virtual_microbatch_id | 0 1 2 3 4 5 6 7 8 9 + # model_chunk_id | 0 0 0 1 1 1 0 0 1 1 + # Both tables are indexed with virtual_microbatch_id. 
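# A minimal standalone sketch (not part of the patch) that reproduces the tunable
# schedule table documented above for PP=2, N=3, M=5 with VP=2. The last-group branch
# of the construction above is collapsed into a single min(); the resulting tables
# are identical.

def build_schedule_table(num_microbatches, num_model_chunks, microbatch_group_size_per_vp_stage):
    table = []
    for start in range(0, num_microbatches, microbatch_group_size_per_vp_stage):
        end = min(start + microbatch_group_size_per_vp_stage, num_microbatches)
        table.extend(
            (microbatch_id, model_chunk_id)
            for model_chunk_id in range(num_model_chunks)
            for microbatch_id in range(start, end)
        )
    return table

example_table = build_schedule_table(num_microbatches=5, num_model_chunks=2,
                                     microbatch_group_size_per_vp_stage=3)
example_microbatch_ids, example_model_chunk_ids = zip(*example_table)
assert example_microbatch_ids == (0, 1, 2, 0, 1, 2, 3, 4, 3, 4)
assert example_model_chunk_ids == (0, 0, 0, 1, 1, 1, 0, 0, 1, 1)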
+ microbatch_id_table, model_chunk_id_table = zip(*schedule_table) + + def get_model_chunk_id(virtual_microbatch_id, forward): """Helper method to get the model chunk ID given the iteration number.""" - microbatch_id_in_group = microbatch_id % (pipeline_parallel_size * num_model_chunks) - model_chunk_id = microbatch_id_in_group // pipeline_parallel_size + model_chunk_id = model_chunk_id_table[virtual_microbatch_id % total_num_microbatches] if not forward: model_chunk_id = num_model_chunks - model_chunk_id - 1 return model_chunk_id @@ -698,38 +771,93 @@ def get_model_chunk_id(microbatch_id, forward): def get_microbatch_id_in_model_chunk(iteration_id, forward): """Helper method to get the microbatch_id within model chunk given the iteration number.""" assert forward - iteration_group_id = iteration_id // (pipeline_parallel_size * num_model_chunks) - microbatch_id_in_model_chunk = (iteration_group_id * pipeline_parallel_size) + ( - iteration_id % pipeline_parallel_size - ) + microbatch_id_in_model_chunk = microbatch_id_table[iteration_id] return microbatch_id_in_model_chunk - def is_first_microbatch_for_model_chunk(microbatch_id: int) -> bool: + def num_released_microbatches(virtual_microbatch_id, model_chunk_id): + """Helper method to count number of released (i.e. popped from input_tensors) + microbatches for a model chunk.""" + if forward_only: # Micro-batch is released after forward prop. + return model_chunk_id_table[:virtual_microbatch_id].count(model_chunk_id) + else: # Micro-batch is released after backward prop. + # Zero backward prop in warmup. + if virtual_microbatch_id < num_warmup_microbatches: + return 0 + else: + backward_microbatch_id = virtual_microbatch_id - num_warmup_microbatches + model_chunk_id = num_model_chunks - model_chunk_id - 1 + return model_chunk_id_table[:backward_microbatch_id].count(model_chunk_id) + + def is_first_microbatch_for_model_chunk(virtual_microbatch_id: int) -> bool: """Check if an iteration is the first for a model chunk.""" - microbatch_group_size = pipeline_parallel_size * num_model_chunks - microbatch_group_id = microbatch_id // microbatch_group_size - microbatch_id_in_group = microbatch_id % microbatch_group_size - if microbatch_group_id == 0: - return microbatch_id_in_group % pipeline_parallel_size == 0 + if virtual_microbatch_id < total_num_microbatches: + return microbatch_id_table[virtual_microbatch_id] == 0 else: return False - def is_last_microbatch_for_model_chunk(microbatch_id: int) -> bool: + def is_last_microbatch_for_model_chunk(virtual_microbatch_id: int) -> bool: """Check if an iteration is the last for a model chunk.""" - microbatch_group_size = pipeline_parallel_size * num_model_chunks - num_microbatch_groups = total_num_microbatches // microbatch_group_size - microbatch_group_id = microbatch_id // microbatch_group_size - microbatch_id_in_group = microbatch_id % microbatch_group_size - if microbatch_group_id == num_microbatch_groups - 1: - return microbatch_id_in_group % pipeline_parallel_size == pipeline_parallel_size - 1 + if virtual_microbatch_id < total_num_microbatches: + return microbatch_id_table[virtual_microbatch_id] == num_microbatches - 1 else: return False - def forward_step_helper(microbatch_id, current_microbatch, checkpoint_activations_microbatch): + def recv_tensor_from_previous_stage(virtual_microbatch_id, forward): + """Determine if peers are sending, and where in data structure + to put received tensors. 
+ Return a boolean if the pipeline stage expects to recv from peers, and the + corresponding model_chunk_id for the received tensor. + """ + recv = True + # The leading pipeline stage is the first rank in fwd and the last rank in bwd. + is_leading_pipeline_stage = ( + parallel_state.is_pipeline_first_stage(ignore_virtual=True) + if forward + else parallel_state.is_pipeline_last_stage(ignore_virtual=True) + ) + + last_model_chunk = (num_model_chunks - 1) if forward else 0 + + if is_leading_pipeline_stage: + # The leading pipeline stage is ahead of the ending pipeline stage + # (i.e. last rank in fwd and first rank in bwd) by (pipeline_parallel_size - 1). + # Let's consider bwd as an example with PP 4: + # 0 1 2 3 ... + # 0 1 2 3 ... + # 0 1 2 3 ... + # 0 1 2 3 ... + if virtual_microbatch_id < (pipeline_parallel_size - 1): + # The ending stage has not produced any tensors, so no recv will be initiated. + recv = False + next_model_chunk_id = get_model_chunk_id(virtual_microbatch_id + 1, forward) + else: + # Find the model chunk of the aligned microbatches in the ending stage. + # For example, microbatch 0 in the ending stage is aligned with microbatch 3 + # in the leading stage. + next_model_chunk_id = get_model_chunk_id( + virtual_microbatch_id - (pipeline_parallel_size - 1), forward + ) + # Last model chunk in the final stage does not produce tensors. + if next_model_chunk_id == last_model_chunk: + recv = False + if forward: + # Model chunk id increases in forward. + next_model_chunk_id += 1 + else: + # Model chunk id decreases in backward. + next_model_chunk_id -= 1 + else: + next_model_chunk_id = get_model_chunk_id(virtual_microbatch_id + 1, forward) + + return recv, next_model_chunk_id + + def forward_step_helper( + virtual_microbatch_id, microbatch_id, checkpoint_activations_microbatch + ): """Helper method to run forward step with model split into chunks (run set_virtual_pipeline_model_parallel_rank() before calling forward_step()).""" - model_chunk_id = get_model_chunk_id(microbatch_id, forward=True) + model_chunk_id = get_model_chunk_id(virtual_microbatch_id, forward=True) parallel_state.set_virtual_pipeline_model_parallel_rank(model_chunk_id) # launch param synchronization for next model chunk @@ -738,12 +866,14 @@ def forward_step_helper(microbatch_id, current_microbatch, checkpoint_activation # asynchronous communication at the same time across the # pipeline-parallel group. 
if config.param_sync_func is not None: - param_sync_microbatch_id = microbatch_id + pipeline_parallel_rank + param_sync_virtual_microbatch_id = virtual_microbatch_id + pipeline_parallel_rank if ( - param_sync_microbatch_id < total_num_microbatches - and is_first_microbatch_for_model_chunk(param_sync_microbatch_id) + param_sync_virtual_microbatch_id < total_num_microbatches + and is_first_microbatch_for_model_chunk(param_sync_virtual_microbatch_id) ): - param_sync_chunk_id = get_model_chunk_id(param_sync_microbatch_id, forward=True) + 1 + param_sync_chunk_id = ( + get_model_chunk_id(param_sync_virtual_microbatch_id, forward=True) + 1 + ) if 1 < param_sync_chunk_id < num_model_chunks: config.param_sync_func[param_sync_chunk_id]( model[param_sync_chunk_id].parameters() @@ -753,7 +883,14 @@ def forward_step_helper(microbatch_id, current_microbatch, checkpoint_activation if parallel_state.is_pipeline_first_stage(): if len(input_tensors[model_chunk_id]) == len(output_tensors[model_chunk_id]): input_tensors[model_chunk_id].append(None) - input_tensor = input_tensors[model_chunk_id][-1] + + # For non-depth-first pipeline schedules, the first rank would buffer multiple received + # activation tensors for a model chunk until accessed during warmup. + # This input buffering is needed to overlap the computation with the receipt of + # the next inputs. To index the proper buffered inputs for forward_step, we use + # microbatch_id offset by the number of released microbatches that have completed backprop. + offset = num_released_microbatches(virtual_microbatch_id, model_chunk_id) + input_tensor = input_tensors[model_chunk_id][microbatch_id - offset] output_tensor, num_tokens = forward_step( forward_step_func, @@ -766,31 +903,37 @@ def forward_step_helper(microbatch_id, current_microbatch, checkpoint_activation collect_non_loss_data, checkpoint_activations_microbatch, check_first_val_step( - first_val_step, forward_only, is_first_microbatch_for_model_chunk(microbatch_id) + first_val_step, + forward_only, + is_first_microbatch_for_model_chunk(virtual_microbatch_id), ), - current_microbatch=current_microbatch, + current_microbatch=microbatch_id, ) + output_tensors[model_chunk_id].append(output_tensor) nonlocal total_num_tokens total_num_tokens += num_tokens.item() - # if forward-only, no need to save tensors for a backward pass + # If forward-only, no need to save tensors for a backward pass. if forward_only: - input_tensors[model_chunk_id].pop() + # Release the tensors that have completed the forward step.
+ input_tensors[model_chunk_id].pop(0) output_tensors[model_chunk_id].pop() return output_tensor - def backward_step_helper(microbatch_id): + def backward_step_helper(virtual_microbatch_id): """Helper method to run backward step with model split into chunks (run set_virtual_pipeline_model_parallel_rank() before calling backward_step()).""" - model_chunk_id = get_model_chunk_id(microbatch_id, forward=False) + model_chunk_id = get_model_chunk_id(virtual_microbatch_id, forward=False) parallel_state.set_virtual_pipeline_model_parallel_rank(model_chunk_id) # launch grad synchronization (default) - if config.grad_sync_func is None and is_last_microbatch_for_model_chunk(microbatch_id): + if config.grad_sync_func is None and is_last_microbatch_for_model_chunk( + virtual_microbatch_id + ): enable_grad_sync() synchronized_model_chunks.add(model_chunk_id) @@ -800,6 +943,7 @@ def backward_step_helper(microbatch_id): input_tensor = input_tensors[model_chunk_id].pop(0) output_tensor = output_tensors[model_chunk_id].pop(0) output_tensor_grad = output_tensor_grads[model_chunk_id].pop(0) + input_tensor_grad = backward_step( input_tensor, output_tensor, output_tensor_grad, model_type, config ) @@ -810,11 +954,13 @@ def backward_step_helper(microbatch_id): # asynchronous communication at the same time across the # pipeline-parallel group. if config.grad_sync_func is not None: - grad_sync_microbatch_id = microbatch_id - pipeline_parallel_rank - if grad_sync_microbatch_id >= 0 and is_last_microbatch_for_model_chunk( - grad_sync_microbatch_id + grad_sync_virtual_microbatch_id = virtual_microbatch_id - pipeline_parallel_rank + if grad_sync_virtual_microbatch_id >= 0 and is_last_microbatch_for_model_chunk( + grad_sync_virtual_microbatch_id ): - grad_sync_chunk_id = get_model_chunk_id(grad_sync_microbatch_id, forward=False) + grad_sync_chunk_id = get_model_chunk_id( + grad_sync_virtual_microbatch_id, forward=False + ) enable_grad_sync() config.grad_sync_func[grad_sync_chunk_id](model[grad_sync_chunk_id].parameters()) synchronized_model_chunks.add(grad_sync_chunk_id) @@ -827,15 +973,66 @@ def backward_step_helper(microbatch_id): input_tensors[0].append(p2p_communication.recv_forward(tensor_shape, config)) fwd_wait_handles = None + fwd_wait_recv_handles = None bwd_wait_handles = None + bwd_wait_recv_handles = None + if parallel_state.is_pipeline_first_stage(ignore_virtual=True): + fwd_recv_buffer_size = ( + config.microbatch_group_size_per_vp_stage - pipeline_parallel_size + 1 + ) + else: + fwd_recv_buffer_size = 1 + if parallel_state.is_pipeline_last_stage(ignore_virtual=True): + bwd_recv_buffer_size = ( + config.microbatch_group_size_per_vp_stage - pipeline_parallel_size + 1 + ) + else: + bwd_recv_buffer_size = 1 + fwd_recv_buffer = [None] * fwd_recv_buffer_size + bwd_recv_buffer = [None] * bwd_recv_buffer_size + recv_prev_wait_handles = [] + send_next_wait_handle = None + send_prev_wait_handle = None + recv_next_wait_handles = [] for k in range(num_warmup_microbatches): + cur_model_chunk_id = get_model_chunk_id(k, forward=True) + parallel_state.set_virtual_pipeline_model_parallel_rank(cur_model_chunk_id) + + if config.overlap_p2p_comm_warmup_flush: + if not parallel_state.is_pipeline_first_stage() and k != 0: + assert recv_prev_wait_handles, ( + f'pp rank {pipeline_parallel_rank}, iteration {k},' + 'should have registered recv handle' + ) + recv_prev_wait_handle = recv_prev_wait_handles.pop(0) + recv_prev_wait_handle.wait() - if fwd_wait_handles is not None: - for req in fwd_wait_handles: - req.wait() + # 
Determine if tensor should be received from previous stage. + recv_prev, next_forward_model_chunk_id = recv_tensor_from_previous_stage(k, forward=True) - # Decide to checkpoint all layers' activations of the current micro-batch + # No receive in last iteration when recv iteration k+1. + if k == (total_num_microbatches - 1): + recv_prev = False + + # Prefetch recv for iteration k+1 for non-first ranks. + if config.overlap_p2p_comm_warmup_flush and not parallel_state.is_pipeline_first_stage( + ignore_virtual=True + ): + fwd_recv_buffer[k % fwd_recv_buffer_size], fwd_wait_recv_handles = ( + p2p_communication.send_forward_recv_forward( + output_tensor=None, # No output_tensor to send. + recv_prev=recv_prev, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + ) + + if fwd_wait_recv_handles: + recv_prev_wait_handles.append(fwd_wait_recv_handles.pop("recv_prev")) + + # Decide to checkpoint all layers' activations of the current micro-batch. if max_outstanding_backprops is not None: checkpoint_activations_microbatch = ( k % max_outstanding_backprops @@ -844,19 +1041,8 @@ def backward_step_helper(microbatch_id): else: checkpoint_activations_microbatch = None - current_microbatch = get_microbatch_id_in_model_chunk(k, forward=True) - output_tensor = forward_step_helper( - k, current_microbatch, checkpoint_activations_microbatch - ) - - # Determine if tensor should be received from previous stage. - next_forward_model_chunk_id = get_model_chunk_id(k + 1, forward=True) - recv_prev = True - if parallel_state.is_pipeline_first_stage(ignore_virtual=True): - if next_forward_model_chunk_id == 0: - recv_prev = False - if k == (total_num_microbatches - 1): - recv_prev = False + microbatch_id = get_microbatch_id_in_model_chunk(k, forward=True) + output_tensor = forward_step_helper(k, microbatch_id, checkpoint_activations_microbatch) # Don't send tensor downstream if on last stage. if parallel_state.is_pipeline_last_stage(): @@ -864,9 +1050,10 @@ def backward_step_helper(microbatch_id): # Send and receive tensors as appropriate (send tensors computed # in this iteration; receive tensors for next iteration). - if not config.overlap_p2p_comm: + if not config.overlap_p2p_comm_warmup_flush: if ( k == (num_warmup_microbatches - 1) + and not config.overlap_p2p_comm and not forward_only and not all_warmup_microbatches ): @@ -889,16 +1076,46 @@ def backward_step_helper(microbatch_id): input_tensor = p2p_communication.send_forward_recv_forward( output_tensor, recv_prev=recv_prev, tensor_shape=tensor_shape, config=config ) - input_tensors[next_forward_model_chunk_id].append(input_tensor) + if recv_prev: + input_tensors[next_forward_model_chunk_id].append(input_tensor) + deallocate_output_tensor(output_tensor, config.deallocate_pipeline_outputs) else: - input_tensor, fwd_wait_handles = p2p_communication.send_forward_recv_forward( - output_tensor, - recv_prev=recv_prev, - tensor_shape=tensor_shape, - config=config, - overlap_p2p_comm=True, - ) + if not parallel_state.is_pipeline_first_stage(ignore_virtual=True): + # Send only since recv prefetched. + _, fwd_wait_handles = p2p_communication.send_forward_recv_forward( + output_tensor, + recv_prev=False, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + else: # No prefetch for first rank, so both send and recv initiated. 
+ fwd_recv_buffer[k % fwd_recv_buffer_size], fwd_wait_handles = ( + p2p_communication.send_forward_recv_forward( + output_tensor, + recv_prev=recv_prev, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + ) + if send_next_wait_handle is not None: + send_next_wait_handle.wait() + if fwd_wait_handles is not None: + send_next_wait_handle = ( + fwd_wait_handles.pop("send_next") if "send_next" in fwd_wait_handles else None + ) + if "recv_prev" in fwd_wait_handles: + recv_prev_wait_handles.append(fwd_wait_handles.pop("recv_prev")) + deallocate_output_tensor(output_tensor, config.deallocate_pipeline_outputs) + if recv_prev: + input_tensors[next_forward_model_chunk_id].append( + fwd_recv_buffer[k % fwd_recv_buffer_size] + ) + fwd_recv_buffer[(k + 1) % fwd_recv_buffer_size] = None + + if config.overlap_p2p_comm: if ( k == (num_warmup_microbatches - 1) and not forward_only @@ -909,7 +1126,7 @@ def backward_step_helper(microbatch_id): if parallel_state.is_pipeline_last_stage(ignore_virtual=True): recv_next = False - (output_tensor_grad, bwd_wait_handles) = ( + (bwd_recv_buffer[-1], bwd_wait_handles) = ( p2p_communication.send_backward_recv_backward( input_tensor_grad, recv_next=recv_next, @@ -918,18 +1135,26 @@ def backward_step_helper(microbatch_id): overlap_p2p_comm=True, ) ) + if send_prev_wait_handle is not None: + send_prev_wait_handle.wait() + if bwd_wait_handles is not None: + send_prev_wait_handle = ( + bwd_wait_handles.pop("send_prev") + if "send_prev" in bwd_wait_handles + else None + ) + if "recv_next" in bwd_wait_handles: + recv_next_wait_handles.append(bwd_wait_handles.pop("recv_next")) - output_tensor_grads[num_model_chunks - 1].append(output_tensor_grad) - input_tensors[next_forward_model_chunk_id].append(input_tensor) - - deallocate_output_tensor(output_tensor, config.deallocate_pipeline_outputs) + if recv_next: + output_tensor_grads[num_model_chunks - 1].append(bwd_recv_buffer[-1]) # Run 1F1B in steady state. for k in range(num_microbatches_remaining): # Forward pass. forward_k = k + num_warmup_microbatches - # Decide to checkpoint all layers' activations of the current micro-batch + # Decide to checkpoint all layers' activations of the current micro-batch. 
if max_outstanding_backprops is not None: checkpoint_activations_microbatch = ( forward_k % max_outstanding_backprops @@ -938,16 +1163,27 @@ def backward_step_helper(microbatch_id): else: checkpoint_activations_microbatch = None - current_microbatch = get_microbatch_id_in_model_chunk(forward_k, forward=True) + cur_model_chunk_id = get_model_chunk_id(forward_k, forward=True) + parallel_state.set_virtual_pipeline_model_parallel_rank(cur_model_chunk_id) + microbatch_id = get_microbatch_id_in_model_chunk(forward_k, forward=True) if config.overlap_p2p_comm: - if fwd_wait_handles is not None: - for req in fwd_wait_handles: - req.wait() + if not parallel_state.is_pipeline_first_stage(): + if config.overlap_p2p_comm_warmup_flush: + assert recv_prev_wait_handles, ( + f'pp rank {pipeline_parallel_rank}, fwd iteration {forward_k}, ' + 'should have registered recv handle' + ) + recv_prev_wait_handle = recv_prev_wait_handles.pop(0) + recv_prev_wait_handle.wait() + else: + if recv_prev_wait_handles is not None and recv_prev_wait_handles: + recv_prev_wait_handle = recv_prev_wait_handles.pop(0) + recv_prev_wait_handle.wait() deallocate_output_tensor(output_tensor, config.deallocate_pipeline_outputs) output_tensor = forward_step_helper( - forward_k, current_microbatch, checkpoint_activations_microbatch + forward_k, microbatch_id, checkpoint_activations_microbatch ) # Determine if current stage has anything to send in either direction, @@ -955,23 +1191,13 @@ def backward_step_helper(microbatch_id): forward_model_chunk_id = get_model_chunk_id(forward_k, forward=True) parallel_state.set_virtual_pipeline_model_parallel_rank(forward_model_chunk_id) - # Last virtual stage no activation tensor to send + # Last virtual stage no activation tensor to send. if parallel_state.is_pipeline_last_stage(): output_tensor = None - # Determine if peers are sending, and where in data structure to put - # received tensors. - recv_prev = True - if parallel_state.is_pipeline_first_stage(ignore_virtual=True): - # First stage is ahead of last stage by (pipeline_parallel_size - 1). - next_forward_model_chunk_id = get_model_chunk_id( - forward_k - (pipeline_parallel_size - 1), forward=True - ) - if next_forward_model_chunk_id == (num_model_chunks - 1): - recv_prev = False - next_forward_model_chunk_id += 1 - else: - next_forward_model_chunk_id = get_model_chunk_id(forward_k + 1, forward=True) + recv_prev, next_forward_model_chunk_id = recv_tensor_from_previous_stage( + forward_k, forward=True + ) # If last iteration, don't receive; we already received one extra # before the start of the for loop. 
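# A minimal sketch (not part of the patch) of the warm-up count this schedule computes
# before entering the 1F1B steady state: (pipeline_parallel_size - rank - 1) * 2
# micro-batches to fill the pipeline, plus (num_model_chunks - 1) *
# microbatch_group_size_per_vp_stage for the extra virtual chunks, clamped to the
# total number of virtual micro-batches; num_microbatches_remaining is the total
# minus this count.

def warmup_count(pp_size, pp_rank, num_model_chunks, microbatch_group_size, num_microbatches):
    total = num_microbatches * num_model_chunks
    warmup = (pp_size - pp_rank - 1) * 2 + (num_model_chunks - 1) * microbatch_group_size
    return min(warmup, total), warmup >= total  # (num_warmup_microbatches, all_warmup_microbatches)

# PP=2, VP=2, M=5, N=3 (the example used for the schedule table above):
assert warmup_count(2, 0, 2, 3, 5) == (5, False)  # first pipeline rank warms up 5 of 10
assert warmup_count(2, 1, 2, 3, 5) == (3, False)  # last pipeline rank warms up 3 of 10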
@@ -980,54 +1206,85 @@ def backward_step_helper(microbatch_id): # Send activation tensor to the next stage and receive activation tensor from the # previous stage - input_tensor, fwd_wait_handles = p2p_communication.send_forward_recv_forward( - output_tensor, - recv_prev=recv_prev, - tensor_shape=tensor_shape, - config=config, - overlap_p2p_comm=True, + fwd_recv_buffer[forward_k % fwd_recv_buffer_size], fwd_wait_handles = ( + p2p_communication.send_forward_recv_forward( + output_tensor, + recv_prev=recv_prev, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) ) + if send_next_wait_handle is not None: + send_next_wait_handle.wait() + if fwd_wait_handles is not None: + send_next_wait_handle = ( + fwd_wait_handles.pop("send_next") if "send_next" in fwd_wait_handles else None + ) + if "recv_prev" in fwd_wait_handles: + recv_prev_wait_handles.append(fwd_wait_handles.pop("recv_prev")) # assert fwd_wait_handles is not None - if bwd_wait_handles is not None: - for req in bwd_wait_handles: - req.wait() - # Backward pass. backward_k = k - input_tensor_grad = backward_step_helper(backward_k) - backward_model_chunk_id = get_model_chunk_id(backward_k, forward=False) parallel_state.set_virtual_pipeline_model_parallel_rank(backward_model_chunk_id) + if not parallel_state.is_pipeline_last_stage(): + if config.overlap_p2p_comm_warmup_flush: + assert recv_next_wait_handles, ( + f'pp rank {pipeline_parallel_rank}, bwd iteration {backward_k}, ' + 'should have registered recv next handle' + ) + recv_next_wait_handle = recv_next_wait_handles.pop(0) + recv_next_wait_handle.wait() + else: + if recv_next_wait_handles is not None and recv_next_wait_handles: + recv_next_wait_handle = recv_next_wait_handles.pop(0) + recv_next_wait_handle.wait() + + input_tensor_grad = backward_step_helper(backward_k) - # First virtual stage no activation gradient tensor to send + # First virtual stage no activation gradient tensor to send. if parallel_state.is_pipeline_first_stage(): input_tensor_grad = None - # Determine if the current virtual stage has an activation gradient tensor to receive - recv_next = True - if parallel_state.is_pipeline_last_stage(ignore_virtual=True): - # Last stage is ahead of first stage by (pipeline_parallel_size - 1). 
- next_backward_model_chunk_id = get_model_chunk_id( - backward_k - (pipeline_parallel_size - 1), forward=False - ) - if next_backward_model_chunk_id == 0: - recv_next = False - next_backward_model_chunk_id -= 1 - else: - next_backward_model_chunk_id = get_model_chunk_id(backward_k + 1, forward=False) - - output_tensor_grad, bwd_wait_handles = p2p_communication.send_backward_recv_backward( - input_tensor_grad, - recv_next=recv_next, - tensor_shape=tensor_shape, - config=config, - overlap_p2p_comm=True, + recv_next, next_backward_model_chunk_id = recv_tensor_from_previous_stage( + backward_k, forward=False ) - else: # no p2p overlap + (bwd_recv_buffer[backward_k % bwd_recv_buffer_size], bwd_wait_handles) = ( + p2p_communication.send_backward_recv_backward( + input_tensor_grad, + recv_next=recv_next, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + ) + if send_prev_wait_handle is not None: + send_prev_wait_handle.wait() + if bwd_wait_handles is not None: + send_prev_wait_handle = ( + bwd_wait_handles.pop("send_prev") if "send_prev" in bwd_wait_handles else None + ) + if "recv_next" in bwd_wait_handles: + recv_next_wait_handles.append(bwd_wait_handles.pop("recv_next")) + + # Put input_tensor and output_tensor_grad in data structures in the + # right location. + if recv_prev: + input_tensors[next_forward_model_chunk_id].append( + fwd_recv_buffer[forward_k % fwd_recv_buffer_size] + ) + fwd_recv_buffer[(forward_k + 1) % fwd_recv_buffer_size] = None + if recv_next: + output_tensor_grads[next_backward_model_chunk_id].append( + bwd_recv_buffer[backward_k % bwd_recv_buffer_size] + ) + bwd_recv_buffer[(backward_k + 1) % bwd_recv_buffer_size] = None + else: # No p2p overlap. output_tensor = forward_step_helper( - forward_k, current_microbatch, checkpoint_activations_microbatch + forward_k, microbatch_id, checkpoint_activations_microbatch ) # Backward pass. @@ -1049,31 +1306,13 @@ def backward_step_helper(microbatch_id): if parallel_state.is_pipeline_first_stage(): input_tensor_grad = None - # Determine if peers are sending, and where in data structure to put - # received tensors. - recv_prev = True - if parallel_state.is_pipeline_first_stage(ignore_virtual=True): - # First stage is ahead of last stage by (pipeline_parallel_size - 1). - next_forward_model_chunk_id = get_model_chunk_id( - forward_k - (pipeline_parallel_size - 1), forward=True - ) - if next_forward_model_chunk_id == (num_model_chunks - 1): - recv_prev = False - next_forward_model_chunk_id += 1 - else: - next_forward_model_chunk_id = get_model_chunk_id(forward_k + 1, forward=True) + recv_prev, next_forward_model_chunk_id = recv_tensor_from_previous_stage( + forward_k, forward=True + ) - recv_next = True - if parallel_state.is_pipeline_last_stage(ignore_virtual=True): - # Last stage is ahead of first stage by (pipeline_parallel_size - 1). - next_backward_model_chunk_id = get_model_chunk_id( - backward_k - (pipeline_parallel_size - 1), forward=False - ) - if next_backward_model_chunk_id == 0: - recv_next = False - next_backward_model_chunk_id -= 1 - else: - next_backward_model_chunk_id = get_model_chunk_id(backward_k + 1, forward=False) + recv_next, next_backward_model_chunk_id = recv_tensor_from_previous_stage( + backward_k, forward=False + ) # If last iteration, don't receive; we already received one extra # before the start of the for loop. 
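For reference, the decision that the new recv_tensor_from_previous_stage() helper centralizes can be reconstructed from the inlined logic deleted above. The sketch below is illustrative only and does not reproduce the helper's exact signature: the stage flags and the chunk-id lookup are passed in as arguments instead of being read from parallel_state.

def recv_decision_sketch(
    iteration_id,
    forward,
    pipeline_parallel_size,
    num_model_chunks,
    is_first_stage_ignore_virtual,
    is_last_stage_ignore_virtual,
    get_model_chunk_id,
):
    """Return (whether to post a recv, which model chunk the received tensor feeds)."""
    if forward:
        recv_prev = True
        if is_first_stage_ignore_virtual:
            # First stage runs (pipeline_parallel_size - 1) iterations ahead of the last.
            next_chunk = get_model_chunk_id(
                iteration_id - (pipeline_parallel_size - 1), forward=True
            )
            if next_chunk == (num_model_chunks - 1):
                recv_prev = False
            next_chunk += 1
        else:
            next_chunk = get_model_chunk_id(iteration_id + 1, forward=True)
        return recv_prev, next_chunk

    recv_next = True
    if is_last_stage_ignore_virtual:
        # Last stage runs (pipeline_parallel_size - 1) iterations ahead of the first.
        next_chunk = get_model_chunk_id(
            iteration_id - (pipeline_parallel_size - 1), forward=False
        )
        if next_chunk == 0:
            recv_next = False
        next_chunk -= 1
    else:
        next_chunk = get_model_chunk_id(iteration_id + 1, forward=False)
    return recv_next, next_chunk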
@@ -1093,39 +1332,117 @@ def backward_step_helper(microbatch_id): ) deallocate_output_tensor(output_tensor, config.deallocate_pipeline_outputs) - # Put input_tensor and output_tensor_grad in data structures in the - # right location. - if recv_prev: - input_tensors[next_forward_model_chunk_id].append(input_tensor) - if recv_next: - output_tensor_grads[next_backward_model_chunk_id].append(output_tensor_grad) + # Put input_tensor and output_tensor_grad in data structures in the + # right location. + if recv_prev: + input_tensors[next_forward_model_chunk_id].append(input_tensor) + if recv_next: + output_tensor_grads[next_backward_model_chunk_id].append(output_tensor_grad) deallocate_output_tensor(output_tensor, config.deallocate_pipeline_outputs) # Run cooldown backward passes (flush out pipeline). if not forward_only: - if config.overlap_p2p_comm and bwd_wait_handles is not None: - for wait_handle in bwd_wait_handles: - wait_handle.wait() + if bwd_wait_handles is not None: + for bwd_wait_handle in bwd_wait_handles.values(): + bwd_wait_handle.wait() if all_warmup_microbatches: output_tensor_grads[num_model_chunks - 1].append( p2p_communication.recv_backward(tensor_shape, config=config) ) for k in range(num_microbatches_remaining, total_num_microbatches): - input_tensor_grad = backward_step_helper(k) - next_backward_model_chunk_id = get_model_chunk_id(k + 1, forward=False) - recv_next = True - if parallel_state.is_pipeline_last_stage(ignore_virtual=True): - if next_backward_model_chunk_id == (num_model_chunks - 1): - recv_next = False + cur_model_chunk_id = get_model_chunk_id(k, forward=False) + parallel_state.set_virtual_pipeline_model_parallel_rank(cur_model_chunk_id) + if not parallel_state.is_pipeline_last_stage() and k != 0: + if config.overlap_p2p_comm_warmup_flush: + assert recv_next_wait_handles, ( + f'pp rank {pipeline_parallel_rank}, backward iteration {k}, ' + 'should have registered recv next handle' + ) + recv_next_wait_handle = recv_next_wait_handles.pop(0) + recv_next_wait_handle.wait() + else: + if recv_next_wait_handles is not None and recv_next_wait_handles: + recv_next_wait_handle = recv_next_wait_handles.pop(0) + recv_next_wait_handle.wait() + + recv_next, next_backward_model_chunk_id = recv_tensor_from_previous_stage( + k, forward=False + ) + if k == (total_num_microbatches - 1): recv_next = False - output_tensor_grads[next_backward_model_chunk_id].append( - p2p_communication.send_backward_recv_backward( + + # Prefetch recv for backward iteration k+1 for non last ranks. + if config.overlap_p2p_comm_warmup_flush and not parallel_state.is_pipeline_last_stage( + ignore_virtual=True + ): + bwd_recv_buffer[k % bwd_recv_buffer_size], bwd_wait_recv_handles = ( + p2p_communication.send_backward_recv_backward( + input_tensor_grad=None, # No input_tensor_grad to send. + recv_next=recv_next, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + ) + + if bwd_wait_recv_handles: + recv_next_wait_handles.append(bwd_wait_recv_handles.pop("recv_next")) + + input_tensor_grad = backward_step_helper(k) + + # First virtual stage no activation gradient tensor to send. 
+ if parallel_state.is_pipeline_first_stage(): + input_tensor_grad = None + + if config.overlap_p2p_comm_warmup_flush: + if not parallel_state.is_pipeline_last_stage(ignore_virtual=True): + _, bwd_wait_handles = p2p_communication.send_backward_recv_backward( + input_tensor_grad, + recv_next=False, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + else: + bwd_recv_buffer[k % bwd_recv_buffer_size], bwd_wait_handles = ( + p2p_communication.send_backward_recv_backward( + input_tensor_grad, + recv_next=recv_next, + tensor_shape=tensor_shape, + config=config, + overlap_p2p_comm=True, + ) + ) + + if send_prev_wait_handle is not None: + send_prev_wait_handle.wait() + if bwd_wait_handles is not None: + send_prev_wait_handle = ( + bwd_wait_handles.pop("send_prev") + if "send_prev" in bwd_wait_handles + else None + ) + if "recv_next" in bwd_wait_handles: + recv_next_wait_handles.append(bwd_wait_handles.pop("recv_next")) + if recv_next: + output_tensor_grads[next_backward_model_chunk_id].append( + bwd_recv_buffer[k % bwd_recv_buffer_size] + ) + bwd_recv_buffer[(k + 1) % bwd_recv_buffer_size] = None + + else: + output_tensor_grad = p2p_communication.send_backward_recv_backward( input_tensor_grad, recv_next=recv_next, tensor_shape=tensor_shape, config=config ) - ) + + if recv_next: + output_tensor_grads[next_backward_model_chunk_id].append(output_tensor_grad) + + if send_prev_wait_handle is not None: + send_prev_wait_handle.wait() # Launch any remaining grad reductions. enable_grad_sync() @@ -1135,6 +1452,13 @@ def backward_step_helper(microbatch_id): config.grad_sync_func[model_chunk_id](model[model_chunk_id].parameters()) synchronized_model_chunks.add(model_chunk_id) + assert ( + not recv_prev_wait_handles + ), 'recv_prev_wait_handles should be cleared at the end of a step' + assert ( + not recv_next_wait_handles + ), 'recv_next_wait_handles should be cleared at the end of a step' + if config.finalize_model_grads_func is not None and not forward_only: # If defer_embedding_wgrad_compute is enabled we need to do the @@ -1168,15 +1492,15 @@ def get_tensor_shapes( config, encoder_decoder_xattn: bool, ): - # Determine right tensor sizes (based on position of rank with - # respect to split rank) and model size. - # Send two tensors if model decoder requires the encoder's output - # (via cross-attention) and rank is in decoder stage. - # first tensor is decoder. - # second tensor is encoder. - # If model has an encoder & decoder and rank is at the boundary: - # send one tensor. - # Otherwise, send one tensor. + """ + Determine right tensor sizes (based on position of rank with respect to split rank) and + model size. + Send two tensors if model decoder requires the encoder's output (via cross-attention) and + rank is in decoder stage. + First tensor is decoder. Second tensor is encoder. + If model has an encoder & decoder and rank is at the boundary, send one tensor. + Otherwise, send one tensor. 
+ """ tensor_shapes = [] seq_length = seq_length // parallel_state.get_context_parallel_world_size() @@ -1191,7 +1515,7 @@ def get_tensor_shapes( ) if model_type == ModelType.encoder_and_decoder: - if parallel_state.is_inside_encoder(rank): + if parallel_state.is_inside_encoder(rank) and not parallel_state.is_inside_decoder(rank): tensor_shapes.append((seq_length, micro_batch_size, config.hidden_size)) elif encoder_decoder_xattn: tensor_shapes.append((decoder_seq_length, micro_batch_size, config.hidden_size)) @@ -1204,6 +1528,7 @@ def get_tensor_shapes( def recv_forward(tensor_shapes, config): + """Wrapper for p2p_communication.recv_forward used with non-interleaving schedule.""" input_tensors = [] for tensor_shape in tensor_shapes: if tensor_shape is None: @@ -1214,6 +1539,7 @@ def recv_forward(tensor_shapes, config): def recv_backward(tensor_shapes, config): + """Wrapper for p2p_communication.recv_backward used with non-interleaving schedule.""" output_tensor_grads = [] for tensor_shape in tensor_shapes: if tensor_shape is None: @@ -1224,6 +1550,7 @@ def recv_backward(tensor_shapes, config): def send_forward(output_tensors, tensor_shapes, config): + """Wrapper for p2p_communication.send_forward used with non-interleaving schedule.""" if not isinstance(output_tensors, list): output_tensors = [output_tensors] for output_tensor, tensor_shape in zip(output_tensors, tensor_shapes): @@ -1233,6 +1560,7 @@ def send_forward(output_tensors, tensor_shapes, config): def send_backward(input_tensor_grads, tensor_shapes, config): + """Wrapper for p2p_communication.send_backward used with non-interleaving schedule.""" if not isinstance(input_tensor_grads, list): input_tensor_grads = [input_tensor_grads] for input_tensor_grad, tensor_shape in zip(input_tensor_grads, tensor_shapes): @@ -1242,6 +1570,8 @@ def send_backward(input_tensor_grads, tensor_shapes, config): def send_forward_recv_backward(output_tensors, tensor_shapes, config): + """Wrapper for p2p_communication.send_forward_recv_backward used + with non-interleaving schedule.""" if not isinstance(output_tensors, list): output_tensors = [output_tensors] output_tensor_grads = [] @@ -1257,6 +1587,8 @@ def send_forward_recv_backward(output_tensors, tensor_shapes, config): def send_backward_recv_forward(input_tensor_grads, tensor_shapes, config): + """Wrapper for p2p_communication.send_backward_recv_forward used + with non-interleaving schedule.""" if not isinstance(input_tensor_grads, list): input_tensor_grads = [input_tensor_grads] input_tensors = [] @@ -1290,12 +1622,12 @@ def forward_backward_pipelining_without_interleaving( if isinstance(model, list): assert ( len(model) == 1 - ), "non-interleaved pipeline parallelism does not support model chunking" + ), "non-interleaved pipeline-parallel schedule does not support model chunking" model = model[0] if isinstance(data_iterator, list): assert ( len(data_iterator) == 1 - ), "non-pipeline-parallel schedule does not support model chunking" + ), "non-interleaved pipeline-parallel schedule does not support model chunking" data_iterator = data_iterator[0] config = get_model_config(model) diff --git a/megatron/core/ssm/mamba_block.py b/megatron/core/ssm/mamba_block.py index 1af00d390..0de169cf1 100644 --- a/megatron/core/ssm/mamba_block.py +++ b/megatron/core/ssm/mamba_block.py @@ -1,6 +1,5 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. # Copyright (c) 2024, Tri Dao, Albert Gu. -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
# Some of this code was adopted from https://github.com/state-spaces/mamba/ # This source code is licensed under the Apache license found in the @@ -168,7 +167,7 @@ def __init__( # Transformer layers apply their own pp_layer_offset layer = build_module(submodules.mlp_layer, config=self.config, layer_number=i + 1) else: - assert True, "unexpected layer_type" + assert False, "unexpected layer_type" self.layers.append(layer) # Required for activation recomputation diff --git a/megatron/core/ssm/mamba_layer.py b/megatron/core/ssm/mamba_layer.py index 686f529b1..f0776746d 100644 --- a/megatron/core/ssm/mamba_layer.py +++ b/megatron/core/ssm/mamba_layer.py @@ -1,6 +1,5 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. # Copyright (c) 2024, Tri Dao, Albert Gu. -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. # Some of this code was adopted from https://github.com/state-spaces/mamba/ # This source code is licensed under the Apache license found in the @@ -20,12 +19,33 @@ @dataclass class MambaLayerSubmodules: + """ + Configuration class for specifying the submodules of a Mamba layer. + + This class defines the structure and default implementations for various + components of a Mamba layer, allowing for flexible customization of the + layer's architecture. + + Args: + norm (Union[ModuleSpec, type]): Specification for the input layer normalization. + mixer (Union[ModuleSpec, type]): Specification for the along-sequence mixing mechanism. + mamba_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation + after the mixer. + """ + norm: Union[ModuleSpec, type] = IdentityOp mixer: Union[ModuleSpec, type] = IdentityOp mamba_bda: Union[ModuleSpec, type] = IdentityOp class MambaLayer(MegatronModule): + """ + A single Mamba layer. + + Mamba layer takes input with size [s, b, h] and returns an + output of the same size. + """ + def __init__( self, config: TransformerConfig, @@ -34,9 +54,7 @@ def __init__( layer_number: int = 1, residual_in_fp32=False, ): - """ - Top level Mamba Layer - """ + """Initialize Mamba Layer.""" super().__init__(config) self.config = config self.layer_number = layer_number @@ -60,6 +78,22 @@ def forward( inference_params=None, rotary_pos_emb: Tensor = None, # Not used in MambaLayer ): + """ + Perform a forward pass through the Mamba layer. + + This method implements the core computation of a Mamba layer, including + the convolution and the selective SSM/SSD. + + Args: + hidden_states (Tensor): Input tensor of shape [s, b, h] where s is sequence length, + b is batch size, and h is hidden size. + attention_mask (Tensor): Mask tensor for self-attention. Not used by this layer. + inference_params (object, optional): Parameters for inference-time optimizations. + rotary_pos_emb (Tensor, optional): Rotary positional embeddings. + + Returns: + output (Tensor): Transformed hidden states of shape [s, b, h]. + """ residual = hidden_states if self.residual_in_fp32: @@ -78,4 +112,5 @@ def forward( return hidden_states def allocate_inference_cache(self, batch_size, max_seqlen, dtype=None): + """Allocate the inference cache.""" return self.mixer.allocate_inference_cache(batch_size, max_seqlen, dtype=dtype) diff --git a/megatron/core/ssm/mamba_mixer.py b/megatron/core/ssm/mamba_mixer.py index cf52190d0..6448f30d9 100644 --- a/megatron/core/ssm/mamba_mixer.py +++ b/megatron/core/ssm/mamba_mixer.py @@ -1,6 +1,5 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. # Copyright (c) 2024, Tri Dao, Albert Gu. 
-# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. # Some of this code was adopted from https://github.com/state-spaces/mamba/ # This source code is licensed under the Apache license found in the @@ -580,6 +579,7 @@ def _get_states_from_cache(self, inference_params, batch_size, initialize_states return conv_state, ssm_state def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """Provide a sharded state dictionary for distributed checkpointing.""" sharded_state_dict = {} # Parameters self._save_to_state_dict(sharded_state_dict, '', keep_vars=True) diff --git a/megatron/core/ssm/triton_cache_manager.py b/megatron/core/ssm/triton_cache_manager.py index 43b5b34f3..8c921dacb 100644 --- a/megatron/core/ssm/triton_cache_manager.py +++ b/megatron/core/ssm/triton_cache_manager.py @@ -1,44 +1,81 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +# Copyright 2018-2020 Philippe Tillet +# Copyright 2020-2022 OpenAI + +# Some of this code was adopted from https://github.com/triton-lang/triton +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. import os -import socket +import uuid from pathlib import Path -import torch - try: + from triton import __version__ as triton_version from triton.runtime.cache import FileCacheManager except ImportError: raise ImportError("triton is required by the Mamba model but cannot be imported") -def get_rank(): - return torch.distributed.get_rank() +def _version_no_greater_than(version, version_limit): + major, minor, _ = map(int, version.split('.')) + limit_major, limit_minor = map(int, version_limit.split('.')) + return major < limit_major or (major == limit_major and minor <= limit_minor) def default_cache_dir(): + """Provides a default path for the Triton cache directory.""" return os.path.join(Path.home(), ".triton", "cache") class ParallelFileCacheManager(FileCacheManager): + """ + This patched version of ParallelFileCacheManager prevents errors related + to the building of the Triton compiler cache when the number of model + parallel ranks is greater than one, including when certain types of file + system are used (such as Lustre). + + Usage: + export TRITON_CACHE_DIR= + export TRITON_CACHE_MANAGER=megatron.core.ssm.triton_cache_manager:ParallelFileCacheManager - # See https://github.com/triton-lang/triton/blob/main/python/triton/runtime/cache.py - - # When running Triton with multiple ranks, they each create their own cache manager. Their input - # keys to that class are mostly (but not entirely) the same across ranks, which leads many ranks - # to write to the same 'key' directories in the cache dir at the same time during compilation, - # leading to conflicts. This works around that by making each cache dir be rank specific by - # adding "rank__" to the cache directory. - - def __init__(self, key): - self.key = key - self.lock_path = None - # create cache directory if it doesn't exist - self.cache_dir = os.environ.get('TRITON_CACHE_DIR', default_cache_dir()) - self.cache_dir = os.path.join( - self.cache_dir, "rank_{}_{}".format(socket.gethostname(), os.getpid()) + This patch implements the changes in the following two Triton project pull + requests: + 1. https://github.com/triton-lang/triton/pull/3544 + 2. https://github.com/triton-lang/triton/pull/4295 + + The above changes will probably be included in Triton release version 3.2, + making this patch no longer necessary.
+ """ + + def put(self, data, filename, binary=True) -> str: + """A patched version of put, implementing PR 3544 and PR 4295.""" + patch_limit = '3.1' + assert _version_no_greater_than(triton_version, patch_limit), ( + "Assertion failed: ParallelFileCacheManager patch should not be " + f"used beyond Triton version {patch_limit}." ) - if self.cache_dir: - self.cache_dir = os.path.join(self.cache_dir, self.key) - self.lock_path = os.path.join(self.cache_dir, "lock") - os.makedirs(self.cache_dir, exist_ok=True) + if not self.cache_dir: + raise RuntimeError("Could not create or locate cache dir") + binary = isinstance(data, bytes) + if not binary: + data = str(data) + assert self.lock_path is not None + filepath = self._make_path(filename) + # Random ID to avoid any collisions + rnd_id = str(uuid.uuid4()) + # we use the PID in case a bunch of these around so we can see what PID made it + pid = os.getpid() + # use temp dir to be robust against program interruptions + temp_dir = os.path.join(self.cache_dir, f"tmp.pid_{pid}_{rnd_id}") + os.makedirs(temp_dir, exist_ok=True) + temp_path = os.path.join(temp_dir, filename) + + mode = "wb" if binary else "w" + with open(temp_path, mode) as f: + f.write(data) + # Replace is guaranteed to be atomic on POSIX systems if it succeeds + # so filepath cannot see a partial write + os.replace(temp_path, filepath) + os.removedirs(temp_dir) + return filepath diff --git a/megatron/core/tensor_parallel/__init__.py b/megatron/core/tensor_parallel/__init__.py index 41d87431f..00bfe4f45 100644 --- a/megatron/core/tensor_parallel/__init__.py +++ b/megatron/core/tensor_parallel/__init__.py @@ -18,12 +18,10 @@ all_to_all_sp2hp, copy_to_tensor_model_parallel_region, gather_from_sequence_parallel_region, - gather_from_sequence_parallel_region_to_moe, gather_from_tensor_model_parallel_region, reduce_from_tensor_model_parallel_region, reduce_scatter_last_dim_to_tensor_parallel_region, reduce_scatter_to_sequence_parallel_region, - reduce_scatter_to_sequence_parallel_region_from_moe, scatter_to_sequence_parallel_region, scatter_to_tensor_model_parallel_region, ) @@ -71,6 +69,4 @@ "split_tensor_along_last_dim", "split_tensor_into_1d_equal_chunks", "gather_split_1d_tensor", - "gather_from_sequence_parallel_region_to_moe", - "reduce_scatter_to_sequence_parallel_region_from_moe", ] diff --git a/megatron/core/tensor_parallel/cross_entropy.py b/megatron/core/tensor_parallel/cross_entropy.py index 0066d126f..27c8f0634 100644 --- a/megatron/core/tensor_parallel/cross_entropy.py +++ b/megatron/core/tensor_parallel/cross_entropy.py @@ -23,6 +23,7 @@ class VocabParallelCrossEntropy: def calculate_logits_max( vocab_parallel_logits: torch.Tensor, ) -> Tuple[torch.Tensor, torch.Tensor]: + """Calculates logits_max.""" vocab_parallel_logits = vocab_parallel_logits.float() # Maximum value along vocab dimension across all GPUs. @@ -38,6 +39,7 @@ def calculate_predicted_logits( vocab_start_index: int, vocab_end_index: int, ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + """Calculates predicted logits.""" # In-place subtraction reduces memory pressure. vocab_parallel_logits -= logits_max.unsqueeze(dim=-1) @@ -69,6 +71,7 @@ def calculate_predicted_logits( def calculate_cross_entropy_loss( exp_logits: torch.Tensor, predicted_logits: torch.Tensor, sum_exp_logits: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: + """Calculates cross entropy loss.""" # Loss = log(sum(exp(logits))) - predicted-logit. 
loss = torch.log(sum_exp_logits) - predicted_logits @@ -82,6 +85,7 @@ def calculate_cross_entropy_loss( def prepare_gradient_calculation_operands( softmax: torch.Tensor, target_mask: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + """Prepare gradient calculation operands.""" # All the inputs have softmax as their gradient. grad_input = softmax @@ -105,6 +109,7 @@ def calculate_gradients( grad_input: torch.Tensor, grad_output: torch.Tensor, ) -> torch.Tensor: + """Calculates gradients.""" grad_2d[arange_1d, masked_target_1d] -= softmax_update @@ -117,6 +122,7 @@ def calculate_gradients( class _VocabParallelCrossEntropy(torch.autograd.Function): @staticmethod def forward(ctx, vocab_parallel_logits, target, label_smoothing=0.0): + """Vocab parallel cross entropy forward function.""" vocab_parallel_logits, logits_max = VocabParallelCrossEntropy.calculate_logits_max( vocab_parallel_logits @@ -157,7 +163,7 @@ def forward(ctx, vocab_parallel_logits, target, label_smoothing=0.0): vocab_size = exp_logits.size(-1) if label_smoothing > 0: - """ + r""" We'd like to assign 1 / (K - 1) probability mass to every index that is not the ground truth. = (1 - alpha) * y_gt + alpha * mean(y_{i for i != gt}) = (1 - alpha) * y_gt + (alpha / (K - 1)) * \sum_{i != gt} y_i @@ -165,11 +171,12 @@ def forward(ctx, vocab_parallel_logits, target, label_smoothing=0.0): = (K * (1 - alpha) - 1) / (K - 1)) * y_gt + (alpha / (K - 1)) * \sum_{i} y_i = (1 - (alpha * K) / (K - 1)) * y_gt + ( (alpha * K) / (K - 1) ) * \sum_{i} y_i / K From: https://github.com/NVIDIA/NeMo/blob/main/nemo/collections/common/losses/smoothed_cross_entropy.py - """ + """ # pylint: disable=line-too-long assert 1.0 > label_smoothing > 0.0 smoothing = label_smoothing * vocab_size / (vocab_size - 1) - # Exp logits at this point are normalized probabilities. So we can just take the log to get log-probs. + # Exp logits at this point are normalized probabilities. + # So we can just take the log to get log-probs. log_probs = torch.log(exp_logits) mean_log_probs = log_probs.mean(dim=-1) loss = (1.0 - smoothing) * loss - smoothing * mean_log_probs @@ -183,6 +190,7 @@ def forward(ctx, vocab_parallel_logits, target, label_smoothing=0.0): @staticmethod def backward(ctx, grad_output): + """Vocab parallel cross entropy backward function.""" # Retrieve tensors from the forward path.
softmax, target_mask, masked_target_1d = ctx.saved_tensors @@ -214,11 +222,11 @@ def vocab_parallel_cross_entropy(vocab_parallel_logits, target, label_smoothing= Args: vocab_parallel_logits: logits split across tensor parallel ranks - dimension is [sequence_length, batch_size, vocab_size/num_parallel_ranks] + dimension is [sequence_length, batch_size, vocab_size/num_parallel_ranks] target: correct vocab ids of dimseion [sequence_length, micro_batch_size] - lobal_smoothing: smoothing factor, must be in range [0.0, 1.0) + label_smoothing: smoothing factor, must be in range [0.0, 1.0) default is no smoothing (=0.0) """ return _VocabParallelCrossEntropy.apply(vocab_parallel_logits, target, label_smoothing) diff --git a/megatron/core/tensor_parallel/layers.py b/megatron/core/tensor_parallel/layers.py index 903b4ed87..fde8c106f 100644 --- a/megatron/core/tensor_parallel/layers.py +++ b/megatron/core/tensor_parallel/layers.py @@ -5,22 +5,23 @@ import os import warnings +from functools import partial from typing import Any, Callable, List, Optional, Tuple import torch import torch.nn.functional as F -from torch.cuda.amp import custom_bwd, custom_fwd from torch.nn.parameter import Parameter from megatron.core.model_parallel_config import ModelParallelConfig from megatron.core.parallel_state import ( + get_expert_tensor_parallel_rank, + get_expert_tensor_parallel_world_size, get_global_memory_buffer, - get_tensor_and_expert_parallel_rank, - get_tensor_and_expert_parallel_world_size, get_tensor_model_parallel_group, get_tensor_model_parallel_rank, get_tensor_model_parallel_world_size, ) +from megatron.core.utils import is_torch_min_version from ..dist_checkpointing.mapping import ShardedStateDict from ..transformer.utils import make_sharded_tensors_for_checkpoint @@ -49,6 +50,22 @@ } +if is_torch_min_version("2.4.0a0"): + custom_fwd = partial(torch.amp.custom_fwd, device_type="cuda") + custom_bwd = partial(torch.amp.custom_bwd, device_type="cuda") +else: + custom_fwd = torch.cuda.amp.custom_fwd + custom_bwd = torch.cuda.amp.custom_bwd + + +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor + dist_reduce_scatter_func = torch.distributed.reduce_scatter_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + dist_reduce_scatter_func = torch.distributed._reduce_scatter_base + + def param_is_not_tensor_parallel_duplicate(param): """Returns true if the passed-in parameter is not a duplicate parameter on another TP rank.""" @@ -90,16 +107,14 @@ def maybe_copy(attribute): maybe_copy(attribute) -def _initialize_affine_weight_gpu( - weight, init_method, partition_dim, stride=1, expert_parallel=False -): +def _initialize_affine_weight_gpu(weight, init_method, partition_dim, stride=1, is_expert=False): """Initialize affine weight for model parallel on GPU.""" set_tensor_model_parallel_attributes( tensor=weight, is_parallel=True, dim=partition_dim, stride=stride ) - if not expert_parallel: + if not is_expert: with get_cuda_rng_tracker().fork(): init_method(weight) else: @@ -315,11 +330,11 @@ def linear_with_frozen_weight( weight: torch.Tensor, bias: Optional[torch.Tensor], gradient_accumulation_fusion: bool, - async_grad_allreduce: bool, + allreduce_dgrad: bool, sequence_parallel: bool, grad_output_buffer: Optional[List[torch.Tensor]] = None, wgrad_deferral_limit: None = None, - allreduce_dgrad: bool = None, + async_grad_allreduce: Optional[bool] = None, ) -> torch.Tensor: """Linear layer execution with weight.requires_grad == False. 
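The layers.py hunk above selects AMP decorators and collective functions by torch version. A self-contained sketch of the same gating pattern, using the third-party packaging library in place of megatron.core.utils.is_torch_min_version() so the snippet runs on its own:

from functools import partial

import torch
from packaging.version import Version


def _torch_at_least(minimum: str) -> bool:
    """Crude stand-in for is_torch_min_version(); good enough for a sketch."""
    return Version(torch.__version__) >= Version(minimum)


if _torch_at_least("2.4.0"):
    # torch.amp.custom_fwd/custom_bwd supersede the torch.cuda.amp variants and
    # take an explicit device_type argument.
    custom_fwd = partial(torch.amp.custom_fwd, device_type="cuda")
    custom_bwd = partial(torch.amp.custom_bwd, device_type="cuda")
else:
    custom_fwd = torch.cuda.amp.custom_fwd
    custom_bwd = torch.cuda.amp.custom_bwd

if _torch_at_least("1.13.0"):
    # Public names for the collectives; the underscore-prefixed ones are older aliases.
    dist_all_gather_func = torch.distributed.all_gather_into_tensor
    dist_reduce_scatter_func = torch.distributed.reduce_scatter_tensor
else:
    dist_all_gather_func = torch.distributed._all_gather_base
    dist_reduce_scatter_func = torch.distributed._reduce_scatter_base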
@@ -339,8 +354,9 @@ def linear_with_frozen_weight( gradient_accumulation_fusion (bool required): dummy argument, used to keep the API unified between all forward implementation functions. - async_grad_allreduce (bool required): dummy argument, used to - keep the API unified between all forward implementation functions. + allreduce_dgrad (bool, required): Do the allreduce of input gradients. + Here, async and sync allreduce are the same. If sequence_parallel is + True, this must be False, as no all reduce is performed. sequence_parallel (bool required): Indicates that sequence parallelism is used and thus in the forward pass the input is @@ -353,12 +369,18 @@ def linear_with_frozen_weight( wgrad_deferral_limit (int optional): dummy argument, used to keep the API unified between all forward implementation functions. - allreduce_dgrad (bool): Do the allreduce of input gradients. - Here, async and sync allreduce are the same. If sequence_parallel is - True, this must be False, as no all reduce is performed. + + async_grad_allreduce (bool optional): Will be removed with 0.11.0. + Please use allreduce_dgrad instead. """ + if async_grad_allreduce is not None: + warnings.warn( + "async_grad_allreduce is deprecated, not in use anymore and will" + " be fully removed with 0.11.0. Please use allreduce_dgrad instead." + ) + assert grad_output_buffer is None, ( "grad_output_buffer kwarg is only supported with " "linear_with_grad_accumulation_and_async_allreduce" @@ -373,13 +395,6 @@ def linear_with_frozen_weight( else: input = input - if allreduce_dgrad is None: - warnings.warn( - "`async_grad_allreduce` is deprecated and will be removed in a future release. " - "Please ue `allreduce_dgrad` instead." - ) - allreduce_dgrad = async_grad_allreduce - args = [input, weight, bias, allreduce_dgrad] return LinearWithFrozenWeight.apply(*args) @@ -416,9 +431,7 @@ def forward( dim_size[0] = dim_size[0] * world_size all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, "mpu") - torch.distributed._all_gather_base( - all_gather_buffer, input, group=get_tensor_model_parallel_group() - ) + dist_all_gather_func(all_gather_buffer, input, group=get_tensor_model_parallel_group()) total_input = all_gather_buffer else: total_input = input @@ -452,7 +465,7 @@ def backward(ctx, grad_output): all_gather_buffer = get_global_memory_buffer().get_tensor( dim_size, input.dtype, "mpu" ) - handle = torch.distributed._all_gather_base( + handle = dist_all_gather_func( all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=True ) @@ -486,7 +499,7 @@ def backward(ctx, grad_output): dim_size, dtype=input.dtype, device=torch.cuda.current_device(), requires_grad=False ) # reduce_scatter - handle = torch.distributed._reduce_scatter_base( + handle = dist_reduce_scatter_func( sub_grad_input, grad_input, group=get_tensor_model_parallel_group(), async_op=True ) # Here we rely on CUDA_DEVICE_MAX_CONNECTIONS=1 to ensure that the @@ -548,11 +561,11 @@ def linear_with_grad_accumulation_and_async_allreduce( weight: torch.Tensor, bias: Optional[torch.Tensor], gradient_accumulation_fusion: bool, - sequence_parallel: bool, allreduce_dgrad: bool, - async_grad_allreduce: Optional[bool] = None, + sequence_parallel: bool, grad_output_buffer: Optional[List[torch.Tensor]] = None, wgrad_deferral_limit: Optional[int] = 0, + async_grad_allreduce: Optional[bool] = None, ) -> torch.Tensor: """Linear layer execution with asynchronous communication and gradient accumulation fusion in backprop. 
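The signature changes above follow a common keyword-migration pattern: allreduce_dgrad becomes the required argument, and the deprecated async_grad_allreduce is kept only as a trailing optional parameter that triggers a warning and is otherwise ignored. A stand-alone sketch of that pattern (linear_helper_sketch is illustrative, not a Megatron function):

import warnings
from typing import Optional


def linear_helper_sketch(
    allreduce_dgrad: bool,
    async_grad_allreduce: Optional[bool] = None,
) -> bool:
    """Return the effective allreduce-dgrad setting, honoring the deprecated kwarg."""
    if async_grad_allreduce is not None:
        # The old kwarg only warns; behavior is driven entirely by allreduce_dgrad.
        warnings.warn(
            "async_grad_allreduce is deprecated and will be removed; "
            "please use allreduce_dgrad instead.",
            DeprecationWarning,
        )
    return allreduce_dgrad


# Old call sites keep working (with a warning); new call sites pass only allreduce_dgrad.
linear_helper_sketch(allreduce_dgrad=True, async_grad_allreduce=True)
linear_helper_sketch(allreduce_dgrad=True)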
@@ -600,11 +613,6 @@ def linear_with_grad_accumulation_and_async_allreduce( gradients. If sequence_parallel is True, this must be False, as no all reduce is performed. - async_grad_allreduce (bool optional): Do the allreduce of input - gradients asyncronously with the computation of weight - gradients. If sequence_parallel is True, this must be - False, as no all reduce is performed. Will be deprecated with 0.10.0 - sequence_parallel (bool required): Indicates that sequence parallelism is used and thus in the forward pass the input is all gathered, and the backward pass the input gradients are @@ -618,11 +626,14 @@ def linear_with_grad_accumulation_and_async_allreduce( micro-batches for which embedding weight gradient GEMM should be deferred. Disable by setting this to 0. Defaults to 0. + async_grad_allreduce (bool optional): Will be removed with 0.11.0. + Please use allreduce_dgrad instead. """ + if async_grad_allreduce is not None: warnings.warn( "async_grad_allreduce is deprecated, not in use anymore and will" - " be fully removed with 0.10.0. Please use allreduce_dgrad instead." + " be fully removed with 0.11.0. Please use allreduce_dgrad instead." ) args = [ @@ -743,15 +754,13 @@ def __init__( self.config = config self.disable_grad_reduce = disable_grad_reduce - self.explicit_expert_comm = self.is_expert and ( - config.tensor_model_parallel_size > 1 or self.expert_parallel - ) - if self.explicit_expert_comm and config.moe_extended_tp: - world_size = get_tensor_and_expert_parallel_world_size() - rank = get_tensor_and_expert_parallel_rank() + if is_expert: + world_size = get_expert_tensor_parallel_world_size() + rank = get_expert_tensor_parallel_rank() else: world_size = get_tensor_model_parallel_world_size() rank = get_tensor_model_parallel_rank() + self.explicit_expert_comm = self.is_expert and (world_size > 1 or self.expert_parallel) self.output_size_per_partition = divide(output_size, world_size) @@ -794,7 +803,7 @@ def __init__( init_method, partition_dim=0, stride=stride, - expert_parallel=(self.is_expert and self.expert_parallel), + is_expert=self.is_expert, ) setattr(self.weight, 'allreduce', not (self.is_expert and self.expert_parallel)) @@ -936,7 +945,7 @@ def forward( weight=weight, bias=bias, gradient_accumulation_fusion=self.gradient_accumulation_fusion, - async_grad_allreduce=allreduce_dgrad, + allreduce_dgrad=allreduce_dgrad, sequence_parallel=False if self.explicit_expert_comm else self.sequence_parallel, grad_output_buffer=( self.grad_output_buffer if self.config.defer_embedding_wgrad_compute else None @@ -946,7 +955,6 @@ def forward( if self.config.defer_embedding_wgrad_compute else None ), - allreduce_dgrad=allreduce_dgrad, ) gather_output = self.gather_output @@ -1044,17 +1052,14 @@ def __init__( if self.sequence_parallel and not self.input_is_parallel: raise RuntimeError("To enable `sequence_parallel`, `input_is_parallel` must be `True`") - self.explicit_expert_comm = self.is_expert and ( - config.tensor_model_parallel_size > 1 or self.expert_parallel - ) - # Divide the weight matrix along the last dimension. 
- if self.explicit_expert_comm and config.moe_extended_tp: - world_size = get_tensor_and_expert_parallel_world_size() - rank = get_tensor_and_expert_parallel_rank() + if self.is_expert: + world_size = get_expert_tensor_parallel_world_size() + rank = get_expert_tensor_parallel_rank() else: world_size = get_tensor_model_parallel_world_size() rank = get_tensor_model_parallel_rank() + self.explicit_expert_comm = self.is_expert and (world_size > 1 or self.expert_parallel) self.input_size_per_partition = divide(input_size, world_size) @@ -1097,7 +1102,7 @@ def __init__( init_method, partition_dim=1, stride=stride, - expert_parallel=(self.is_expert and self.expert_parallel), + is_expert=self.is_expert, ) setattr(self.weight, 'allreduce', not (self.is_expert and self.expert_parallel)) @@ -1167,10 +1172,9 @@ def forward(self, input_): weight=self.weight, bias=None, gradient_accumulation_fusion=self.gradient_accumulation_fusion, - async_grad_allreduce=allreduce_dgrad, + allreduce_dgrad=allreduce_dgrad, sequence_parallel=False, grad_output_buffer=None, - allreduce_dgrad=allreduce_dgrad, ) # All-reduce across all the partitions. diff --git a/megatron/core/tensor_parallel/mappings.py b/megatron/core/tensor_parallel/mappings.py index 3addd8d2e..cdd720687 100644 --- a/megatron/core/tensor_parallel/mappings.py +++ b/megatron/core/tensor_parallel/mappings.py @@ -3,16 +3,22 @@ import torch from megatron.core.parallel_state import ( - get_expert_model_parallel_group, get_global_memory_buffer, - get_tensor_and_expert_parallel_group, get_tensor_model_parallel_group, get_tensor_model_parallel_rank, get_tensor_model_parallel_world_size, ) +from megatron.core.utils import is_torch_min_version from .utils import split_tensor_along_last_dim +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor + dist_reduce_scatter_func = torch.distributed.reduce_scatter_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + dist_reduce_scatter_func = torch.distributed._reduce_scatter_base + def _reduce(input_): """All-reduce the input tensor across model parallel group.""" @@ -46,11 +52,12 @@ def _split_along_last_dim(input_): return output -def _split_along_first_dim(input_): +def _split_along_first_dim(input_, group=None): """Split the tensor along its first dimension and keep the corresponding slice.""" - - world_size = get_tensor_model_parallel_world_size() + if group is None: + group = get_tensor_model_parallel_group() + world_size = torch.distributed.get_world_size(group) # Bypass the function if we are using only 1 GPU. if world_size == 1: return input_ @@ -61,7 +68,7 @@ def _split_along_first_dim(input_): dim_size % world_size == 0 ), "First dimension of the tensor should be divisible by tensor parallel size" local_dim_size = dim_size // world_size - rank = get_tensor_model_parallel_rank() + rank = torch.distributed.get_rank(group) dim_offset = rank * local_dim_size output = input_[dim_offset : dim_offset + local_dim_size].contiguous() @@ -104,7 +111,7 @@ def _reduce_scatter_along_last_dim(input_): return output -def _gather_along_first_dim(input_, output_split_sizes=None): +def _gather_along_first_dim(input_, group=None, output_split_sizes=None, use_global_buffer=False): """Gather tensors and concatenate along the first dimension. Args: @@ -118,7 +125,9 @@ def _gather_along_first_dim(input_, output_split_sizes=None): torch.Tensor: Gathered tensor. 
""" - world_size = get_tensor_model_parallel_world_size() + if group is None: + group = get_tensor_model_parallel_group() + world_size = torch.distributed.get_world_size(group) # Bypass the function if we are using only 1 GPU. if world_size == 1: return input_ @@ -127,22 +136,26 @@ def _gather_along_first_dim(input_, output_split_sizes=None): if output_split_sizes is None: dim_size[0] = dim_size[0] * world_size - output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) - torch.distributed._all_gather_base( - output, input_.contiguous(), group=get_tensor_model_parallel_group() - ) + if use_global_buffer: + output = get_global_memory_buffer().get_tensor(dim_size, input_.dtype, "mpu") + else: + output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) + dist_all_gather_func(output, input_.contiguous(), group=group) else: dim_size[0] = sum(output_split_sizes) - output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) + if use_global_buffer: + output = get_global_memory_buffer().get_tensor(dim_size, input_.dtype, "mpu") + else: + output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) output_tensor_list = list(torch.split(output, output_split_sizes, dim=0)) - torch.distributed.all_gather( - output_tensor_list, input_, group=get_tensor_model_parallel_group() - ) + torch.distributed.all_gather(output_tensor_list, input_, group=group) return output -def _reduce_scatter_along_first_dim(input_, input_split_sizes=None): +def _reduce_scatter_along_first_dim( + input_, group=None, input_split_sizes=None, use_global_buffer=False +): """Reduce-scatter the input tensor across model parallel group. Args: @@ -151,7 +164,9 @@ def _reduce_scatter_along_first_dim(input_, input_split_sizes=None): the input splits along the first dimension for each rank. If None, equal splitting is assumed. Default: None. """ - world_size = get_tensor_model_parallel_world_size() + if group is None: + group = get_tensor_model_parallel_group() + world_size = torch.distributed.get_world_size(group) # Bypass the function if we are using only 1 GPU. if world_size == 1: return input_ @@ -164,74 +179,22 @@ def _reduce_scatter_along_first_dim(input_, input_split_sizes=None): dim_size[0] = dim_size[0] // world_size - output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) - torch.distributed._reduce_scatter_base( - output, input_.contiguous(), group=get_tensor_model_parallel_group() - ) + if use_global_buffer: + output = get_global_memory_buffer().get_tensor(dim_size, input_.dtype, "mpu") + else: + output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) + dist_reduce_scatter_func(output, input_.contiguous(), group=group) else: - rank = torch.distributed.get_rank(get_tensor_model_parallel_group()) + rank = torch.distributed.get_rank(group) input_tensor_list = list(torch.split(input_, input_split_sizes, dim=0)) - output = torch.empty_like(input_tensor_list[rank]) - torch.distributed.reduce_scatter( - output, input_tensor_list, group=get_tensor_model_parallel_group() - ) - return output - - -def _gather_along_first_dim_moe(input_, use_global_buffer=False): - """Gather tensors and concatenate along the first dimension.""" - group = get_tensor_and_expert_parallel_group() - world_size = torch.distributed.get_world_size(group=group) - # Bypass the function if we are using only 1 GPU. 
- if world_size == 1: - return input_ - - dim_size = list(input_.size()) - dim_size[0] = dim_size[0] * world_size - - if use_global_buffer: - output = get_global_memory_buffer().get_tensor(dim_size, input_.dtype, "mpu") - else: - output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) - torch.distributed._all_gather_base(output, input_.contiguous(), group=group) - - return output - - -def _reduce_scatter_along_first_dim_moe(input_, use_global_buffer=False): - """Reduce-scatter the input tensor across model parallel group.""" - group = get_tensor_and_expert_parallel_group() - world_size = torch.distributed.get_world_size(group=group) - # Bypass the function if we are using only 1 GPU. - if world_size == 1: - return input_ - - dim_size = list(input_.size()) - assert dim_size[0] % world_size == 0 - dim_size[0] = dim_size[0] // world_size - - if use_global_buffer: - output = get_global_memory_buffer().get_tensor(dim_size, input_.dtype, "mpu") - else: - output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) - torch.distributed._reduce_scatter_base(output, input_.contiguous(), group=group) - return output - - -def _gather_along_first_dim_expert_parallel(input_): - """Gather tensors and concatenate along the first dimension.""" - group = get_expert_model_parallel_group() - world_size = torch.distributed.get_world_size(group=group) - # Bypass the function if we are using only 1 GPU. - if world_size == 1: - return input_ - - dim_size = list(input_.size()) - dim_size[0] = dim_size[0] * world_size - - output = torch.empty(dim_size, dtype=input_.dtype, device=torch.cuda.current_device()) - torch.distributed._all_gather_base(output, input_.contiguous(), group=group) + if use_global_buffer: + output = get_global_memory_buffer().get_tensor( + input_tensor_list[rank].shape, input_.dtype, "mpu" + ) + else: + output = torch.empty_like(input_tensor_list[rank]) + torch.distributed.reduce_scatter(output, input_tensor_list, group=group) return output @@ -334,16 +297,32 @@ class _GatherFromSequenceParallelRegion(torch.autograd.Function): """Gather the input from sequence parallel region and concatinate.""" @staticmethod - def symbolic(graph, input_, tensor_parallel_output_grad=True, output_split_sizes=None): + def symbolic( + graph, + input_, + tensor_parallel_output_grad=True, + group=None, + output_split_sizes=None, + use_global_buffer=False, + ): """Symbolic function for tracing.""" - return _gather_along_first_dim(input_, output_split_sizes) + return _gather_along_first_dim(input_, group, output_split_sizes, use_global_buffer) @staticmethod - def forward(ctx, input_, tensor_parallel_output_grad=True, output_split_sizes=None): + def forward( + ctx, + input_, + tensor_parallel_output_grad=True, + group=None, + output_split_sizes=None, + use_global_buffer=False, + ): """Forward function.""" ctx.tensor_parallel_output_grad = tensor_parallel_output_grad + ctx.group = group ctx.output_split_sizes = output_split_sizes - return _gather_along_first_dim(input_, ctx.output_split_sizes) + ctx.use_global_buffer = use_global_buffer + return _gather_along_first_dim(input_, group, output_split_sizes, use_global_buffer) @staticmethod def backward(ctx, grad_output): @@ -356,76 +335,46 @@ def backward(ctx, grad_output): # output gradients need to be scattered. 
if tensor_parallel_output_grad: return ( - _reduce_scatter_along_first_dim(grad_output, ctx.output_split_sizes), + _reduce_scatter_along_first_dim( + grad_output, ctx.group, ctx.output_split_sizes, ctx.use_global_buffer + ), + None, + None, None, None, ) else: assert ctx.output_split_sizes is None - return _split_along_first_dim(grad_output), None, None + return _split_along_first_dim(grad_output, ctx.group), None, None, None, None class _ReduceScatterToSequenceParallelRegion(torch.autograd.Function): """Reduce scatter the input from the model parallel region.""" @staticmethod - def symbolic(graph, input_, input_split_sizes=None): + def symbolic(graph, input_, group=None, input_split_sizes=None, use_global_buffer=False): """Symbolic function for tracing.""" - return _reduce_scatter_along_first_dim(input_, input_split_sizes) + return _reduce_scatter_along_first_dim(input_, group, input_split_sizes, use_global_buffer) @staticmethod - def forward(ctx, input_, input_split_sizes=None): + def forward(ctx, input_, group=None, input_split_sizes=None, use_global_buffer=False): """Forward function.""" + ctx.group = group ctx.input_split_sizes = input_split_sizes - return _reduce_scatter_along_first_dim(input_, input_split_sizes) - - @staticmethod - def backward(ctx, grad_output): - """Backward function.""" - input_split_sizes = ctx.input_split_sizes - return _gather_along_first_dim(grad_output, input_split_sizes), None - - -class _GatherFromSequenceParallelRegionToMOE(torch.autograd.Function): - """Gather the input from model parallel region and concatenate.""" # TODO - - @staticmethod - def symbolic(graph, input_, use_global_buffer=False): - """Symbolic function for tracing.""" - return _gather_along_first_dim_moe(input_, use_global_buffer) - - @staticmethod - def forward(ctx, input_, use_global_buffer=False): - """Forward function.""" - ctx.use_global_buffer = use_global_buffer - return _gather_along_first_dim_moe(input_, use_global_buffer) - - @staticmethod - def backward(ctx, grad_output): - """Backward function.""" - use_global_buffer = ctx.use_global_buffer - return _reduce_scatter_along_first_dim_moe(grad_output, use_global_buffer), None - - -class _ReduceScatterToSequenceParallelRegionFromMOE(torch.autograd.Function): - """Reduce scatter the input from the model parallel region.""" - - @staticmethod - def symbolic(graph, input_, use_global_buffer=False): - """Symbolic function for tracing.""" - return _reduce_scatter_along_first_dim_moe(input_, use_global_buffer) - - @staticmethod - def forward(ctx, input_, use_global_buffer=False): - """Forward function.""" ctx.use_global_buffer = use_global_buffer - return _reduce_scatter_along_first_dim_moe(input_, use_global_buffer) + return _reduce_scatter_along_first_dim(input_, group, input_split_sizes, use_global_buffer) @staticmethod def backward(ctx, grad_output): """Backward function.""" + input_split_sizes = ctx.input_split_sizes use_global_buffer = ctx.use_global_buffer - return _gather_along_first_dim_moe(grad_output, use_global_buffer), None + return ( + _gather_along_first_dim(grad_output, ctx.group, input_split_sizes, use_global_buffer), + None, + None, + None, + ) class _AllGatherFromTensorParallelRegion(torch.autograd.Function): @@ -516,61 +465,59 @@ def backward(ctx, *grad_output): def copy_to_tensor_model_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: copy, backward allreduce""" return _CopyToModelParallelRegion.apply(input_) def 
reduce_from_tensor_model_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: all reduce, backward copy""" return _ReduceFromModelParallelRegion.apply(input_) def scatter_to_tensor_model_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: RS, backward: AG """ return _ScatterToModelParallelRegion.apply(input_) def gather_from_tensor_model_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: AG, backward: split """ return _GatherFromModelParallelRegion.apply(input_) def scatter_to_sequence_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: split, backward: AG """ return _ScatterToSequenceParallelRegion.apply(input_) def gather_from_sequence_parallel_region( - input_, tensor_parallel_output_grad=True, output_split_sizes=None + input_, + tensor_parallel_output_grad=True, + group=None, + output_split_sizes=None, + use_global_buffer=False, ): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: AG, backward: RS """ return _GatherFromSequenceParallelRegion.apply( - input_, tensor_parallel_output_grad, output_split_sizes + input_, tensor_parallel_output_grad, group, output_split_sizes, use_global_buffer ) -def reduce_scatter_to_sequence_parallel_region(input_, input_split_sizes=None): - """Wrapper for autograd function""" - return _ReduceScatterToSequenceParallelRegion.apply(input_, input_split_sizes) - - -def gather_from_sequence_parallel_region_to_moe(input_, use_global_buffer=False): - """Wrapper for autograd function""" - return _GatherFromSequenceParallelRegionToMOE.apply(input_, use_global_buffer) - - -def reduce_scatter_to_sequence_parallel_region_from_moe(input_, use_global_buffer=False): - """Wrapper for autograd function""" - return _ReduceScatterToSequenceParallelRegionFromMOE.apply(input_, use_global_buffer) +def reduce_scatter_to_sequence_parallel_region( + input_, group=None, input_split_sizes=None, use_global_buffer=False +): + """Wrapper for autograd function: forward: RS, backward AG """ + return _ReduceScatterToSequenceParallelRegion.apply( + input_, group, input_split_sizes, use_global_buffer + ) def all_gather_last_dim_from_tensor_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: AG, backward RS """ return _AllGatherFromTensorParallelRegion.apply(input_) def reduce_scatter_last_dim_to_tensor_parallel_region(input_): - """Wrapper for autograd function""" + """Wrapper for autograd function: forward: RS, backward AG: AG """ return _ReduceScatterToTensorParallelRegion.apply(input_) diff --git a/megatron/core/tensor_parallel/random.py b/megatron/core/tensor_parallel/random.py index 4b144d416..f3d4ab772 100644 --- a/megatron/core/tensor_parallel/random.py +++ b/megatron/core/tensor_parallel/random.py @@ -14,6 +14,7 @@ from megatron.core.parallel_state import ( get_expert_model_parallel_rank, + get_expert_tensor_parallel_rank, get_tensor_model_parallel_rank, ) from megatron.core.utils import is_te_min_version, safely_set_viewless_tensor_data @@ -198,13 +199,16 @@ def model_parallel_cuda_manual_seed(seed): initialized. Also, no torch.cuda.manual_seed should be called after this function. Basically, this is replacement for that function. 
- Two set of RNG states are tracked: + Three set of RNG states are tracked: default state: This is for data parallelism and is the same among a set of model parallel GPUs but different across different model parallel groups. This is used for example for dropout in the non-tensor-model-parallel regions. tensor-model-parallel state: This state is different among a set of model parallel GPUs, but the same across data parallel groups. This is used for example for dropout in model parallel regions. + expert-parallel-seed: This state is only used for the expert layer of MoE models. + It is different among expert-tensor and expert-model parallel GPUs, and the same + across expert-data parallel groups. """ # 2718 is just for fun and any POSITIVE value will work. offset = seed + 2718 @@ -222,7 +226,7 @@ def model_parallel_cuda_manual_seed(seed): _CUDA_RNG_STATE_TRACKER.add(_MODEL_PARALLEL_RNG_TRACKER_NAME, tensor_model_parallel_seed) expert_parallel_seed = ( - seed + 1024 + 100 * get_expert_model_parallel_rank() + get_tensor_model_parallel_rank() + seed + 1024 + 100 * get_expert_model_parallel_rank() + get_expert_tensor_parallel_rank() ) _CUDA_RNG_STATE_TRACKER.add(_EXPERT_PARALLEL_RNG_TRACKER_NAME, expert_parallel_seed) diff --git a/megatron/core/tensor_parallel/utils.py b/megatron/core/tensor_parallel/utils.py index d7c191b41..17249ac3f 100644 --- a/megatron/core/tensor_parallel/utils.py +++ b/megatron/core/tensor_parallel/utils.py @@ -5,12 +5,12 @@ import torch from megatron.core import parallel_state -from megatron.core.parallel_state import ( - get_tensor_model_parallel_group, - get_tensor_model_parallel_rank, - get_tensor_model_parallel_world_size, -) -from megatron.core.utils import divide +from megatron.core.utils import divide, is_torch_min_version + +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base def split_tensor_along_last_dim( @@ -82,14 +82,7 @@ def gather_split_1d_tensor(tensor): gathered = torch.empty( numel_gathered, dtype=tensor.dtype, device=torch.cuda.current_device(), requires_grad=False ) - # TODO: This API is experimental in pytorch (as of Feb 2022) and - # this might break in future pytorch releases. We chose this API - # as opposed to torch.distributed.all_gather for efficiency reasons. - # This API calls directly NCCL all-gather versus the former does - # internal copies and can potentially cause slow down. 
- torch.distributed._all_gather_base( - gathered, tensor, group=parallel_state.get_tensor_model_parallel_group() - ) + dist_all_gather_func(gathered, tensor, group=parallel_state.get_tensor_model_parallel_group()) return gathered @@ -104,6 +97,7 @@ class VocabUtility: def vocab_range_from_per_partition_vocab_size( per_partition_vocab_size: int, rank, world_size: int ) -> Sequence[int]: + """Vocab range from per partition vocab size.""" index_f = rank * per_partition_vocab_size index_l = index_f + per_partition_vocab_size return index_f, index_l @@ -112,6 +106,7 @@ def vocab_range_from_per_partition_vocab_size( def vocab_range_from_global_vocab_size( global_vocab_size: int, rank: int, world_size: int ) -> Sequence[int]: + """Vocab range from global vocab size.""" per_partition_vocab_size = divide(global_vocab_size, world_size) return VocabUtility.vocab_range_from_per_partition_vocab_size( per_partition_vocab_size, rank, world_size diff --git a/megatron/core/timers.py b/megatron/core/timers.py index e7070e37d..0ae89330d 100644 --- a/megatron/core/timers.py +++ b/megatron/core/timers.py @@ -8,29 +8,44 @@ import torch +from megatron.core.utils import is_torch_min_version + +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + class TimerBase(ABC): + """Timer base class.""" + def __init__(self, name): self.name = name @abstractmethod def start(self, barrier=False): + """Start the timer.""" pass @abstractmethod def stop(self, barrier=False): + """Stop the timer.""" pass @abstractmethod def reset(self): + """Reset timer.""" pass @abstractmethod def elapsed(self, reset=True, barrier=False): + """Calculates the elapsed time.""" pass class DummyTimer(TimerBase): + """Dummy Timer.""" + def __init__(self): super().__init__('dummy timer') @@ -140,6 +155,7 @@ def elapsed(self, reset=True, barrier=False): return _elapsed def active_time(self): + """Returns the active time.""" return self._active_time @@ -151,7 +167,8 @@ def __init__(self, log_level, log_option): Args: log_level (int): Log level to control what timers are enabled. - log_option (str): Setting for logging statistics over ranks for all the timers. Allowed: ['max', 'minmax', 'all']. + log_option (str): Setting for logging statistics over ranks for all the timers. + Allowed: ['max', 'minmax', 'all']. """ self._log_level = log_level allowed_log_options = set(['max', 'minmax', 'all']) @@ -236,9 +253,7 @@ def _get_elapsed_time_all_ranks(self, names, reset, barrier): rank_name_to_time[rank, i] = self._timers[name].elapsed(reset=reset) # See the note above for why we are not using gather. - torch.distributed._all_gather_base( - rank_name_to_time.view(-1), rank_name_to_time[rank, :].view(-1) - ) + dist_all_gather_func(rank_name_to_time.view(-1), rank_name_to_time[rank, :].view(-1)) return rank_name_to_time @@ -309,10 +324,13 @@ def get_all_timers_string( """Returns the output string with logged timer values according to configured options. Args: - names (List[str]): Names of the timers to log. If None, all registered timers are fetched. Defaults to None. - normalizer (float, optional): Normalizes the timer values by the factor. Defaults to 1.0. + names (List[str]): Names of the timers to log. If None, all registered timers are + fetched. Defaults to None. + normalizer (float, optional): Normalizes the timer values by the factor. + Defaults to 1.0. reset (bool, optional): Whether to reset timer values after logging. Defaults to True. 
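A worked example of the `VocabUtility` helpers documented in the `tensor_parallel/utils.py` hunk above; the numbers are illustrative (a 50,304-entry padded vocabulary split across 4 tensor-parallel ranks).

```python
from megatron.core.tensor_parallel.utils import VocabUtility

# 50304 / 4 = 12576 entries per rank; rank 2 owns the half-open range [25152, 37728).
first, last = VocabUtility.vocab_range_from_global_vocab_size(
    global_vocab_size=50304, rank=2, world_size=4
)
assert (first, last) == (25152, 37728)
```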
- barrier (bool, optional): Whether to do a global barrier before time measurments. Defaults to False. + barrier (bool, optional): Whether to do a global barrier before time measurements. + Defaults to False. Raises: Exception: Raises if log option is invalid. @@ -348,15 +366,19 @@ def log( reset: bool = True, barrier: bool = False, ): - """logs the timers passed in names to stdout. Example usage is to log average per step value for timer 'foo', - this function can be called with normalizer factor set to logging interval. + """Logs the timers passed in names to stdout. For example, to log the average per-step + value for timer 'foo', call this function with the normalizer factor set to the logging + interval. Args: names (List[str]): Names of the timers to log. - rank (int, optional): logs the timers to a specific rank. If set to None, logs to the last rank. Defaults to None. - normalizer (float, optional): Normalizes the timer values by the factor. Defaults to 1.0. + rank (int, optional): Logs the timers to a specific rank. If set to None, logs to the + last rank. Defaults to None. + normalizer (float, optional): Normalizes the timer values by the factor. + Defaults to 1.0. reset (bool, optional): Whether to reset timer values after logging. Defaults to True. - barrier (bool, optional): Whether to do a global barrier before time measurments. Defaults to False. + barrier (bool, optional): Whether to do a global barrier before time measurements. + Defaults to False. """ output_string = self.get_all_timers_string(names, normalizer, reset, barrier) @@ -375,15 +397,18 @@ def write( reset: bool = True, barrier: bool = False, ): - """Write timers to a tensorboard writer. Note that we only report maximum time across ranks to tensorboard. + """Write timers to a tensorboard writer. Note that we only report maximum time across ranks + to tensorboard. Args: names (List[str]): Names of the timers to log. writer (SummaryWriter): Tensorboard SummaryWriter object iteration (int): Current iteration. - normalizer (float, optional): Normalizes the timer values by the factor. Defaults to 1.0. + normalizer (float, optional): Normalizes the timer values by the factor. + Defaults to 1.0. reset (bool, optional): Whether to reset timer values after logging. Defaults to True. - barrier (bool, optional): Whether to do a global barrier before time measurments. Defaults to False. + barrier (bool, optional): Whether to do a global barrier before time measurements. + Defaults to False. """ # currently when using add_scalars, # torch.utils.add_scalars makes each timer its own run, which diff --git a/megatron/core/transformer/attention.py b/megatron/core/transformer/attention.py index 850dec88e..583e3c1e6 100644 --- a/megatron/core/transformer/attention.py +++ b/megatron/core/transformer/attention.py @@ -1,12 +1,16 @@ # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
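For reference, a hedged usage sketch of the `Timers` API whose docstrings are reflowed above. It assumes the usual `timers(name, log_level)` accessor and an initialized `torch.distributed` process group, since elapsed times are all-gathered across ranks when logging.

```python
from megatron.core.timers import Timers

timers = Timers(log_level=2, log_option='minmax')

timers('data-loading', log_level=1).start()
# ... work being timed ...
timers('data-loading').stop()

# Log the average per-step time over a 10-iteration logging interval.
timers.log(['data-loading'], normalizer=10.0)
```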
from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Union +from typing import Tuple, Union import torch +from torch import Tensor -from megatron.core import parallel_state, tensor_parallel -from megatron.core.models.common.embeddings import apply_rotary_pos_emb +from megatron.core import InferenceParams, parallel_state, tensor_parallel +from megatron.core.models.common.embeddings.rope_utils import ( + apply_rotary_pos_emb, + apply_rotary_pos_emb_with_cos_sin, +) from megatron.core.parallel_state import ( get_data_parallel_group, get_data_parallel_rank, @@ -22,11 +26,17 @@ from .enums import AttnMaskType from .transformer_config import TransformerConfig +try: + from flash_attn import flash_attn_with_kvcache +except: + flash_attn_with_kvcache = None + + try: import transformer_engine # pylint: disable=unused-import HAVE_TE = True - from megatron.core.transformer.custom_layers.transformer_engine import SplitAlongDim + from megatron.core.extensions.transformer_engine import SplitAlongDim except ImportError: HAVE_TE = False SplitAlongDim = None @@ -34,6 +44,10 @@ @dataclass class SelfAttentionSubmodules: + """ + Configuration class for specifying the submodules of a self-attention. + """ + linear_qkv: Union[ModuleSpec, type] = None core_attention: Union[ModuleSpec, type] = None linear_proj: Union[ModuleSpec, type] = None @@ -43,6 +57,10 @@ class SelfAttentionSubmodules: @dataclass class CrossAttentionSubmodules: + """ + Configuration class for specifying the submodules of a cross-attention. + """ + linear_q: Union[ModuleSpec, type] = None linear_kv: Union[ModuleSpec, type] = None core_attention: Union[ModuleSpec, type] = None @@ -63,6 +81,7 @@ def __init__( layer_number: int, attn_mask_type: AttnMaskType, attention_type: str, + cp_comm_type: str = None, ): super().__init__(config=config) @@ -90,6 +109,7 @@ def __init__( layer_number=self.layer_number, attn_mask_type=self.attn_mask_type, attention_type=self.attention_type, + cp_comm_type=cp_comm_type, ) self.checkpoint_core_attention = self.config.recompute_granularity == 'selective' @@ -116,6 +136,7 @@ def _checkpointed_attention_forward( attention_mask, rotary_pos_emb=None, attn_mask_type=None, + attention_bias=None, packed_seq_params=None, ): """Forward method with selective activation checkpointing.""" @@ -133,6 +154,7 @@ def custom_forward(*inputs): value, attention_mask, attn_mask_type=attn_mask_type, + attention_bias=attention_bias, packed_seq_params=packed_seq_params, ) return output_ @@ -158,7 +180,16 @@ def _allocate_memory(self, inference_max_sequence_length, batch_size, dim, dtype device=torch.cuda.current_device(), ) - def _adjust_key_value_for_inference(self, inference_params, key, value, rotary_pos_emb): + def _adjust_key_value_for_inference( + self, + inference_params: InferenceParams, + query: Tensor, + key: Tensor, + value: Tensor, + rotary_pos_emb: Tensor, + rotary_pos_cos: Tensor = None, + rotary_pos_sin: Tensor = None, + ) -> Tuple[Tensor, Tensor, Tensor, Tensor, Tensor]: """ Saves the generated key and value tensors to the end of the buffers in inference_params. 
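The two submodule dataclasses documented above are consumed through `ModuleSpec`. Below is a hypothetical wiring with stock MCore building blocks; everything other than `SelfAttention` and `SelfAttentionSubmodules` comes from outside this hunk and is an assumption (it roughly mirrors the non-TE GPT layer spec).

```python
from megatron.core.tensor_parallel import ColumnParallelLinear, RowParallelLinear
from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules
from megatron.core.transformer.dot_product_attention import DotProductAttention
from megatron.core.transformer.enums import AttnMaskType
from megatron.core.transformer.spec_utils import ModuleSpec

# Spec only; config and layer_number are supplied later when the module is built.
self_attention_spec = ModuleSpec(
    module=SelfAttention,
    params={"attn_mask_type": AttnMaskType.causal},
    submodules=SelfAttentionSubmodules(
        linear_qkv=ColumnParallelLinear,
        core_attention=DotProductAttention,
        linear_proj=RowParallelLinear,
    ),
)
```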
Returns the full size keys and values from the provided inference_params, as well as @@ -169,7 +200,7 @@ def _adjust_key_value_for_inference(self, inference_params, key, value, rotary_p """ attn_mask_type = self.attn_mask_type if inference_params is None: - return key, value, rotary_pos_emb, attn_mask_type + return query, key, value, rotary_pos_emb, attn_mask_type # ================================================= # Pre-allocate memory for key-values for inference. @@ -204,6 +235,30 @@ def _adjust_key_value_for_inference(self, inference_params, key, value, rotary_p sequence_start = inference_params.sequence_len_offset sequence_end = sequence_start + key.size(0) assert sequence_end <= inference_key_memory.size(0) + + if self.config.flash_decode: + assert ( + rotary_pos_cos is not None and rotary_pos_sin is not None + ), "Flash decoding requires precomputed cos and sin tensors" + if inference_params.sequence_len_offset > 0: # Decode phase, not prefill + rotary_pos_cos_q = rotary_pos_cos[sequence_end - 1 : sequence_end] + rotary_pos_sin_q = rotary_pos_sin[sequence_end - 1 : sequence_end] + rotary_pos_cos_k = rotary_pos_cos[sequence_end - 1 : sequence_end] + rotary_pos_sin_k = rotary_pos_sin[sequence_end - 1 : sequence_end] + else: + rotary_pos_cos_q = rotary_pos_cos[:sequence_end] + rotary_pos_sin_q = rotary_pos_sin[:sequence_end] + rotary_pos_cos_k = rotary_pos_cos[:sequence_end] + rotary_pos_sin_k = rotary_pos_sin[:sequence_end] + + # Flash Decoding assumes that the keys stored in the KV Cache already have RoPE applied. + # Apply RoPE before we store the keys to make it compatible with flash decoding kernel. + key = apply_rotary_pos_emb_with_cos_sin(key, rotary_pos_cos_k, rotary_pos_sin_k) + query = apply_rotary_pos_emb_with_cos_sin(query, rotary_pos_cos_q, rotary_pos_sin_q) + else: + rotary_pos_cos_q = None + rotary_pos_sin_q = None + # Copy key and values. inference_key_memory[sequence_start:sequence_end, batch_start:batch_end, ...] = key inference_value_memory[sequence_start:sequence_end, batch_start:batch_end, ...] = value @@ -212,14 +267,14 @@ def _adjust_key_value_for_inference(self, inference_params, key, value, rotary_p # adjust the key rotary positional embedding if rotary_pos_emb is None: - return key, value, rotary_pos_emb, attn_mask_type + return query, key, value, rotary_pos_emb, attn_mask_type q_pos_emb, k_pos_emb = rotary_pos_emb q_pos_emb = q_pos_emb[sequence_start:sequence_end, :, :, :] k_pos_emb = k_pos_emb[:sequence_end, :, :, :] rotary_pos_emb = (q_pos_emb, k_pos_emb) - return key, value, rotary_pos_emb, attn_mask_type + return query, key, value, rotary_pos_emb, attn_mask_type @abstractmethod def get_query_key_value_tensors(self, hidden_states, key_value_states): @@ -228,6 +283,52 @@ def get_query_key_value_tensors(self, hidden_states, key_value_states): is "self-attn" or "cross-attn". """ + def flash_decoding( + self, + sequence_len_offset: Tensor, + query_layer: Tensor, + key_layer: Tensor, + value_layer: Tensor, + inference_key_memory: Tensor, + inference_value_memory: Tensor, + rotary_cos: Tensor, + rotary_sin: Tensor, + ) -> (Tensor, Tensor): + """ + The flash decoding kernel will do the following in a single execution: + 1. Compute RoPE embedding with precomputed cos & sin tensors + 2. Update the KV Cache + 3. Performs the flash attention operation + """ + assert flash_attn_with_kvcache is not None, ( + "Flash Decoding requires the flash_attn_with_kvcache kernel, " + "available in the flash-attn package." 
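The prefill/decode split in the flash-decode branch above boils down to which rows of the precomputed cos/sin tables are consumed. A toy illustration with hypothetical sizes (plain tensors, no KV cache):

```python
import torch

max_seq, rot_dim = 16, 8
rotary_pos_cos = torch.randn(max_seq, rot_dim)

def rotary_rows(sequence_len_offset: int, new_tokens: int) -> torch.Tensor:
    sequence_end = sequence_len_offset + new_tokens
    if sequence_len_offset > 0:
        # Decode phase: only the row for the newly generated position.
        return rotary_pos_cos[sequence_end - 1 : sequence_end]
    # Prefill phase: all positions seen so far.
    return rotary_pos_cos[:sequence_end]

assert rotary_rows(0, 5).shape[0] == 5   # prefill of a 5-token prompt
assert rotary_rows(5, 1).shape[0] == 1   # first decode step
```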
+ ) + cache_seqlens = sequence_len_offset - 1 + q = query_layer.permute(1, 0, 2, 3) + k = key_layer.permute(1, 0, 2, 3) + v = value_layer.permute(1, 0, 2, 3) + k_cache = inference_key_memory.permute(1, 0, 2, 3) + v_cache = inference_value_memory.permute(1, 0, 2, 3) + + if rotary_cos is not None: + rotary_cos = rotary_cos.to(query_layer.dtype) + if rotary_sin is not None: + rotary_sin = rotary_sin.to(query_layer.dtype) + + out = flash_attn_with_kvcache( + q=q, + k_cache=k_cache, + v_cache=v_cache, + k=k, + v=v, + rotary_cos=rotary_cos, + rotary_sin=rotary_sin, + cache_seqlens=cache_seqlens, + rotary_interleaved=False, + ) + return out + def forward( self, hidden_states, @@ -235,9 +336,20 @@ def forward( key_value_states=None, inference_params=None, rotary_pos_emb=None, + rotary_pos_cos=None, + rotary_pos_sin=None, + attention_bias=None, packed_seq_params=None, ): + """ + Perform a forward pass through the attention module. + """ + # hidden_states: [sq, b, h] + if self.config.flash_decode: + rotary_pos_emb = None + else: + assert rotary_pos_cos is None and rotary_pos_sin is None # For self attention we just duplicate the rotary_pos_emb if it isn't already if rotary_pos_emb is not None and not isinstance(rotary_pos_emb, tuple): @@ -253,8 +365,36 @@ def forward( # =================================================== # Adjust key, value, and rotary_pos_emb for inference # =================================================== - key, value, rotary_pos_emb, attn_mask_type = self._adjust_key_value_for_inference( - inference_params, key, value, rotary_pos_emb + + # This branch only runs in the decode phase of flash decoding and returns after the linear + # projection. This conditional is not used in the prefill phase or non-flash-decoding cases. + if ( + self.config.flash_decode + and inference_params is not None + and self.layer_number + in inference_params.key_value_memory_dict # Decode phase if key already exists + ): + assert inference_params.sequence_len_offset is not None + inference_key_memory, inference_value_memory = inference_params.key_value_memory_dict[ + self.layer_number + ] + output = self.flash_decoding( + sequence_len_offset=inference_params.sequence_len_offset, + query_layer=query, + key_layer=key, + value_layer=value, + inference_key_memory=inference_key_memory, + inference_value_memory=inference_value_memory, + rotary_cos=rotary_pos_cos, + rotary_sin=rotary_pos_sin, + ) + out = output.transpose(0, 1).contiguous() + context_layer = out.view(out.size(0), out.size(1), -1) + output, bias = self.linear_proj(context_layer) + return output, bias + + query, key, value, rotary_pos_emb, attn_mask_type = self._adjust_key_value_for_inference( + inference_params, query, key, value, rotary_pos_emb, rotary_pos_cos, rotary_pos_sin ) if packed_seq_params is not None: @@ -265,12 +405,18 @@ def forward( # ================================================ # relative positional embedding (rotary embedding) # ================================================ - if rotary_pos_emb is not None: + if rotary_pos_emb is not None and not self.config.flash_decode: q_pos_emb, k_pos_emb = rotary_pos_emb if packed_seq_params is not None: - cu_seqlens_q = packed_seq_params.cu_seqlens_q - cu_seqlens_kv = packed_seq_params.cu_seqlens_kv + if packed_seq_params.cu_seqlens_q_padded is not None: + cu_seqlens_q = packed_seq_params.cu_seqlens_q_padded + else: + cu_seqlens_q = packed_seq_params.cu_seqlens_q + if packed_seq_params.cu_seqlens_kv_padded is not None: + cu_seqlens_kv = packed_seq_params.cu_seqlens_kv_padded + else: 
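The `permute(1, 0, 2, 3)` calls in `flash_decoding` above are pure layout conversions: MCore keeps attention tensors as `[seq, batch, heads, head_dim]`, while `flash_attn_with_kvcache` expects batch first. A tiny shape check with illustrative sizes:

```python
import torch

sq, b, nh, hd = 1, 4, 8, 64                 # one new token per sequence during decode
query_layer = torch.randn(sq, b, nh, hd)    # [sq, b, nh, hd]
q = query_layer.permute(1, 0, 2, 3)         # -> [b, sq, nh, hd] for the kernel
assert q.shape == (b, sq, nh, hd)
```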
+ cu_seqlens_kv = packed_seq_params.cu_seqlens_kv else: cu_seqlens_q = cu_seqlens_kv = None query = apply_rotary_pos_emb( @@ -294,6 +440,7 @@ def forward( value, attention_mask, attn_mask_type=attn_mask_type, + attention_bias=attention_bias, packed_seq_params=packed_seq_params, ) else: @@ -303,10 +450,11 @@ def forward( value, attention_mask, attn_mask_type=attn_mask_type, + attention_bias=attention_bias, packed_seq_params=packed_seq_params, ) - if packed_seq_params is not None: + if packed_seq_params is not None and packed_seq_params.qkv_format == 'thd': # reshape to same output shape as unpacked case # (t, np, hn) -> (t, b=1, h=np*hn) # t is the pack size = sum (sq_i) @@ -335,6 +483,7 @@ def __init__( submodules: SelfAttentionSubmodules, layer_number: int, attn_mask_type=AttnMaskType.padding, + cp_comm_type: str = None, ): super().__init__( config=config, @@ -342,6 +491,7 @@ def __init__( layer_number=layer_number, attn_mask_type=attn_mask_type, attention_type="self", + cp_comm_type=cp_comm_type, ) self.linear_qkv = build_module( @@ -514,6 +664,7 @@ def __init__( submodules: CrossAttentionSubmodules, layer_number: int, attn_mask_type=AttnMaskType.padding, + cp_comm_type: str = None, ): super().__init__( config=config, @@ -521,6 +672,7 @@ def __init__( layer_number=layer_number, attn_mask_type=attn_mask_type, attention_type="cross", + cp_comm_type=cp_comm_type, ) if self.config.num_query_groups != self.config.num_attention_heads: diff --git a/megatron/core/transformer/dot_product_attention.py b/megatron/core/transformer/dot_product_attention.py index d5c014cab..cb52fca1f 100644 --- a/megatron/core/transformer/dot_product_attention.py +++ b/megatron/core/transformer/dot_product_attention.py @@ -41,6 +41,7 @@ def __init__( attention_type: str, attention_dropout: float = None, softmax_scale: float = None, + cp_comm_type: str = None, ): super().__init__(config=config) @@ -101,12 +102,15 @@ def forward( value: Tensor, attention_mask: Tensor, attn_mask_type: AttnMaskType = None, + attention_bias: Tensor = None, packed_seq_params: Optional[PackedSeqParams] = None, ): + """Forward.""" assert packed_seq_params is None, ( "Packed sequence is not supported by DotProductAttention." "Please use TEDotProductAttention instead." ) + assert attention_bias is None, "Attention bias is not supported for DotProductAttention." # =================================== # Raw attention scores. [b, n/p, s, s] diff --git a/megatron/core/transformer/mlp.py b/megatron/core/transformer/mlp.py index e82d6ecd2..cead6d466 100644 --- a/megatron/core/transformer/mlp.py +++ b/megatron/core/transformer/mlp.py @@ -1,13 +1,12 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
from dataclasses import dataclass -from typing import Optional, Tuple, Union +from typing import Optional, Union import numpy as np import torch import torch.nn.functional as F -from megatron.core import parallel_state from megatron.core.dist_checkpointing import ShardedTensor from megatron.core.dist_checkpointing.mapping import ( ReplicaId, @@ -20,7 +19,6 @@ from megatron.core.transformer.module import MegatronModule from megatron.core.transformer.spec_utils import ModuleSpec, build_module from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.utils import make_sharded_tensors_for_checkpoint @dataclass @@ -59,7 +57,8 @@ def __init__( self.input_size = input_size if input_size != None else self.config.hidden_size - # If this is a gated linear unit we double the output width, see https://arxiv.org/pdf/2002.05202.pdf + # If this is a gated linear unit we double the output width + # see https://arxiv.org/pdf/2002.05202.pdf ffn_hidden_size = self.config.ffn_hidden_size if self.config.gated_linear_unit: ffn_hidden_size *= 2 @@ -93,7 +92,7 @@ def __init__( ) def forward(self, hidden_states): - + """Perform the forward pass through the MLP block.""" # [s, b, 4 * h/p] intermediate_parallel, bias_parallel = self.linear_fc1(hidden_states) @@ -149,19 +148,26 @@ def apply_swiglu_sharded_factory(original_sh_ten, sharded_offsets): # We must split the tensor into 2 parts, each sharded separately. # This requires a ShardedTensorFactory which `chunk`s during saving # and `cat`s during loading - tp_rank = parallel_state.get_tensor_model_parallel_rank() - tp_size = parallel_state.get_tensor_model_parallel_world_size() + swiglu_shard_axis = 0 prepend_axis_num = len(sharded_offsets) original_shape = original_sh_ten.local_shape original_numel = int(np.prod(original_shape)) + local_axis_size = original_shape[swiglu_shard_axis] + assert ( + original_sh_ten.global_offset[swiglu_shard_axis + prepend_axis_num] % local_axis_size == 0 + ) + rank_offset = ( + original_sh_ten.global_offset[swiglu_shard_axis + prepend_axis_num] // local_axis_size + ) + axis_frag = original_sh_ten.axis_fragmentations[swiglu_shard_axis + prepend_axis_num] @torch.no_grad() def sh_ten_build_fn( key: str, t: torch.Tensor, replica_id: ReplicaId, flattened_range: Optional[slice] ): - offset_w = (swiglu_shard_axis + prepend_axis_num, tp_rank, tp_size * 2) - offset_v = (swiglu_shard_axis + prepend_axis_num, tp_size + tp_rank, tp_size * 2) + offset_w = (swiglu_shard_axis + prepend_axis_num, rank_offset, axis_frag * 2) + offset_v = (swiglu_shard_axis + prepend_axis_num, rank_offset + axis_frag, axis_frag * 2) if flattened_range is None: tensor_w, tensor_v = torch.chunk(t, 2, dim=swiglu_shard_axis) return [ diff --git a/megatron/core/transformer/moe/README.md b/megatron/core/transformer/moe/README.md index a7ee75bcb..e08f94f2c 100644 --- a/megatron/core/transformer/moe/README.md +++ b/megatron/core/transformer/moe/README.md @@ -1,6 +1,6 @@ # Megatron Core MoE Key Features -Megatron-Core offers rich parallelism mappings, combining Expert Parallelism with tensor, data, sequence, and pipeline parallelism. This boosts Mixtral 8X7B bf16 training to achieve **438 TFLOPS** as of MCore v0.8. +Megatron-Core offers rich parallelism mappings, combining Expert Parallelism with tensor, data, sequence, and pipeline parallelism. This boosts Mixtral 8X7B bf16 training to achieve **468 TFLOPS** as of MCore v0.9. 
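A worked example of the swiglu shard-offset arithmetic introduced in `apply_swiglu_sharded_factory` above, using hypothetical sizes (`prepend_axis_num = 0`, a dim-0 shard of 1024 rows, a global offset of 2048 for this rank, and 4 fragments along the axis):

```python
local_axis_size = 1024            # rows held locally along the sharded axis
global_offset_along_axis = 2048   # this rank's starting row in the global tensor
axis_frag = 4                     # number of fragments along that axis
swiglu_shard_axis = 0

rank_offset = global_offset_along_axis // local_axis_size              # -> 2
offset_w = (swiglu_shard_axis, rank_offset, axis_frag * 2)              # W chunk: slot 2 of 8
offset_v = (swiglu_shard_axis, rank_offset + axis_frag, axis_frag * 2)  # V chunk: slot 6 of 8
assert offset_w == (0, 2, 8) and offset_v == (0, 6, 8)
```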
### Parallelism @@ -25,6 +25,7 @@ Megatron-Core offers rich parallelism mappings, combining Expert Parallelism wit - Supported dtype: bf16 - Performance improvements for larger MoE models - Enable `--tp-comm-overlap` for MoE +- FP8 training support ### Token Dispatch Mechanism - Dropless / No token drop @@ -34,11 +35,15 @@ Megatron-Core offers rich parallelism mappings, combining Expert Parallelism wit - Checkpoint converter for Mixtral models, see the [example](https://github.com/NVIDIA/Megatron-LM/tree/main/examples/mixtral) for details. - Distributed checkpoining - Per-layer logging +- Upcycling Support +- Granular upcycling ## Upcoming features -- Token permutation / unpermutation fusion -- Fused Sinkhorn Kernel -- FP8 training support +- New Parallelism for Large-scale MoE training +- FP8 support for GroupedGEMM +- Token permutation / Unpermutation fusion +- TopK Router Fusion +- MoE Layer Frequency # User Guide @@ -48,6 +53,7 @@ Megatron-Core offers rich parallelism mappings, combining Expert Parallelism wit | --- | --- | | --num-experts | Number of Experts in MoE (None means no MoE) | | --expert-model-parallel-size | Degree of expert model parallelism. Default is 1. | +| --expert-tensor-parallel-size | Degree of tensor model parallelism of expert layer. Default is same to --tensor-model-parallel-size. | | --moe-grouped-gemm | When there are multiple experts per rank, launch multiple local GEMM kernels in multiple streams to improve the utilization and performance with GroupedLinear in TransformerEngine. | | --moe-router-load-balancing-type | Determines the load balancing strategy for the router. "aux_loss" corresponds to the load balancing loss used in GShard and SwitchTransformer, "sinkhorn" corresponds to the balancing algorithm used in S-BASE, and "none" implies no load balancing. The default is "aux_loss". | | --moe-router-topk | Number of experts to route to for each token. The default is 2. | @@ -60,7 +66,6 @@ Megatron-Core offers rich parallelism mappings, combining Expert Parallelism wit | --moe-pad-expert-input-to-capacity | Pads the input for each expert to match the expert capacity length, effective only after the --moe-expert-capacity-factor is set. | | --moe-token-drop-policy | The policy to drop tokens. Can be either "probs" or "position". If "probs", the tokens with the lowest probabilities will be dropped. If "position", tokens at the end of each batch will be dropped. | | --moe-layer-recompute | Enable activation checkpointing for moe_layer, should be used when memory is not sufficient. | -| --moe-extended-tp | (Experimental) Alternative parallelization strategy for expert parallelism. Instead of distributing experts across *expert_model_parallel_size*, each expert is sharded along extendended tensor parallel domain (tensor_model_paralle_size * expert_model_parallel_size). It avoids the load balancing problem with MOE training. Only available with `--moe-token-dispatcher-type allgather`. | | --moe-shared-expert-intermediate-size | Set shared expert total ffn hidden size. It should be equal to `num_shared_experts * ffn_size_of_each_shared_expert` if there are multiple shared experts. None means no shared expert. | | --moe-shared-expert-overlap | (Experimental, may changed) If this is set, the communications/computations in the shared experts and the dispatcher will overlap (The `alltoall` dispatcher is needed.) Otherwise, the shared expert runs after the routed experts. 
| | --moe-use-upcycling | Load the dense model checkpoint, convert it into an MoE model at runtime and start training. The converted model will be saved to the path specified by `--save` before training begins. Upcycling is implemented on the top of distributed checkpointing, so it supports parallel modes different from the dense model.| @@ -159,9 +164,11 @@ The `MLP` computation part in the shared experts are overlapped with the `AlltoA Both the forward and the backward pass can overlap. But to get the overlapping in the backward pass, the PyTorch version should `>= 2.2.0`. ### Upcycling -Use `--moe-use-upcycling` to enable the upcycling feature, which will load the dense model from the directory specified by `--load`, convert it into an MoE model at runtime and start training. The converted model will be saved to the path specified by `--save` before training begins. Upcycling is implemented on the top of distributed checkpointing, so it supports parallel modes different from the dense model. +Use `--moe-use-upcycling` to enable upcycling, which loads the dense model from the `--load` directory, converts it to an MoE model at runtime, and starts training. The converted model is saved to the `--save` path before training begins. Upcycling is built on distributed checkpointing, supporting parallel modes different from existing dense checkpoints, such as arbitrary expert parallelism during upcycling. + +We currently only support the default upcycling strategy, which duplicates the existing MLP to multiple experts, with each expert starting from a copy of the MLP. In the future, we will support more state-of-the-art upcycling strategies, such as Granular upcycling from [our recent research work](https://arxiv.org/abs/2410.07524). -The MoE model structure is defined through script arguments. All MoE-related arguments (such as `--num-experts`) can be customized; however, other model structure arguments must be consistent with those of the dense model. +Note: The MoE model structure is defined through script arguments. All MoE-related arguments (such as `--num-experts`) can be customized; however, other model structure arguments must be consistent with those of the dense model. ## MoE training example:
@@ -321,6 +328,21 @@ Here we provide some general rules to get better performance: - The efficiency of CP largely depends on whether its communication can be overlapped with computation. - Emperically, use CP when sequence length >= 8K. +### MoE Parallel Folding + +MoE Parallel Folding separates the MoE-related parallel groups from the dense groups. +1. Traditional MoE parallel groups are entangled with the dense groups by using a 5-dimension parallel group generator with default order `tp-cp-ep-dp-pp`. The EP group in MoE is a sub-group of DP in Attention. +2. With MoE Parallel Folding, we use a parallel group generator with `tp-cp-dp-pp` for Attention, and another with `tp-ep-dp-pp` for MoE. The EPxTP group in MoE is a sub-group of DPxCPxTP in Attention. + +By setting `--expert-tensor-parallel-size`, we can set an MoE-specific TP size. + +#### Advantages of MoE Parallel Folding +1. The CP and EP groups are folded together by default, so that: + 1. It reduces the minimal number of GPUs required to turn on both CP and EP. For example, the traditional way with (CP=8, EP=8) needs at least 64 GPUs, while with folding it only requires 8 GPUs. + 2. Both the CP and EP communication can be kept within the NVLink domain. +2. We can set different TP sizes for the Attention and MoE parts. + 1. For MoE, EP is often more efficient than TP. But in the traditional way, using EP alone can cause OOM for most models. + 2. With MoE Parallel Folding, we can turn on TP for the Attention part and set TP=1 for the MoE part, which often gives better MFU. ### End-to-End Training Practice **Use the latest NVIDIA PyTorch or NeMo Docker Image** @@ -345,7 +367,7 @@ Here we provide some general rules to get better performance: **OOM Caused by Token Distribution Imbalance when Training From Scratch** MoE suffers from a severe load imbalance issue when the router is under-trained, leading to the model easily running out of memory (OOM), which typically occurs in the first 100~300 steps when training from scratch. Therefore, there are two recommended ways during the first 200 steps to avoid the OOM problem, which can be removed after the token distribution is more stable: -1. Use Extended-TP(`-moe-extended-tp`) to replace EP with TP in MoELayer, this can prevent the load imbalancing between EP ranks. Since current ETP implementation has some memeory overhead, you can further enable activation recomputation only for MoE Layer by adding `--moe-layer-recompute`. +1. Increase `--expert-tensor-parallel-size` and decrease `--expert-model-parallel-size` to replace EP with TP in the MoELayer; this can prevent load imbalance between EP ranks. Since the current ETP implementation has some memory overhead, you can further enable activation recomputation only for the MoE layer by adding `--moe-layer-recompute`. 2. Setting capacity factor to a relatively small number like 1.0 by adding `--moe-token-capacity-factor 1.0`.
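A hedged configuration sketch for the folding setup described above. It assumes the `expert_tensor_parallel_size` argument added to `parallel_state.initialize_model_parallel` by the parallel-folding work (the exact keyword should be verified against `megatron/core/parallel_state.py`) and an already-initialized `torch.distributed` process group.

```python
from megatron.core import parallel_state

# 4-way TP for the dense/attention part, CP=2, and EP=8 with TP=1 inside the experts,
# mirroring the "TP for Attention, TP=1 for MoE" guidance above.
parallel_state.initialize_model_parallel(
    tensor_model_parallel_size=4,
    context_parallel_size=2,
    expert_model_parallel_size=8,
    expert_tensor_parallel_size=1,  # assumed keyword, per --expert-tensor-parallel-size
)
```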
### Reference Best Parallel Mapping @@ -363,4 +385,4 @@ Server: - InfiniBand 8x400 Gbit/s Docker Image: -- PyTorch 24.04 with TransformerEngine v1.9 \ No newline at end of file +- PyTorch 24.09 with TransformerEngine v1.11 \ No newline at end of file diff --git a/megatron/core/transformer/moe/experts.py b/megatron/core/transformer/moe/experts.py index 1bb5da588..8389547de 100644 --- a/megatron/core/transformer/moe/experts.py +++ b/megatron/core/transformer/moe/experts.py @@ -2,7 +2,7 @@ import itertools from copy import deepcopy -from functools import partial +from functools import partial, wraps from math import ceil from typing import Optional, Tuple @@ -35,6 +35,54 @@ from megatron.core.transformer.transformer_config import TransformerConfig from megatron.core.transformer.utils import make_sharded_object_for_checkpoint +try: + + from megatron.core.extensions.transformer_engine import Fp8Padding, Fp8Unpadding + + HAVE_TE = True + +except ImportError: + + HAVE_TE = False + + +def expert_dist_ckpt_decorator(func): + """Decorator of shared_state_dict in expert layer for distributed checkpoint. + + Since !1940, the TP size for Expert layer can be different with Attention. + To make distributed checkpoint work in such cases, we use a decorator to + replace the default TP parallel states with expert-TP parallel states. + """ + + @wraps(func) + def wrapper(*args, **kwargs): + # Store original states + original_rank = parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK + original_size = parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE + original_group = parallel_state._TENSOR_MODEL_PARALLEL_GROUP + try: + # Set new states + parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK = ( + parallel_state.get_expert_tensor_parallel_rank() + ) + parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = ( + parallel_state.get_expert_tensor_parallel_world_size() + ) + parallel_state._TENSOR_MODEL_PARALLEL_GROUP = ( + parallel_state.get_expert_tensor_parallel_group() + ) + + # Execute the function + result = func(*args, **kwargs) + finally: + # Restore original states + parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK = original_rank + parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = original_size + parallel_state._TENSOR_MODEL_PARALLEL_GROUP = original_group + return result + + return wrapper + class GroupedMLP(MegatronModule): """An efficient implementation of the Experts layer using GroupedGEMM. @@ -66,11 +114,8 @@ def glu(x): self.activation_func = self.config.activation_func # How many feature each rank holds for fc1 and fc2, respectively. 
- self.moe_extended_tp = config.moe_extended_tp - if config.moe_extended_tp: - tp_size = parallel_state.get_tensor_and_expert_parallel_world_size() - else: - tp_size = parallel_state.get_tensor_model_parallel_world_size() + tp_size = parallel_state.get_expert_tensor_parallel_world_size() + tp_rank = parallel_state.get_expert_tensor_parallel_rank() fc1_output_size = self.config.ffn_hidden_size * self.num_local_experts if config.gated_linear_unit: @@ -109,6 +154,8 @@ def glu(x): partition_dim=1, init_method=config.init_method, params_dtype=config.params_dtype, + rank=tp_rank, + world_size=tp_size, ) _initialize_affine_weight_cpu( self.weight2, @@ -118,6 +165,8 @@ def glu(x): partition_dim=0, init_method=config.output_layer_init_method, params_dtype=config.params_dtype, + rank=tp_rank, + world_size=tp_size, ) else: self.weight1 = Parameter( @@ -138,16 +187,10 @@ def glu(x): ) if config.perform_initialization: _initialize_affine_weight_gpu( - self.weight1, - config.init_method, - partition_dim=1, - expert_parallel=self.expert_parallel, + self.weight1, config.init_method, partition_dim=1, is_expert=True ) _initialize_affine_weight_gpu( - self.weight2, - config.output_layer_init_method, - partition_dim=0, - expert_parallel=self.expert_parallel, + self.weight2, config.output_layer_init_method, partition_dim=0, is_expert=True ) setattr(self.weight1, 'allreduce', not self.expert_parallel) setattr(self.weight2, 'allreduce', not self.expert_parallel) @@ -193,6 +236,7 @@ def forward(self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: return fc2_output, None + @expert_dist_ckpt_decorator def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): """ Maps local expert to global experts. @@ -200,11 +244,6 @@ def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): whereas the optimizer states are not due to the limitation from weight transposing. That is, for finetuning scenario, the checkpoint is compatible with the SequentialMLP. 
""" - if self.moe_extended_tp: - raise NotImplementedError( - 'Currently distributed checkpointing is not supported for moe_extended_tp' - ) - sharded_state_dict = {} num_global_experts = ( parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts @@ -216,11 +255,7 @@ def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): tp_rank = parallel_state.get_tensor_model_parallel_rank() prepend_axis_num = len(sharded_offsets) - replica_id = ( - 0, - 0, - parallel_state.get_data_modulo_expert_parallel_rank(with_context_parallel=True), - ) + replica_id = (0, 0, parallel_state.get_expert_data_parallel_rank()) local_ffn_dim_size = ( self.weight2.numel() // self.num_local_experts // self.config.hidden_size @@ -532,7 +567,7 @@ def sh_ten_merge_fn(sub_state_dict, tp_axis: int, with_glu: bool): replica_id = ( 0, parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_data_modulo_expert_parallel_rank(with_context_parallel=True), + parallel_state.get_expert_data_parallel_rank(), ) # Add fake _extra_state to be compatible with SequentialMLP for expert_local_idx in range(self.num_local_experts): @@ -562,7 +597,6 @@ class TEGroupedMLP(MegatronModule): def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLPSubmodules): super().__init__(config=config) - self.moe_extended_tp = config.moe_extended_tp self.num_local_experts = num_local_experts self.input_size = self.config.hidden_size @@ -599,17 +633,10 @@ def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLP tp_comm_buffer_name='fc2', ) - def remove_extra_states_check(self, incompatible_keys): - """ - Remove extra _extra_state from unexpected keys. - These keys are for dist ckpt compatibility with SequentialMLP. - """ - keys = deepcopy(incompatible_keys.unexpected_keys) - for key in keys: - if '_extra_state' in key: - incompatible_keys.unexpected_keys.remove(key) - - self.register_load_state_dict_post_hook(remove_extra_states_check) + if self.config.fp8: + assert HAVE_TE, "FP8 requires TE." + self.fp8_padding = Fp8Padding(self.num_local_experts) + self.fp8_unpadding = Fp8Unpadding(self.num_local_experts) def forward( self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor @@ -625,6 +652,12 @@ def forward( output (torch.Tensor): The output of the local experts. 
""" tokens_per_expert = tokens_per_expert.tolist() + if self.config.fp8: + actual_tokens_per_expert = tokens_per_expert + permuted_local_hidden_states, tokens_per_expert = self.fp8_padding( + permuted_local_hidden_states, tokens_per_expert + ) + intermediate_parallel, bias_parallel = self.linear_fc1( permuted_local_hidden_states, tokens_per_expert ) @@ -646,7 +679,18 @@ def forward( raise ValueError("Only support fusion of gelu and swiglu") else: if bias_parallel is not None: - intermediate_parallel = intermediate_parallel + bias_parallel + shape = intermediate_parallel.shape + intermediate_parallel = torch.cat( + [ + t + b + for t, b in zip( + torch.split( + intermediate_parallel.view(-1, shape[-1]), tokens_per_expert + ), + bias_parallel, + ) + ] + ).view(shape) if self.config.gated_linear_unit: def glu(x): @@ -659,8 +703,13 @@ def glu(x): output, output_bias = self.linear_fc2(intermediate_parallel, tokens_per_expert) + # upad and concat the output + if self.config.fp8: + output = self.fp8_unpadding(output, actual_tokens_per_expert) + return output, output_bias + @expert_dist_ckpt_decorator def sharded_state_dict( self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[dict] = None ) -> ShardedStateDict: @@ -668,10 +717,6 @@ def sharded_state_dict( Maps local expert to global experts. The sharded state dict is interchangable with SequentialMLP's. """ - if self.moe_extended_tp: - raise NotImplementedError( - 'Currently distributed checkpointing is not supported for moe_extended_tp' - ) sharded_state_dict = {} for name, module in self._modules.items(): sub_sd = module.sharded_state_dict(f'{name}.', sharded_offsets, metadata) @@ -706,7 +751,6 @@ class SequentialMLP(MegatronModule): def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLPSubmodules): super().__init__(config=config) self.add_bias = config.add_bias_linear - self.moe_extended_tp = config.moe_extended_tp self.num_local_experts = num_local_experts self.local_experts = torch.nn.ModuleList() for _ in range(self.num_local_experts): @@ -762,13 +806,9 @@ def forward(self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: return output_local, output_bias_local + @expert_dist_ckpt_decorator def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): """Maps local expert to global experts.""" - if self.moe_extended_tp: - raise NotImplementedError( - 'Currently distributed checkpointing is not supported for moe_extended_tp' - ) - sharded_state_dict = {} num_global_experts = ( parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts @@ -801,7 +841,7 @@ def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): ), f'Expected replica_id for {k} to be in (PP, TP, DP) format, got: {replica_id}' sh_ten.replica_id = ( *replica_id[:2], - parallel_state.get_data_modulo_expert_parallel_rank(with_context_parallel=True), + parallel_state.get_expert_data_parallel_rank(), ) sharded_state_dict.update(expert_state_dict) diff --git a/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py b/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py index 872c36aaa..dd5f447dd 100644 --- a/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py +++ b/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py @@ -3,17 +3,25 @@ from typing import List, Optional, Tuple import torch +import torch.distributed from megatron.core import parallel_state, tensor_parallel -from megatron.core.tensor_parallel.mappings import 
_gather_along_first_dim_expert_parallel -from megatron.core.transformer.moe.moe_utils import permute, unpermute +from megatron.core.transformer.moe.moe_utils import ( + get_capacity, + permute, + sort_chunks_by_idxs, + unpermute, +) from megatron.core.transformer.moe.token_dispatcher import MoETokenDispatcher from megatron.core.transformer.transformer_config import TransformerConfig class MoEAlltoAllSEQTokenDispatcher(MoETokenDispatcher): """ - The legacy implementation of the AlltoAll-based token dispatcher, which handles token dispatching on the sequence level instead of token level. The core of this implementation lies each device dispatching on the entire sequence, with the hidden state being partitioned. + The legacy implementation of the AlltoAll-based token dispatcher, which handles token + dispatching on the sequence level instead of token level. The core of this implementation + lies in each device dispatching on the entire sequence, with the hidden state being partitioned. + Note: This class is a replica of the MoEAlltoAllTokenDispatcher from version 0.8. """ @@ -34,12 +42,6 @@ def __init__( self.num_local_experts = num_local_experts self.num_experts = config.num_moe_experts assert self.num_local_experts > 0, "Expected at least one expert" - if self.num_local_experts > 1: - self.expert_ids_per_ep_rank = torch.tensor( - [i % self.num_local_experts for i in range(self.num_experts)], - dtype=torch.int32, - device=torch.cuda.current_device(), - ) self.local_expert_indices = local_expert_indices assert ( len(self.local_expert_indices) == self.num_local_experts @@ -48,13 +50,23 @@ def __init__( assert ( self.local_expert_indices[i] == self.local_expert_indices[i + 1] - 1 ), "local_expert_indices must be continous" - self.router_topk = config.moe_router_topk - self.add_bias = config.add_bias_linear self.ep_size = config.expert_model_parallel_size + self.tp_size = config.tensor_model_parallel_size self.probs = None self.input_splits = None self.output_splits = None - self.num_global_tokens_per_local_expert = None + # [tp_size * ep_size, num_local_experts]. Represents the number of tokens sent + # to each local expert by all ranks. + self.num_global_tokens_per_local_expert_cpu = None + input_chunk_idxs = torch.arange(self.num_experts) + # [num_local_experts, ep_size]. Sort the input chunks by local experts. + self.sort_input_by_local_experts = ( + input_chunk_idxs.reshape(-1, self.num_local_experts).T.ravel().tolist() + ) + # [ep_size, num_local_experts]. Restore the output chunks by local experts. + self.restore_output_by_local_experts = ( + input_chunk_idxs.reshape(self.num_local_experts, -1).T.ravel().tolist() + ) # Token drop and padding. # We need to keep track of the token num if we drop tokens without padding them. @@ -65,36 +77,48 @@ def __init__( assert self.config.moe_expert_capacity_factor is not None self.capacity = None - # A cuda stream synchronization is needed in self.token_permutation() in some cases, - # because there are several non-blocking DtoH data transfers called in self.preprocess(). - # The synchronization happens at different points based on MoE settings as late as possible. - # Valid sync points are "before_permutation_1", "before_ep_alltoall", "before_finish", and "no_sync". + # A cuda stream synchronization is needed in self.token_permutation() + # in some cases, because there are several non-blocking DtoH data + # transfers called in self.preprocess(). The synchronization happens + # at different points based on MoE settings as late as possible. 
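The two index lists built above are fixed permutations of the chunks that arrive from (or return to) the AlltoAll. With `ep_size=3` and `num_local_experts=2`, so 6 chunks in total, they evaluate to:

```python
import torch

num_experts, num_local_experts = 6, 2
input_chunk_idxs = torch.arange(num_experts)

sort_input_by_local_experts = (
    input_chunk_idxs.reshape(-1, num_local_experts).T.ravel().tolist()
)
restore_output_by_local_experts = (
    input_chunk_idxs.reshape(num_local_experts, -1).T.ravel().tolist()
)

assert sort_input_by_local_experts == [0, 2, 4, 1, 3, 5]      # group chunks per local expert
assert restore_output_by_local_experts == [0, 3, 1, 4, 2, 5]  # undo that grouping afterwards
```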
+ # Valid sync points are "before_permutation_1", "before_ep_alltoall", + # "before_finish", and "no_sync". self.cuda_sync_point = "no_sync" - def preprocess(self, indices: torch.Tensor) -> torch.Tensor: + def preprocess(self, routing_map: torch.Tensor) -> torch.Tensor: """ - Preprocess token indices for AlltoAll communication and token permutation. This method computes the number of tokens assigned to each expert based on the input indices. - It also initializes the necessary data structures for AlltoAll communication, such as input - and output splits, and the mapping between global tokens and local experts. + Preprocess routing map for AlltoAll communication and token permutation. + This method computes the number of tokens assigned to each expert based on + the routing map. It also initializes the necessary data structures for + AlltoAll communication, such as input and output splits, and the mapping + between global tokens and local experts. Args: - indices (torch.Tensor): Tensor of indices mapping tokens to experts. + routing_map (torch.Tensor): The mapping of tokens to experts, with shape + [num_tokens, num_experts]. Returns: torch.Tensor: Tensor containing the number of tokens assigned to local expert. """ - num_local_tokens_per_expert = torch.histc( - indices, bins=self.num_experts, min=0, max=self.num_experts - ) + num_local_tokens_per_expert = routing_map.sum(dim=0).long() # num_local_tokens_per_expert: [num_experts] ep_size = self.config.expert_model_parallel_size if self.drop_and_pad: - # probs: [num_experts, capacity] - self.capacity = self.probs.size(1) + # Drop and pad the input to capacity. + num_tokens = routing_map.size(0) * self.config.moe_router_topk + self.capacity = get_capacity( + num_tokens=num_tokens, + num_experts=self.num_experts, + capacity_factor=self.config.moe_expert_capacity_factor, + ) + self.num_out_tokens = self.capacity * self.num_experts num_tokens_per_local_expert = torch.full( (self.num_local_experts,), self.capacity * self.ep_size, dtype=torch.long ) + self.num_global_tokens_per_local_expert_cpu = torch.full( + (self.num_experts * self.tp_size,), self.capacity, dtype=torch.long + ) return num_tokens_per_local_expert elif self.config.moe_expert_capacity_factor is not None: # Token drop but no pad. A synchronization is needed before the first @@ -103,14 +127,17 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: torch.device("cpu"), non_blocking=True ) self.cuda_sync_point = "before_permutation_1" - elif ep_size > 1: - # Token dropless and enable ep. A synchronization is needed before expert parallel - # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. - self.cuda_sync_point = "before_ep_alltoall" else: - # Token dropless and no ep. A synchronization is needed before the token_permutation() - # function returns to get the `tokens_per_expert` CPU value. - self.cuda_sync_point = "before_finish" + # Dropless + self.num_out_tokens = routing_map.size(0) * self.config.moe_router_topk + if self.ep_size > 1 or self.num_local_experts > 1: + # Token dropless and enable ep. A synchronization is needed before expert parallel + # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. + self.cuda_sync_point = "before_ep_alltoall" + else: + # Token dropless and no ep. A synchronization is needed to get the + # `tokens_per_expert` CPU value. 
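The `routing_map` bookkeeping in `preprocess` above starts from a boolean `[num_tokens, num_experts]` mask; summing over the token dimension yields the per-expert counts that later feed the AlltoAll input/output splits. A toy top-2 example:

```python
import torch

routing_map = torch.tensor(
    [[1, 0, 0, 1],   # token 0 -> experts 0 and 3
     [0, 1, 0, 1],   # token 1 -> experts 1 and 3
     [1, 0, 1, 0]],  # token 2 -> experts 0 and 2
    dtype=torch.bool,
)

num_local_tokens_per_expert = routing_map.sum(dim=0).long()
assert num_local_tokens_per_expert.tolist() == [2, 1, 1, 2]
```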
+ self.cuda_sync_point = "before_finish" if ep_size > 1: # =================================================== @@ -122,8 +149,8 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: .to(torch.device("cpu"), non_blocking=True) .numpy() ) - num_global_tokens_per_expert = _gather_along_first_dim_expert_parallel( - num_local_tokens_per_expert + num_global_tokens_per_expert = tensor_parallel.gather_from_sequence_parallel_region( + num_local_tokens_per_expert, group=self.ep_group ).reshape(ep_size, self.num_experts) self.num_global_tokens_per_local_expert = num_global_tokens_per_expert[ :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 @@ -150,17 +177,16 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: ) if self.num_local_experts > 1: - # No further synchronization is needed because torch.repeat_interleave() calls stream - # synchronization internally when the `output_size` parameter is not provided. - self.cuda_sync_point = "no_sync" - self.global_input_tokens_local_experts_indices = torch.repeat_interleave( - self.expert_ids_per_ep_rank, self.num_global_tokens_per_local_expert.ravel() + self.num_global_tokens_per_local_expert_cpu = ( + self.num_global_tokens_per_local_expert.view(-1, self.num_local_experts).to( + torch.device("cpu"), non_blocking=True + ) ) return num_tokens_per_local_expert def token_permutation( - self, hidden_states: torch.Tensor, probs: torch.Tensor, indices: torch.Tensor + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: """ Dispatch tokens to local experts using AlltoAll communication. @@ -168,7 +194,9 @@ def token_permutation( Args: hidden_states (torch.Tensor): Input token embeddings. probs (torch.Tensor): Probs of tokens assigned to experts. - indices (torch.Tensor): Indices of tokens assigned to experts. + Shape: [num_tokens, num_experts]. + routing_map (torch.Tensor): Mapping of tokens assigned to experts. + Shape: [num_tokens, num_experts]. Returns: Tuple[torch.Tensor, torch.Tensor]: @@ -178,10 +206,11 @@ def token_permutation( # Preprocess: Get the metadata for communication, permutation and computation operations. self.hidden_shape = hidden_states.shape self.probs = probs + self.routing_map = routing_map assert probs.dim() == 2, "Expected 2D tensor for probs" - assert indices.dim() == 2, "Expected 2D tensor for indices" + assert routing_map.dim() == 2, "Expected 2D tensor for routing map" hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) - tokens_per_expert = self.preprocess(indices) + tokens_per_expert = self.preprocess(routing_map) # Perform tensor parallel AlltoAll communication # hidden_states: [S*B/TP, H] -> [S*B, H/TP] @@ -193,10 +222,7 @@ def token_permutation( if self.cuda_sync_point == "before_permutation_1": torch.cuda.current_stream().synchronize() permutated_local_input_tokens, self.reversed_local_input_permutation_mapping = permute( - hidden_states, - indices, - num_out_tokens=self.num_out_tokens, - padded_mode=self.drop_and_pad, + hidden_states, routing_map, num_out_tokens=self.num_out_tokens ) # Perform expert parallel AlltoAll communication @@ -209,21 +235,13 @@ def token_permutation( self.input_splits, ) - # Permutation 2: Sort alltoall output by local experts when num_local_experts > 1. + # Permutation 2: Sort tokens by local expert. 
if self.num_local_experts > 1: - if not self.drop_and_pad: - global_input_tokens, self.reversed_global_input_permutation_mapping = permute( - global_input_tokens, self.global_input_tokens_local_experts_indices - ) - else: - global_input_tokens = global_input_tokens.reshape( - self.ep_size, self.num_local_experts, self.capacity, -1 - ) - global_input_tokens = ( - global_input_tokens.transpose(0, 1) - .reshape(self.num_local_experts * self.ep_size * self.capacity, -1) - .contiguous() - ) + global_input_tokens = sort_chunks_by_idxs( + global_input_tokens, + self.num_global_tokens_per_local_expert_cpu.ravel(), + self.sort_input_by_local_experts, + ) # Perform tensor parallel AllGather on the hidden dimension to obtain the input tokens. # global_input_tokens: [SEQL, H/TP] -> [SEQL, H] @@ -260,21 +278,13 @@ def token_unpermutation( hidden_states ) - # Unpermutation 2: expert output to AlltoAll input + # Unpermutation 2: Unsort tokens by local expert. if self.num_local_experts > 1: - if not self.drop_and_pad: - hidden_states = unpermute( - hidden_states, self.reversed_global_input_permutation_mapping - ) - else: - hidden_states = hidden_states.reshape( - self.num_local_experts, self.ep_size, self.capacity, -1 - ) - hidden_states = ( - hidden_states.transpose(0, 1) - .reshape(self.ep_size * self.num_local_experts * self.capacity, -1) - .contiguous() - ) + hidden_states = sort_chunks_by_idxs( + hidden_states, + self.num_global_tokens_per_local_expert_cpu.T.ravel(), + self.restore_output_by_local_experts, + ) # Perform expert parallel AlltoAll communication # hidden_states: [SEQL, H] -> [SEQL, H/TP] @@ -290,8 +300,8 @@ def token_unpermutation( permutated_local_input_tokens, self.reversed_local_input_permutation_mapping, probs=self.probs, - padded_mode=self.drop_and_pad, restore_shape=self.hidden_shape_before_permute, + routing_map=self.routing_map, ) # Perform tensor parallel AlltoAll communication diff --git a/megatron/core/transformer/moe/moe_layer.py b/megatron/core/transformer/moe/moe_layer.py index 8b393abc7..faefce4cf 100644 --- a/megatron/core/transformer/moe/moe_layer.py +++ b/megatron/core/transformer/moe/moe_layer.py @@ -42,15 +42,11 @@ def __init__(self, config: TransformerConfig, layer_number: int = None): self.expert_parallel_size = parallel_state.get_expert_model_parallel_world_size() assert self.expert_parallel_size > 0, "Expected non-negative expert parallel size" - if self.config.moe_extended_tp: - self.num_local_experts = self.config.num_moe_experts - local_expert_indices_offset = 0 - else: - assert self.config.num_moe_experts % self.expert_parallel_size == 0 - self.num_local_experts = self.config.num_moe_experts // self.expert_parallel_size - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_local_experts - ) + assert self.config.num_moe_experts % self.expert_parallel_size == 0 + self.num_local_experts = self.config.num_moe_experts // self.expert_parallel_size + local_expert_indices_offset = ( + parallel_state.get_expert_model_parallel_rank() * self.num_local_experts + ) self.use_shared_expert = self.config.moe_shared_expert_intermediate_size is not None self.shared_expert_overlap = self.config.moe_shared_expert_overlap @@ -144,9 +140,9 @@ def forward(self, hidden_states: torch.Tensor): # process MoE def custom_forward(hidden_states): - probs, indices = self.router(hidden_states) + probs, routing_map = self.router(hidden_states) (dispatched_input, tokens_per_expert) = self.token_dispatcher.token_permutation( - hidden_states, probs, 
indices + hidden_states, probs, routing_map ) expert_output, mlp_bias = self.experts(dispatched_input, tokens_per_expert) output, mlp_bias = self.token_dispatcher.token_unpermutation(expert_output, mlp_bias) diff --git a/megatron/core/transformer/moe/moe_utils.py b/megatron/core/transformer/moe/moe_utils.py index 02a2cccca..0c1504d41 100644 --- a/megatron/core/transformer/moe/moe_utils.py +++ b/megatron/core/transformer/moe/moe_utils.py @@ -1,6 +1,7 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. import math +from typing import Optional import torch @@ -152,165 +153,72 @@ def set_loss_scale(scale: torch.Tensor): MoEAuxLossAutoScaler.main_loss_backward_scale = scale -def permute(tokens, indices, num_out_tokens: int = None, padded_mode: bool = False): - """Permute the tokens based on the indices. Token with the same index will be grouped together. - The input indices shape is [tokens, top_k], it indicates which experts were selected by each - token separately. - Args: - tokens (torch.Tensor): The input token tensor. - indices (torch.Tensor): The token to expert indices tensor, should have a shape of - [num_tokens] or [num_tokens, topk]. - num_out_tokens (int, optional): The effective output token count, when enabling the - capacity factor, should equal the number of tokens not - dropped. By default, set to None, meaning no tokens are - dropped. - padded_mode (bool, optional): If True, indicating the indices are padded to - [num_expert, capacity] to denote selected tokens per expert. - Defaults to False. +def permute(tokens, routing_map, num_out_tokens: int = None): + """Permute the tokens and probs based on the mask. + Tokens with the same designated expert will be grouped together. + The shape of mask is [tokens, num_experts], it indicates which experts were selected + by each token. - Returns: - torch.Tensor: The permuted tensor. - torch.Tensor: The sorted_indices corresponding permuted tensor. + Args: + tokens (torch.Tensor): The input token tensor, [num_tokens, hidden]. + routing_map (torch.Tensor): The sparse token to expert mapping, [num_tokens, num_experts]. + num_out_tokens (int, optional): The number of output tokens. If None, it's set to + the number of input tokens. 
""" - if padded_mode: - return permute_with_padded_tokens(tokens, indices) + num_tokens, hidden = tokens.shape + num_experts = routing_map.shape[1] - if indices.dim() == 1: - indices = indices.unsqueeze(1) + # mask [num_tokens, num_experts] -> [num_experts, num_tokens] + routing_map = routing_map.bool().T.contiguous() - topk = indices.size(1) - flatten_indices = indices.view(-1) - sorted_indices = torch.argsort(flatten_indices, stable=True) - if num_out_tokens is not None: - sorted_indices = sorted_indices[:num_out_tokens] - moe_gather_indices = (sorted_indices // topk).unsqueeze(1).expand(-1, tokens.size(-1)) - permuted_tokens = moe_gather.apply(tokens, moe_gather_indices) + # Create a dense expert-to-token mapping from the sparse token-to-expert mapping + token_indices = ( + torch.arange(num_tokens, device=routing_map.device).unsqueeze(0).expand(num_experts, -1) + ) + sorted_indices = token_indices.masked_select(routing_map) - return permuted_tokens, sorted_indices + # use the mapping to permute the tokens + permuted_input = tokens.index_select(0, sorted_indices) + + return permuted_input, sorted_indices def unpermute( permuted_tokens: torch.Tensor, sorted_indices: torch.Tensor, + restore_shape: torch.Size, probs: torch.Tensor = None, - padded_mode: bool = False, - restore_shape: torch.Size = None, + routing_map: torch.Tensor = None, ): - """Unpermute a tensor of permuted tokens based on sorted indices, and optionally merge the - tokens with their corresponding probabilities. - - Args: - permuted_tokens (torch.Tensor): 2D tensor [num_tokens*topk, hidden]. The tensor of permuted - tokens to be unpermuted. - sorted_indices (torch.Tensor): 1D tensor [num_tokens*topk]. The tensor of sorted indices - used to unpermute the tokens. - probs (torch.Tensor, optional): 2D tensor [num_tokens, topk]. The tensor of probabilities - corresponding to the permuted tokens. If provided, - the unpermuted tokens will be merged with their respective - probabilities. - padded_mode (bool, optional): If True, indicating the indices are padded to - [num_expert, capacity] to denote selected tokens per expert. - Defaults to False. - restore_shape (torch.Size, optional): The input shape before permutation, only used in - padding mode. Defaults to None. - - Returns: - torch.Tensor: The unpermuted tokens, optionally merged with probabilities. """ - if padded_mode: - return unpermute_with_padded_tokens( - permuted_tokens, sorted_indices, probs, restore_shape=restore_shape - ) - - assert sorted_indices.numel() == permuted_tokens.size( - 0 - ), f"Got {sorted_indices.numel()} != {permuted_tokens.size(0)}." - if probs is not None: - # Unpermute and merge the tokens with their probabilities - num_unpermuted_tokens = probs.numel() - assert probs.dim() == 2, f"Expected 2D tensor for probs, got {probs.dim()} dims." - topk = probs.size(1) - else: - # Unpermute the tokens without merge - num_unpermuted_tokens = permuted_tokens.size(0) - topk = 1 - - output_size = [num_unpermuted_tokens, permuted_tokens.shape[-1]] - moe_scatter_indices = sorted_indices.unsqueeze(1).expand(-1, permuted_tokens.size(-1)) - unpermuted_tokens = moe_scatter.apply(permuted_tokens, moe_scatter_indices, output_size) - unpermuted_tokens = unpermuted_tokens.reshape(-1, topk, permuted_tokens.size(-1)) - if probs is not None: - unpermuted_tokens = unpermuted_tokens * probs.unsqueeze(-1) - unpermuted_tokens = unpermuted_tokens.sum(dim=1) - - return unpermuted_tokens + Restore the original order of tokens after permutation. 
If probs are provided, it + will also apply them to the tokens before restoring the order. - -def permute_with_padded_tokens(tokens, indices): - """Permute the tokens based on the indices, only used in padding mode. - The input indices shape is [num_expert, capacity], it indicates which tokens were selected - by each expert separately. Args: - tokens (torch.Tensor): The input token tensor. - indices (torch.Tensor): A tensor with shape [num_expert, capacity], indicating the selected - tokens for each expert. + permuted_tokens (torch.Tensor): The permuted token tensor. + sorted_indices (torch.Tensor): The indices used to sort the tokens. + restore_shape (torch.Size): The shape of the unpermuted tensor. + probs (torch.Tensor, optional): The unpermuted probs tensor, + routing_map (torch.Tensor, optional): Token to expert mapping, shape + [num_tokens, num_experts]. Returns: - torch.Tensor: The permuted tensor. - torch.Tensor: The sorted_indices corresponding permuted tensor. - """ - permuted_tokens = tokens.index_select(dim=0, index=indices.view(-1)) - - return permuted_tokens, indices - - -def unpermute_with_padded_tokens( - permuted_tokens: torch.Tensor, - indices: torch.Tensor, - probs: torch.Tensor, - restore_shape: torch.Size, -) -> torch.Tensor: + torch.Tensor: The tokens restored to their original order. """ - Unpermutes a padded permuted tokens based on sorted indices and merges the tokens with their - corresponding probabilities. - - This function takes a tensor of permuted tokens and reorders them according to the provided - indices. It also combines the tokens with their associated probabilities. + _, hidden = restore_shape - Parameters: - permuted_tokens (torch.Tensor): A 2D tensor containing permuted tokens. - indices (torch.Tensor): A tensor with shape [num_expert, capacity], indicating the selected - tokens for each expert. - probs (torch.Tensor): A tensor with the same shape as indices, containing probabilities - corresponding to each token. - restore_shape (torch.Size): The target shape for the unpermuted tokens tensor. - - Returns: - torch.Tensor: A tensor of unpermuted tokens, merged with their probabilities. + if probs is not None: + assert routing_map is not None, "Mask must be provided to permute the probs." + permuted_probs = probs.T.contiguous().masked_select(routing_map.T.contiguous()) + permuted_tokens = permuted_tokens * permuted_probs.unsqueeze(-1) - """ - # Ensure permuted_tokens is 2D - assert permuted_tokens.dim() == 2, f"Got {permuted_tokens.dim()}D." - - # Reshape and expand probabilities and indices to match permuted_tokens - probs = probs.view(-1).unsqueeze(-1) - indices = indices.view(-1, 1).expand(-1, permuted_tokens.shape[1]) - assert ( - permuted_tokens.shape == indices.shape - ), "Shape mismatch between permuted_tokens and indices." 
- - # Combine tokens with their probabilities - combined_output = probs * permuted_tokens - - # Prepare a tensor of zeros with the desired output shape - empty_tokens = torch.zeros( - restore_shape, dtype=combined_output.dtype, device=combined_output.device + # Create an output tensor filled with zeros + output_tokens = torch.zeros( + restore_shape, device=permuted_tokens.device, dtype=permuted_tokens.dtype ) - - # Scatter the combined tokens back to their original positions - unpermuted_tokens = torch.scatter_add(empty_tokens, 0, indices, combined_output) - - return unpermuted_tokens + # Scatter add the permuted_input back to the original positions + output_tokens.scatter_add_(0, sorted_indices.unsqueeze(1).expand(-1, hidden), permuted_tokens) + return output_tokens def sort_chunks_by_idxs(input: torch.Tensor, split_sizes: torch.Tensor, sorted_idxs: torch.Tensor): @@ -323,7 +231,7 @@ def sort_chunks_by_idxs(input: torch.Tensor, split_sizes: torch.Tensor, sorted_i def topk_softmax_with_capacity( logits: torch.Tensor, topk: int, - capacity_factor: float = None, + capacity_factor: Optional[float] = None, pad_to_capacity: bool = False, drop_policy: str = "probs", use_pre_softmax: bool = False, @@ -339,15 +247,15 @@ def topk_softmax_with_capacity( drop_policy (str): The policy to drop tokens. Can be either "prob" or "position". If "prob", the tokens with the lowest probabilities will be dropped. If "position", tokens at the end of each batch will be dropped. - Returns: - Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: Probs, indices and tokens_per_expert - tensor. - - (1) If there's no token padding, the shape of probs and indices is [tokens, top_k], - indicating the selected experts for each token. - (2) If there's token padding, the shape of probs and indices is [num_expert, capacity], - indicating the tokens selected for each expert. + Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + - routing_probs (torch.Tensor): A tensor of shape [num_tokens, num_experts] containing + the routing probabilities for each token to each expert. + - routing_map (torch.Tensor): A mask tensor of shape [num_tokens, num_experts] + indicating which experts were selected for each token. True values represent + the selected experts. + - tokens_per_expert (torch.Tensor): A tensor of shape [num_experts] containing + the number of local tokens assigned to each expert. """ assert logits.dim() == 2, f"Expected 2D logits [num_tokens, num_experts], got {logits.dim()}." num_tokens = logits.shape[0] @@ -365,52 +273,40 @@ def topk_softmax_with_capacity( scores, top_indices = torch.topk(logits, k=topk, dim=1) probs = torch.softmax(scores, dim=-1, dtype=torch.float32).type_as(logits) + # TODO Try using element-wise operations instead of scatter? 
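To make the mask-based flow concrete, the following is a small self-contained sketch (not part of the patch; toy sizes and random logits are invented for illustration). It builds the dense routing_map the same way topk_softmax_with_capacity does and round-trips tokens through the permute/unpermute logic shown above.

import torch

num_tokens, hidden, num_experts, topk = 4, 2, 3, 2
tokens = torch.arange(num_tokens * hidden, dtype=torch.float32).reshape(num_tokens, hidden)
logits = torch.randn(num_tokens, num_experts)

# Dense routing tensors, mirroring the scatter-based construction above.
scores, top_indices = torch.topk(logits, k=topk, dim=1)
probs = torch.softmax(scores, dim=-1)
dense_probs = torch.zeros_like(logits).scatter(1, top_indices, probs)       # [num_tokens, num_experts]
routing_map = torch.zeros_like(logits).int().scatter(1, top_indices, 1).bool()

# permute(): group token copies by expert using the boolean map.
expert_to_token = routing_map.T.contiguous()                                 # [num_experts, num_tokens]
token_indices = torch.arange(num_tokens).unsqueeze(0).expand(num_experts, -1)
sorted_indices = token_indices.masked_select(expert_to_token)
permuted = tokens.index_select(0, sorted_indices)                            # [num_tokens * topk, hidden]

# unpermute() without prob weighting: scatter-add the copies back to their rows.
restored = torch.zeros_like(tokens)
restored.scatter_add_(0, sorted_indices.unsqueeze(1).expand(-1, hidden), permuted)
assert torch.allclose(restored, tokens * topk)   # each token was duplicated topk times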
+ topk_masked_gates = torch.zeros_like(logits).scatter(1, top_indices, probs) + topk_map = torch.zeros_like(logits).int().scatter(1, top_indices, 1).bool() + tokens_per_expert = topk_map.sum(dim=0) + if capacity_factor is None: # TopK without capacity - if deterministic_mode: - tokens_per_expert = torch.bincount(top_indices.view(-1), minlength=num_experts) - else: - tokens_per_expert = torch.histc(top_indices, bins=num_experts, min=0, max=num_experts) - return probs, top_indices, tokens_per_expert + return topk_masked_gates, topk_map, tokens_per_expert else: # TopK with capacity expert_capacity = get_capacity( num_tokens=num_tokens * topk, num_experts=num_experts, capacity_factor=capacity_factor ) - # TopK selection, Maskout unused experts - topk_masked_gates = torch.zeros_like(logits).scatter(1, top_indices, probs) - topk_mask = torch.zeros_like(logits).scatter(1, top_indices, 1) # Maskout exceeded tokens if drop_policy == "probs": - capacity_probs, capacity_indices = torch.topk( + _, capacity_indices = torch.topk( topk_masked_gates, k=expert_capacity, dim=0, sorted=False ) - capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1) + capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1).bool() elif drop_policy == "position": - _, capacity_indices = torch.topk(topk_mask, k=expert_capacity, dim=0, sorted=False) - capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1) - capacity_probs = torch.gather(topk_masked_gates, 0, capacity_indices) + _, capacity_indices = torch.topk(topk_map.int(), k=expert_capacity, dim=0, sorted=False) + capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1).bool() else: raise ValueError(f"Invalid drop_policy: {drop_policy}") if pad_to_capacity: - final_probs, final_indices = ( - capacity_probs.T.contiguous(), - capacity_indices.T.contiguous(), - ) - tokens_per_expert_before_capacity = topk_mask.sum(dim=0) + final_map = capacity_mask + final_probs = topk_masked_gates * final_map else: # Get exceed mask and maskout exceeded probs and indices - final_mask = torch.logical_and(topk_mask, capacity_mask) - drop_mask = torch.logical_not(final_mask) - exceed_mask = torch.gather(drop_mask, 1, top_indices) - final_probs = probs * torch.logical_not(exceed_mask) - final_indices = top_indices.clone().masked_fill_( - exceed_mask, torch.iinfo(torch.long).max - ) - tokens_per_expert_before_capacity = topk_mask.sum(dim=0) - return final_probs, final_indices, tokens_per_expert_before_capacity + final_map = torch.logical_and(topk_map, capacity_mask) + final_probs = topk_masked_gates * final_map + return final_probs, final_map, tokens_per_expert def save_to_aux_losses_tracker( @@ -509,50 +405,3 @@ def track_moe_metrics( ) clear_aux_losses_tracker() - - -class moe_gather(torch.autograd.Function): - """Gather the input tensor based on the map tensor.""" - - @staticmethod - def forward(ctx, input_, map_): - """Gather the input tensor based on the map tensor.""" - ctx.input_size = input_.size() - ctx.map = map_ - return torch.gather(input_, 0, map_) - - @staticmethod - def backward(ctx, grad_output): - """Scatter the grad_output tensor based on the map tensor.""" - input_size = ctx.input_size - map_ = ctx.map - - output = torch.zeros( - input_size, dtype=grad_output.dtype, device=torch.cuda.current_device() - ) - output.scatter_add_(0, map_, grad_output) - return output, None, None - - -class moe_scatter(torch.autograd.Function): - """Scatter the input tensor based on the map tensor.""" - - @staticmethod - def 
forward(ctx, input_, map_, output_size=None): - """Scatter the input tensor based on the map tensor.""" - ctx.map = map_ - - if output_size is not None: - output = torch.zeros(output_size, dtype=input_.dtype, device=input_.device) - else: - output = torch.zeros_like(input_) - - output.scatter_add_(0, map_, input_) - return output - - @staticmethod - def backward(ctx, grad_output): - """Gather the grad_output tensor based on the map tensor.""" - map_ = ctx.map - grad_input = torch.gather(grad_output, 0, map_) - return grad_input, None, None, None diff --git a/megatron/core/transformer/moe/router.py b/megatron/core/transformer/moe/router.py index 3e85ec53c..e03bd5c98 100644 --- a/megatron/core/transformer/moe/router.py +++ b/megatron/core/transformer/moe/router.py @@ -5,11 +5,7 @@ import torch from megatron.core import parallel_state -from megatron.core.tensor_parallel import ( - gather_from_sequence_parallel_region, - get_cuda_rng_tracker, - get_data_parallel_rng_tracker_name, -) +from megatron.core.tensor_parallel import gather_from_sequence_parallel_region from megatron.core.transformer.module import MegatronModule from megatron.core.transformer.moe.moe_utils import ( MoEAuxLossAutoScaler, @@ -39,14 +35,11 @@ def __init__(self, config: TransformerConfig) -> None: self.layer_number = None # Initialize the gate weights. + # TODO: Add support for GPU initialization, which requires updating the golden values. self.weight = torch.nn.Parameter( torch.empty((self.config.num_moe_experts, self.config.hidden_size), dtype=torch.float32) ) if config.perform_initialization: - if get_cuda_rng_tracker().is_initialized(): - with get_cuda_rng_tracker().fork(get_data_parallel_rng_tracker_name()): - config.init_method(self.weight) - else: config.init_method(self.weight) self.weight.data = self.weight.data.to(dtype=config.params_dtype) setattr(self.weight, 'sequence_parallel', config.sequence_parallel) @@ -74,8 +67,8 @@ def routing(self, logits: torch.Tensor): logits (torch.Tensor): Logits tensor. Returns: - Tuple[torch.Tensor, torch.Tensor]: - Tuple of tensors representing max probs and the indices. + Tuple[torch.Tensor, torch.Tensor]: A tuple containing token assignment + probabilities and mapping. """ raise NotImplementedError("Routing function not implemented.") @@ -115,7 +108,8 @@ def sinkhorn_load_balancing(self, logits: torch.Tensor): logits (torch.Tensor): The logits tensor. Returns: - torch.Tensor: The logits tensor after applying sinkhorn routing. + Tuple[torch.Tensor, torch.Tensor]: A tuple containing token assignment + probabilities and mask. """ def _sinkhorn_activation(logits): @@ -133,11 +127,12 @@ def _sinkhorn_activation(logits): ) # explicit fp32 conversion for stability _, indices = torch.topk(norm_logits, k=self.topk, dim=1) logits = _sinkhorn_activation(logits) - scores = torch.gather(logits, 1, indices) else: logits = _sinkhorn_activation(logits) - scores, indices = torch.topk(logits, k=self.topk, dim=1) - return scores, indices + _, indices = torch.topk(logits, k=self.topk, dim=1) + map = torch.zeros_like(logits).int().scatter(1, indices, 1).bool() + scores = logits * map + return scores, map def aux_loss_load_balancing(self, logits: torch.Tensor): """Apply loss-based load balancing to the logits tensor. @@ -146,10 +141,10 @@ def aux_loss_load_balancing(self, logits: torch.Tensor): logits (torch.Tensor): the logits tensor after gating, shape: [num_tokens, num_experts]. Returns: - probs (torch.Tensor): the probabilities tensor after load balancing. 
- indices (torch.Tensor): the indices tensor after top-k selection. + probs (torch.Tensor): The probabilities of token to experts assignment. + indices (torch.Tensor): The mask of token to experts assignment. """ - probs, indices, tokens_per_expert = topk_softmax_with_capacity( + probs, routing_map, tokens_per_expert = topk_softmax_with_capacity( logits, self.topk, capacity_factor=self.config.moe_expert_capacity_factor, @@ -163,7 +158,7 @@ def aux_loss_load_balancing(self, logits: torch.Tensor): # Apply load balancing loss scores = torch.softmax(logits, dim=-1, dtype=torch.float32) probs = self.apply_load_balancing_loss(scores, tokens_per_expert, activation=probs) - return probs, indices + return probs, routing_map def apply_load_balancing_loss( self, @@ -174,10 +169,10 @@ def apply_load_balancing_loss( """Applies auxiliary loss to the MoE layer. Args: - probs (torch.Tensor): - The probs output by the router for each token. [num_tokens, num_experts] - num_local_tokens_per_expert (torch.Tensor): - The number of tokens per expert. [num_experts] + probs (torch.Tensor): The probs output by the router for each token. + [num_tokens, num_experts] + num_local_tokens_per_expert (torch.Tensor): The number of tokens per expert. + [num_experts] activation (torch.Tensor): The activation tensor to attach the gradient function to. Returns: @@ -258,8 +253,9 @@ def routing(self, logits: torch.Tensor): logits (torch.Tensor): Logits tensor after gating. Returns: - probs (torch.Tensor): the probabilities tensor after load balancing. - indices (torch.Tensor): the indices tensor after top-k selection. + probs (torch.Tensor): The probabilities of token to experts assignment. + routing_map (torch.Tensor): The mapping of token to experts assignment, + with shape [num_tokens, num_experts]. 
""" logits = logits.view(-1, self.config.num_moe_experts) @@ -271,12 +267,12 @@ def routing(self, logits: torch.Tensor): logits = gather_from_sequence_parallel_region(logits) if self.routing_type == "sinkhorn": - scores, indices = self.sinkhorn_load_balancing(logits) + scores, routing_map = self.sinkhorn_load_balancing(logits) elif self.routing_type == "aux_loss": - scores, indices = self.aux_loss_load_balancing(logits) + scores, routing_map = self.aux_loss_load_balancing(logits) elif self.routing_type == "none": # A naive top-k routing without load balancing - scores, indices, _ = topk_softmax_with_capacity( + scores, routing_map, _ = topk_softmax_with_capacity( logits, self.topk, capacity_factor=self.config.moe_expert_capacity_factor, @@ -288,7 +284,7 @@ def routing(self, logits: torch.Tensor): else: raise ValueError(f"Unsupported MoE routing type: {self.routing_type}") - return scores, indices + return scores, routing_map def forward(self, input: torch.Tensor): """ @@ -304,6 +300,6 @@ def forward(self, input: torch.Tensor): logits = self.gating(input) logits = logits.view(-1, self.config.num_moe_experts) - scores, indices = self.routing(logits) + scores, routing_map = self.routing(logits) - return scores, indices + return scores, routing_map diff --git a/megatron/core/transformer/moe/shared_experts.py b/megatron/core/transformer/moe/shared_experts.py index c2d9c188e..1d4b2a628 100644 --- a/megatron/core/transformer/moe/shared_experts.py +++ b/megatron/core/transformer/moe/shared_experts.py @@ -17,14 +17,10 @@ reduce_from_tensor_model_parallel_region, reduce_scatter_to_sequence_parallel_region, ) -from megatron.core.tensor_parallel.random import ( - get_cuda_rng_tracker, - get_data_parallel_rng_tracker_name, -) from megatron.core.transformer.mlp import MLP from megatron.core.transformer.spec_utils import ModuleSpec from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.utils import make_sharded_tensor_for_checkpoint +from megatron.core.utils import is_torch_min_version, make_sharded_tensor_for_checkpoint class SharedExpertMLP(MLP): @@ -46,12 +42,9 @@ def __init__(self, config: TransformerConfig, spec: ModuleSpec): self.use_shared_expert_gate = spec.params.get("gate", False) if self.use_shared_expert_gate: + # TODO: Add support for GPU initialization, which requires updating the golden values. self.gate_weight = torch.nn.Parameter(torch.empty((1, self.config.hidden_size))) if config.perform_initialization: - if get_cuda_rng_tracker().is_initialized(): - with get_cuda_rng_tracker().fork(get_data_parallel_rng_tracker_name()): - config.init_method(self.gate_weight) - else: config.init_method(self.gate_weight) self.gate_weight.data = self.gate_weight.data.to(dtype=config.params_dtype) setattr(self.gate_weight, 'sequence_parallel', self.config.sequence_parallel) @@ -235,28 +228,17 @@ def get_output(self): return output -TORCH_MAJOR = int(torch.__version__.split(".")[0]) -TORCH_MINOR = int(torch.__version__.split(".")[1]) -TORCH_LAST = torch.__version__.split(".")[2] - - def set_tensor_grad_fn_sequence_sr(tensor, value): """ Set sequence_sr for the grad_fn of a tensor to control the backward order. For older PyTorch version, do nothing (backward order is not changed). The bigger the value is, the earlier the grad_fn is scheduled. 
""" - if ( - (TORCH_MAJOR > 2) - or (TORCH_MAJOR == 2 and TORCH_MINOR > 2) - or (TORCH_MAJOR == 2 and TORCH_MINOR == 2 and '+' not in TORCH_LAST) - ): - # In NVIDIA PyTorch container 24.01, the PyTorch version is 2.2.0a0+81ea7a4, - # which does not contian the set_sequence_nr commit. + if is_torch_min_version("2.2.0"): if tensor is not None and tensor.grad_fn is not None: tensor.grad_fn._set_sequence_nr(value) else: warnings.warn( "WARNING : PyTorch is too old to set sequence_sr and the performance may not " - "optimal. Please use PyTorch >= 2.2.0 for better performance." + "be optimal. Please use PyTorch >= 2.2.0 for better performance." ) diff --git a/megatron/core/transformer/moe/token_dispatcher.py b/megatron/core/transformer/moe/token_dispatcher.py index db1b1920f..dbd768dda 100644 --- a/megatron/core/transformer/moe/token_dispatcher.py +++ b/megatron/core/transformer/moe/token_dispatcher.py @@ -5,15 +5,19 @@ import torch -from megatron.core import parallel_state, tensor_parallel -from megatron.core.tensor_parallel.mappings import ( - _gather_along_first_dim_moe, +from megatron.core.parallel_state import ( + get_expert_model_parallel_group, + get_expert_tensor_and_model_parallel_group, + get_expert_tensor_parallel_group, + get_expert_tensor_parallel_rank, +) +from megatron.core.tensor_parallel import ( + all_to_all, gather_from_sequence_parallel_region, reduce_scatter_to_sequence_parallel_region, ) from megatron.core.transformer.moe.moe_utils import ( - moe_gather, - moe_scatter, + get_capacity, permute, sort_chunks_by_idxs, unpermute, @@ -44,13 +48,39 @@ def __init__(self, config: TransformerConfig) -> None: self.config = config self.shared_experts: Optional[SharedExpertMLP] = None + self.tp_size = config.expert_tensor_parallel_size + self.ep_size = config.expert_model_parallel_size + + @property + def ep_group(self): + """Get expert model parallel group.""" + return get_expert_model_parallel_group() + + @property + def tp_group(self): + """Get expert tensor parallel group.""" + return get_expert_tensor_parallel_group() + + @property + def tp_rank(self): + """Get expert tensor parallel rank.""" + return get_expert_tensor_parallel_rank() + + @property + def tp_ep_group(self): + """Get expert tensor and model parallel group.""" + return get_expert_tensor_and_model_parallel_group() + @abstractmethod - def token_permutation(self, tokens: torch.Tensor, indices: torch.Tensor): + def token_permutation( + self, tokens: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor + ): """Dispatch tokens to experts. Args: tokens (torch.Tensor): Input tokens. - indices (torch.Tensor): indices tensor. + probs (torch.Tensor): The routing probability tensor [num_tokens, num_experts]. + routing_map (torch.Tensor): Token to expert mapping tensor. Returns: torch.Tensor: Tokens tensor. @@ -58,15 +88,12 @@ def token_permutation(self, tokens: torch.Tensor, indices: torch.Tensor): raise NotImplementedError("Dispatch function not implemented.") @abstractmethod - def token_unpermutation( - self, expert_output: torch.Tensor, probs: torch.Tensor, indices: torch.Tensor - ): + def token_unpermutation(self, expert_output: torch.Tensor, bias: torch.Tensor = None): """Restores the expert output to its original ordering. Args: expert_output (torch.Tensor): The output tensor from the expert models. - probs (torch.Tensor): Each token's score with each expert. - indices (torch.Tensor): The indices used to reorder the expert output. + bias (torch.Tensor): The bias tensor. 
Returns: (torch.Tensor, torch.Tensor): Unpermuted activation and optional bias. @@ -107,24 +134,21 @@ def __init__( self.global_local_map = None def token_permutation( - self, hidden_states: torch.Tensor, max_prob: torch.Tensor, max_ind: torch.Tensor + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor ): """Dispatch tokens to local experts. It's composed of two stages: - (1) Permute the tokens across the expert parallel devices. After this stage, + (1) Gather the tokens across the expert parallel devices. After this stage, each device receives all of the tokens assigned to its local set of experts in its local HBM. (2) Permute the tokens locally so that they are grouped by their expert - assignment. After the stage (1), the tokens are grouped by which device - they came from. We re-order them locally for subsequent efficient computation. + assignment. Args: hidden_states: 3D tensor [S/TP, B, H]. Input tokens. - max_prob: 2D tensor [S/TP*B, topk]. Each row of max_prob contains + probs: 2D tensor [S/TP*B, num_experts]. Each row of probs contains the probility distribution across `topk` experts for one local token. - For 'aux_loss' load balancing, the sum of the values in each row is 1, - thus for `top1` gating, it degenerates into a full 1 tensor. - max_ind: 2D tensor [num_local_tokens, topk], where - `num_local_tokens=S/TP*B`. Token assignment to global experts. + routing_map: 2D tensor [S/TP*B, num_experts], representing token assignment to + global experts. Returns: permuted_local_hidden_states: Permutation of tokens to local experts group. @@ -135,79 +159,38 @@ def token_permutation( hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) # Permute the tokens across the expert parallel devices. - if (self.config.tensor_model_parallel_size > 1) or ( - self.config.expert_model_parallel_size > 1 - ): + if self.tp_size > 1 or self.ep_size > 1: ## local_indices calculation with torch.no_grad(): - # [num_local_tokens, topk] -> [num_global_tokens, topk], where: + # [num_local_tokens, num_experts] -> [num_global_tokens, num_experts], where: # num_local_tokens=(S/TP)*B, num_global_tokens=S*B*EP - global_indices = tensor_parallel.gather_from_sequence_parallel_region_to_moe( - max_ind + routing_map = gather_from_sequence_parallel_region( + routing_map, group=self.tp_ep_group ) - # Create a mask of mapping between global and local tokens where each - # element is True if it's between the local_expert_indices - global_local_mask = (global_indices >= self.local_expert_indices[0]) & ( - global_indices <= self.local_expert_indices[-1] - ) - local_indices = global_indices.masked_select(global_local_mask) ## local_probs calculation - # max_prob: [S/TP*B, topk] -> global_probs: [S*B*EP, topk] - global_probs = tensor_parallel.gather_from_sequence_parallel_region_to_moe(max_prob) - self.local_probs = global_probs.masked_select(global_local_mask) - self.local_probs = self.local_probs.view(-1, 1) + # max_prob: [S/TP*B, num_experts] -> global_probs: [S*B*EP, num_experts] + probs = gather_from_sequence_parallel_region(probs, group=self.tp_ep_group) + # Note that this allgather spans the communication domain of TP*EP. 
# [(S/TP)*B, H] -> [((S/TP)*B)*(TP*EP), H] = [S*B*EP, H] - global_hidden_states = tensor_parallel.gather_from_sequence_parallel_region_to_moe( - hidden_states, use_global_buffer=True + hidden_states = gather_from_sequence_parallel_region( + hidden_states, group=self.tp_ep_group, use_global_buffer=True ) - # Reshape global_local_mask to be compatible with Tensor.gather - global_local_map = global_local_mask.nonzero()[:, 0] - self.global_local_map = global_local_map.view(-1, 1).expand(-1, hidden_states.shape[-1]) - local_hidden_states = moe_gather.apply(global_hidden_states, self.global_local_map) - else: - if self.router_topk > 1: - global_local_mask = torch.ones_like(max_ind).bool() - local_indices = max_ind.masked_select(global_local_mask) - self.local_probs = max_prob.masked_select(global_local_mask) - self.local_probs = self.local_probs.view(-1, 1) - global_local_map = global_local_mask.nonzero()[:, 0] - self.global_local_map = global_local_map.view(-1, 1).expand( - -1, hidden_states.shape[-1] - ) - local_hidden_states = torch.gather(hidden_states, 0, self.global_local_map) - else: - local_indices = max_ind - self.local_probs = max_prob.view(-1, 1) - local_hidden_states = hidden_states - self.global_local_map = None - - with torch.no_grad(): - # The indices of local_indices that give its sorted order along dim 0. - self.indices = torch.argsort(local_indices, dim=0) - if self.config.deterministic_mode: - tokens_per_expert = torch.bincount( - local_indices.view(-1), minlength=self.config.num_moe_experts - ) - if self.num_local_experts < self.config.num_moe_experts: - tokens_per_expert = tokens_per_expert[ - self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 - ] - else: - tokens_per_expert = torch.histc( - local_indices, - bins=self.num_local_experts, - min=self.local_expert_indices[0], - max=self.local_expert_indices[-1], - ) - tokens_per_expert = tokens_per_expert.cpu().to(torch.long) + self.hidden_shape_before_permute = hidden_states.shape + + # The routing map and probs that for local experts. + self.local_map = routing_map[ + :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 + ].contiguous() + self.local_probs = probs[ + :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 + ].contiguous() - # Stage2: permute the tokens locally so that they are grouped by their expert assignment - # Reshape indices to be compatible with Tensor.gather + tokens_per_expert = self.local_map.sum(dim=0).long().cpu() - permuted_local_hidden_states, self.reversed_local_input_permutation_mapping = permute( - local_hidden_states, local_indices + (permuted_local_hidden_states, self.reversed_local_input_permutation_mapping) = permute( + hidden_states, self.local_map ) return permuted_local_hidden_states, tokens_per_expert @@ -227,81 +210,49 @@ def token_unpermutation(self, hidden_states: torch.Tensor, bias: torch.Tensor = output_total: un-permuted updated hidden states output from all local experts with shape of [S/TP, B, H] """ - # Stage1: unpermute the tokens and bias locally respectively. # Scale the expert output prior to reduction and subsequent to local unpermutation if k > 1. 
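A tiny illustration of the new slicing above (toy shapes, invented values, not from the patch): after the allgather, each rank keeps only the routing-map columns of its local experts, and tokens_per_expert is just a column sum of that boolean slice.

import torch

num_global_tokens, num_experts = 8, 4          # toy sizes
routing_map = torch.rand(num_global_tokens, num_experts) > 0.5
local_expert_indices = [2, 3]                  # hypothetical: this rank owns experts 2..3

local_map = routing_map[:, local_expert_indices[0] : local_expert_indices[-1] + 1].contiguous()
tokens_per_expert = local_map.sum(dim=0).long().cpu()
assert tokens_per_expert.shape == (2,)         # one count per local expert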
- + # Unpermute the expert output and bias + permuted_probs = self.local_probs.T.contiguous().masked_select( + self.local_map.T.contiguous() + ) + hidden_states = hidden_states * permuted_probs.unsqueeze(-1) unpermuted_local_hidden = unpermute( - hidden_states, self.reversed_local_input_permutation_mapping + hidden_states, + self.reversed_local_input_permutation_mapping, + restore_shape=self.hidden_shape_before_permute, ) - unpermuted_local_hidden = unpermuted_local_hidden * self.local_probs unpermuted_local_bias = None if self.add_bias: assert bias is not None - unpermuted_local_bias = torch.zeros_like(hidden_states) - unpermuted_local_bias = unpermute(bias, self.reversed_local_input_permutation_mapping) - unpermuted_local_bias = unpermuted_local_bias * self.local_probs + bias = bias * permuted_probs.unsqueeze(-1) + unpermuted_local_bias = unpermute( + bias, + self.reversed_local_input_permutation_mapping, + restore_shape=self.hidden_shape_before_permute, + ) output_total = unpermuted_local_hidden output_bias_total = unpermuted_local_bias - # Unpermute the tokens across expert parallel devices. - if (self.config.tensor_model_parallel_size > 1) or ( - self.config.expert_model_parallel_size > 1 - ): - assert ( - self.global_local_map is not None - ), "global_local_map is necessary for `AllGather`." - ep_group_size = parallel_state.get_tensor_and_expert_parallel_world_size() - # hidden_shape: [S/TP, B, H], gloal_num_tokens = S/TP*B*(TP*EP) - global_num_tokens = self.hidden_shape[0] * self.hidden_shape[1] * ep_group_size - global_hidden_shape = [global_num_tokens, hidden_states.shape[-1]] - assert self.global_local_map.shape == unpermuted_local_hidden.shape - unpermuted_global_hidden = moe_scatter.apply( - unpermuted_local_hidden, self.global_local_map, global_hidden_shape - ) - output_total = tensor_parallel.reduce_scatter_to_sequence_parallel_region_from_moe( - unpermuted_global_hidden + # Unpermute the tokens across ranks. + if self.tp_size > 1 or self.ep_size > 1: + output_total = reduce_scatter_to_sequence_parallel_region( + output_total, group=self.tp_ep_group ) if self.add_bias: # Unpermute the bias across expert parallel devices. 
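Because permute() emits token copies in expert-major order, the probabilities have to be flattened the same way before the elementwise scale above. A minimal ordering check (toy tensors, not from the patch):

import torch

local_map = torch.tensor([[1, 0], [1, 1], [0, 1], [1, 0]]).bool()   # [num_tokens=4, num_local_experts=2]
local_probs = torch.rand(4, 2) * local_map

# Transposing before masked_select walks experts first, matching permute()'s output order.
permuted_probs = local_probs.T.contiguous().masked_select(local_map.T.contiguous())
# Expert 0 gets tokens 0, 1, 3 and expert 1 gets tokens 1, 2 -> 5 scale factors,
# ready to broadcast against the permuted hidden states via unsqueeze(-1).
assert permuted_probs.shape == (5,)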
- unpermuted_global_bias = torch.zeros_like(unpermuted_global_hidden) - unpermuted_global_bias = unpermuted_global_bias.scatter_add( - 0, self.global_local_map, unpermuted_local_bias - ) - output_bias_total = ( - tensor_parallel.reduce_scatter_to_sequence_parallel_region_from_moe( - unpermuted_global_bias - ) - ) # bias is duplicated across tensor parallelism ranks; - # reduce scatter reduces bias across tensor parallel_ranks output_bias_total = ( - output_bias_total / parallel_state.get_tensor_model_parallel_world_size() - ) - else: - if self.router_topk > 1: - global_num_tokens = self.hidden_shape[0] * self.hidden_shape[1] - global_hidden_shape = [global_num_tokens, hidden_states.shape[-1]] - unpermuted_global_hidden = torch.zeros( - global_hidden_shape, - dtype=hidden_states.dtype, - device=torch.cuda.current_device(), - ) - output_total = unpermuted_global_hidden.scatter_add( - 0, self.global_local_map, unpermuted_local_hidden - ) - if self.add_bias: - unpermuted_global_bias = torch.zeros_like(unpermuted_global_hidden) - output_bias_total = unpermuted_global_bias.scatter_add( - 0, self.global_local_map, unpermuted_local_bias + reduce_scatter_to_sequence_parallel_region( + output_bias_total, group=self.tp_ep_group ) + / self.tp_size + ) output_total = output_total.view(self.hidden_shape) if self.add_bias: output_bias_total = output_bias_total.view(self.hidden_shape) - else: - output_bias_total = None return output_total, output_bias_total @@ -309,6 +260,11 @@ def token_unpermutation(self, hidden_states: torch.Tensor, bias: torch.Tensor = class MoEAlltoAllTokenDispatcher(MoETokenDispatcher): """ AlltoAll-based token dispatcher. + + The workflow of AlltoAll token dispatcher is as follows: + (1) preprocess(): calculate necessary metadata for communication and permute + (2) token_permutation(): permute->A2A(EP)->AG(TP)->sort_chunk(if num_local_experts>1) + (3) token_unpermutation(): sort_chunk(if num_local_experts>1)->RS(TP)->A2A(EP)->unpermute """ def __init__( @@ -335,8 +291,6 @@ def __init__( assert ( self.local_expert_indices[i] == self.local_expert_indices[i + 1] - 1 ), "local_expert_indices must be continous" - self.ep_size = config.expert_model_parallel_size - self.tp_size = config.tensor_model_parallel_size self.probs = None # [ep_size]. Represents the number of tokens sent by the current rank to other @@ -379,33 +333,34 @@ def __init__( self.shared_experts = None - def preprocess(self, indices: torch.Tensor) -> torch.Tensor: + def preprocess(self, routing_map: torch.Tensor) -> torch.Tensor: """ - Preprocess token indices for AlltoAll communication and token permutation. This method - computes the number of tokens assigned to each expert based on the input indices. + Preprocess token routing map for AlltoAll communication and token permutation. + + This method computes the number of tokens assigned to each expert based on the routing_map. It also initializes the necessary data structures for AlltoAll communication, such as input and output splits, and the mapping between global tokens and local experts. Args: - indices (torch.Tensor): Tensor of indices mapping tokens to experts. + routing_map (torch.Tensor): The mapping of tokens to experts, with shape + [num_tokens, num_experts]. Returns: torch.Tensor: Tensor containing the number of tokens assigned to local expert. 
""" - if self.config.deterministic_mode: - num_local_tokens_per_expert = torch.bincount( - indices.view(-1), minlength=self.num_experts - ) - else: - num_local_tokens_per_expert = torch.histc( - indices, bins=self.num_experts, min=0, max=self.num_experts - ) - # num_local_tokens_per_expert: [num_experts] + # [num_experts], number of tokens assigned to each expert from the current rank's input. + num_local_tokens_per_expert = routing_map.sum(dim=0).long() - tp_rank = parallel_state.get_tensor_model_parallel_rank() if self.drop_and_pad: - # probs: [num_experts, local_capacity] - self.capacity = self.probs.size(1) + # Drop and pad the input to capacity. + num_tokens = routing_map.size(0) * self.config.moe_router_topk + self.capacity = get_capacity( + num_tokens=num_tokens, + num_experts=self.num_experts, + capacity_factor=self.config.moe_expert_capacity_factor, + ) + self.num_out_tokens = self.capacity * self.num_experts + # [num_local_experts], number of tokens processed by each expert. num_tokens_per_local_expert = torch.full( (self.num_local_experts,), self.capacity * self.tp_size * self.ep_size, @@ -417,20 +372,24 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: ) return num_tokens_per_local_expert elif self.config.moe_expert_capacity_factor is not None: - # Token drop but no pad. A synchronization is needed before the first + # Drop tokens to capacity, no padding. + # A synchronization is needed before the first # permutation to get the `num_out_tokens` CPU value. self.num_out_tokens = num_local_tokens_per_expert.sum().to( torch.device("cpu"), non_blocking=True ) self.cuda_sync_point = "before_permutation_1" - elif self.ep_size > 1 or self.num_local_experts > 1: - # Token dropless and enable ep. A synchronization is needed before expert parallel - # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. - self.cuda_sync_point = "before_ep_alltoall" else: - # Token dropless and no ep. A synchronization is needed before the token_permutation() - # function returns to get the `tokens_per_expert` CPU value. - self.cuda_sync_point = "before_finish" + # Dropless + self.num_out_tokens = routing_map.size(0) * self.config.moe_router_topk + if self.ep_size > 1 or self.num_local_experts > 1: + # Token dropless and enable ep. A synchronization is needed before expert parallel + # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. + self.cuda_sync_point = "before_ep_alltoall" + else: + # Token dropless and no ep. A synchronization is needed before the returns + # to get the `tokens_per_expert` CPU value for + self.cuda_sync_point = "before_finish" if self.ep_size > 1 or self.tp_size > 1: # =================================================== @@ -447,7 +406,9 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: # expert by all ranks. # [tp_size, ep_size, num_experts] num_global_tokens_per_expert = ( - _gather_along_first_dim_moe(num_local_tokens_per_expert) + gather_from_sequence_parallel_region( + num_local_tokens_per_expert, group=self.tp_ep_group + ) .reshape(self.ep_size, self.tp_size, self.num_experts) .transpose(0, 1) ) @@ -461,7 +422,7 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: # self.output_splits represents the number of tokens received by the current rank # from other EP rank. 
self.output_splits = ( - num_global_tokens_per_rank[tp_rank] + num_global_tokens_per_rank[self.tp_rank] .to(torch.device("cpu"), non_blocking=True) .numpy() ) @@ -493,15 +454,21 @@ def preprocess(self, indices: torch.Tensor) -> torch.Tensor: return num_tokens_per_local_expert def token_permutation( - self, hidden_states: torch.Tensor, probs: torch.Tensor, indices: torch.Tensor + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: """ Dispatch tokens to local experts using AlltoAll communication. + This method performs the following steps: + 1. Preprocess the routing map to get metadata for communication and permutation. + 2. Permute input tokens for AlltoAll communication. + 3. Perform expert parallel AlltoAll communication. + 4. Sort tokens by local expert (if multiple local experts exist). + Args: hidden_states (torch.Tensor): Input token embeddings. - probs (torch.Tensor): Probs of tokens assigned to experts. - indices (torch.Tensor): Indices of tokens assigned to experts. + probs (torch.Tensor): The probabilities of token to experts assignment. + routing_map (torch.Tensor): The mapping of token to experts assignment. Returns: Tuple[torch.Tensor, torch.Tensor]: @@ -511,10 +478,12 @@ def token_permutation( # Preprocess: Get the metadata for communication, permutation and computation operations. self.hidden_shape = hidden_states.shape self.probs = probs + self.routing_map = routing_map assert probs.dim() == 2, "Expected 2D tensor for probs" - assert indices.dim() == 2, "Expected 2D tensor for indices" + assert routing_map.dim() == 2, "Expected 2D tensor for token2expert mask" + assert routing_map.dtype == torch.bool, "Expected bool tensor for mask" hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) - tokens_per_expert = self.preprocess(indices) + tokens_per_expert = self.preprocess(self.routing_map) if self.shared_experts is not None: self.shared_experts.pre_forward_comm(hidden_states.view(self.hidden_shape)) @@ -524,27 +493,22 @@ def token_permutation( if self.cuda_sync_point == "before_permutation_1": torch.cuda.current_stream().synchronize() permutated_local_input_tokens, self.reversed_local_input_permutation_mapping = permute( - hidden_states, - indices, - num_out_tokens=self.num_out_tokens, - padded_mode=self.drop_and_pad, + hidden_states, routing_map, num_out_tokens=self.num_out_tokens ) # Perform expert parallel AlltoAll communication if self.cuda_sync_point == "before_ep_alltoall": torch.cuda.current_stream().synchronize() - global_input_tokens = tensor_parallel.all_to_all( - parallel_state.get_expert_model_parallel_group(), - permutated_local_input_tokens, - self.output_splits, - self.input_splits, + global_input_tokens = all_to_all( + self.ep_group, permutated_local_input_tokens, self.output_splits, self.input_splits ) if self.shared_experts is not None: self.shared_experts.linear_fc1_forward_and_act(global_input_tokens) - if parallel_state.get_tensor_model_parallel_world_size() > 1: + if self.tp_size > 1: global_input_tokens = gather_from_sequence_parallel_region( global_input_tokens, + group=self.tp_group, output_split_sizes=( self.output_splits_tp.tolist() if self.output_splits_tp is not None else None ), @@ -564,11 +528,16 @@ def token_permutation( return global_input_tokens, tokens_per_expert def token_unpermutation( - self, hidden_states: torch.Tensor, bias: torch.Tensor = None + self, hidden_states: torch.Tensor, bias: Optional[torch.Tensor] = None ) -> Tuple[torch.Tensor, 
Optional[torch.Tensor]]: """ Reverse the token permutation to restore the original order. + This method performs the following steps: + 1. Unsort tokens by local expert (if multiple local experts exist). + 2. Perform expert parallel AlltoAll communication to restore the original order. + 3. Unpermute tokens to restore the original order. + Args: hidden_states (torch.Tensor): Output from local experts. bias (torch.Tensor, optional): Bias tensor (not supported). @@ -588,9 +557,10 @@ def token_unpermutation( self.restore_output_by_local_experts, ) - if parallel_state.get_tensor_model_parallel_world_size() > 1: + if self.tp_size > 1: hidden_states = reduce_scatter_to_sequence_parallel_region( hidden_states, + group=self.tp_group, input_split_sizes=( self.output_splits_tp.tolist() if self.output_splits_tp is not None else None ), @@ -598,23 +568,20 @@ def token_unpermutation( # Perform expert parallel AlltoAll communication # hidden_states: [SEQL, H] -> [SEQL, H/TP] - permutated_local_input_tokens = tensor_parallel.all_to_all( - parallel_state.get_expert_model_parallel_group(), - hidden_states, - self.input_splits, - self.output_splits, + permutated_local_input_tokens = all_to_all( + self.ep_group, hidden_states, self.input_splits, self.output_splits ) if self.shared_experts is not None: self.shared_experts.linear_fc2_forward(permutated_local_input_tokens) self.shared_experts.post_forward_comm() - # Unpermutation 1: Unsort input tokens to restore the original order. + # Unpermutation 1: AlltoAll output to output output = unpermute( permutated_local_input_tokens, self.reversed_local_input_permutation_mapping, - probs=self.probs, - padded_mode=self.drop_and_pad, restore_shape=self.hidden_shape_before_permute, + probs=self.probs, + routing_map=self.routing_map, ) # Reshape the output tensor diff --git a/megatron/core/transformer/multi_latent_attention.py b/megatron/core/transformer/multi_latent_attention.py index d637e2b44..6bff6fc08 100644 --- a/megatron/core/transformer/multi_latent_attention.py +++ b/megatron/core/transformer/multi_latent_attention.py @@ -113,10 +113,13 @@ def forward( key_value_states=None, inference_params=None, rotary_pos_emb=None, + attention_bias=None, packed_seq_params=None, position_ids=None, ): + """Forward pass for multi-latent attention""" assert rotary_pos_emb is None, "Rotary position embeddings should not be passed into MLA." + assert attention_bias is None, "Attention bias should not be passed into MLA." # hidden_states: [sq, b, h] @@ -138,8 +141,8 @@ def forward( # Adjust key, value for inference # =================================================== # rotary_pos_emb = None - key, value, _, attn_mask_type = self._adjust_key_value_for_inference( - inference_params, key, value, rotary_pos_emb=None + query, key, value, _, attn_mask_type = self._adjust_key_value_for_inference( + inference_params, query, key, value, rotary_pos_emb=None ) # ================================== diff --git a/megatron/core/transformer/torch_layer_norm.py b/megatron/core/transformer/torch_layer_norm.py index 11cf406f0..c718b1854 100644 --- a/megatron/core/transformer/torch_layer_norm.py +++ b/megatron/core/transformer/torch_layer_norm.py @@ -1,44 +1,4 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-import warnings +from megatron.core.transformer.torch_norm import WrappedTorchNorm -import torch - -from megatron.core.transformer import TransformerConfig - - -class WrappedTorchLayerNorm(torch.nn.LayerNorm): - - def __init__( - self, - config: TransformerConfig, - hidden_size: int, - eps: float = 1e-5, - persist_layer_norm: bool = False, ## TODO: unused arguments. See https://gitlab-master.nvidia.com/ADLR/megatron-lm/-/issues/223 - zero_centered_gamma: bool = False, - normalization: str = "LayerNorm", # included to match TE interface - ): - self.config = config - assert ( - not self.config.layernorm_zero_centered_gamma - ), f"zero_centered_gamma not supported by torch LayerNorm" - - assert ( - self.config.normalization == "LayerNorm" - ), f'({self.config.normalization}) is not supported in by torch Layernorm' - - assert ( - not self.config.persist_layer_norm - ), f"persist_layer_norm not supported by torch LayerNorm" - - assert ( - not self.config.sequence_parallel - ), f"sequence parallel not supported by torch LayerNorm" - - assert ( - not self.config.memory_efficient_layer_norm - ), f"memory_efficient_layer_norm not supported by torch LayerNorm" - - super().__init__( - normalized_shape=hidden_size, ## applied to last len(normalized_shape.size) dimensions - eps=eps, - ) +WrappedTorchLayerNorm = WrappedTorchNorm diff --git a/megatron/core/transformer/torch_norm.py b/megatron/core/transformer/torch_norm.py new file mode 100644 index 000000000..5fcb74da8 --- /dev/null +++ b/megatron/core/transformer/torch_norm.py @@ -0,0 +1,48 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import torch + +from megatron.core.transformer import TransformerConfig +from megatron.core.utils import is_torch_min_version + + +class WrappedTorchNorm: + """ + A conditional wrapper to initialize an instance of PyTorch's + `LayerNorm` or `RMSNorm` based on input + """ + + def __new__( + cls, + config: TransformerConfig, + hidden_size: int, + eps: float = 1e-5, + # TODO: unused arguments. 
+ # See https://gitlab-master.nvidia.com/ADLR/megatron-lm/-/issues/223 + persist_layer_norm: bool = False, + zero_centered_gamma: bool = False, + normalization: str = "LayerNorm", + ): + assert ( + not config.layernorm_zero_centered_gamma + ), f"zero_centered_gamma not supported by torch LayerNorm" + + assert not config.persist_layer_norm, f"persist_layer_norm not supported by torch LayerNorm" + + assert not config.sequence_parallel, f"sequence parallel not supported by torch LayerNorm" + + assert ( + not config.memory_efficient_layer_norm + ), f"memory_efficient_layer_norm not supported by torch LayerNorm" + + if config.normalization == "LayerNorm": + norm_cls = torch.nn.LayerNorm + elif config.normalization == "RMSNorm": + assert is_torch_min_version( + "2.4.0a0" + ), 'Torch RMSNorm requires PyTorch version >= 2.4.0' + + norm_cls = torch.nn.RMSNorm + else: + raise Exception("Only LayerNorm and RMSNorm are currently supported") + + return norm_cls(normalized_shape=hidden_size, eps=eps) diff --git a/megatron/core/transformer/transformer_block.py b/megatron/core/transformer/transformer_block.py index 3a88f1ab2..e29851926 100755 --- a/megatron/core/transformer/transformer_block.py +++ b/megatron/core/transformer/transformer_block.py @@ -39,9 +39,9 @@ LayerNormImpl = FusedLayerNorm except ImportError: - from megatron.core.transformer.torch_layer_norm import WrappedTorchLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm - LayerNormImpl = WrappedTorchLayerNorm + LayerNormImpl = WrappedTorchNorm def get_num_layers_to_build(config: TransformerConfig) -> int: @@ -265,6 +265,7 @@ def _checkpointed_forward( context: Tensor, context_mask: Tensor, rotary_pos_emb: Tensor, + attention_bias: Tensor, packed_seq_params: PackedSeqParams, ): """Forward method with activation checkpointing.""" @@ -281,6 +282,7 @@ def custom_forward( context=context, context_mask=context_mask, rotary_pos_emb=rotary_pos_emb, + attention_bias=attention_bias, inference_params=None, packed_seq_params=packed_seq_params, ) @@ -289,6 +291,7 @@ def custom_forward( return custom_forward def checkpoint_handler(forward_func): + """Determines whether to use the `te_checkpoint` or `tensor_parallel.checkpoint`""" if self.config.fp8: return te_checkpoint( forward_func, @@ -365,6 +368,7 @@ def get_cuda_graph_optional_args( context: Tensor, context_mask: Tensor, rotary_pos_emb: Tensor, + attention_bias: Tensor, inference_params: InferenceParams, packed_seq_params: PackedSeqParams, ): @@ -395,6 +399,9 @@ def forward( context: Tensor = None, context_mask: Tensor = None, rotary_pos_emb: Tensor = None, + rotary_pos_cos: Tensor = None, + rotary_pos_sin: Tensor = None, + attention_bias: Tensor = None, inference_params: InferenceParams = None, packed_seq_params: PackedSeqParams = None, ): @@ -412,6 +419,9 @@ def forward( context (Tensor, optional): Context tensor for cross-attention. context_mask (Tensor, optional): Mask for cross-attention context rotary_pos_emb (Tensor, optional): Rotary positional embeddings. + attention_bias (Tensor): Bias tensor for Q * K.T of shape in shape broadcastable + to [b, num_head, sq, skv], e.g. [1, 1, sq, skv]. + Used as an alternative to apply attention mask for TE cuDNN attention. inference_params (InferenceParams, optional): Parameters for inference-time optimizations. 
packed_seq_params (PackedSeqParams, optional): Parameters for packed sequence @@ -474,7 +484,7 @@ def forward( else: fp8_context = nullcontext() - with rng_context and fp8_context: + with rng_context, fp8_context: # Forward pass. if self.config.recompute_granularity == 'full' and self.training: hidden_states = self._checkpointed_forward( @@ -483,6 +493,7 @@ def forward( context=context, context_mask=context_mask, rotary_pos_emb=rotary_pos_emb, + attention_bias=attention_bias, packed_seq_params=packed_seq_params, ) else: @@ -496,6 +507,9 @@ def forward( context=context, context_mask=context_mask, rotary_pos_emb=rotary_pos_emb, + rotary_pos_cos=rotary_pos_cos, + rotary_pos_sin=rotary_pos_sin, + attention_bias=attention_bias, inference_params=inference_params, packed_seq_params=packed_seq_params, ) @@ -515,6 +529,7 @@ def forward( context, context_mask, rotary_pos_emb, + attention_bias, inference_params, packed_seq_params, ) diff --git a/megatron/core/transformer/transformer_config.py b/megatron/core/transformer/transformer_config.py index a63171686..48ad00cf6 100644 --- a/megatron/core/transformer/transformer_config.py +++ b/megatron/core/transformer/transformer_config.py @@ -1,7 +1,7 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. from dataclasses import dataclass -from typing import Callable, Optional, Tuple +from typing import Callable, List, Optional, Tuple, Union import torch.nn.functional as F @@ -304,6 +304,25 @@ class TransformerConfig(ModelParallelConfig): moe_layer_recompute: bool = False """Memory optimization: checkpointing moe_layer to save actiavtion memory.""" + ################## + # Context Parallel + ################## + cp_comm_type: Union[str, List[str]] = None + """Inter-gpu communication type for context parallelism. + str: all layers share same communication type. + List[str]: each layer has its separate communication type. + cp_comm_type of each layer can be "p2p" or "all_gather" or "a2a" or "a2a+p2p". + "p2p": Exchange KV chunks with P2P communications in ring topology. P2P is async and can be + overlapped with attention compute. + "all_gather": All-gather to get full sequence of KV before attention. The all-gather is not + async, and cannot be overlapped. + "a2a": Like DeepSpeed Ulysses, scatter attention heads across the CP group, and gather to get + full sequence of QKV. + "a2a+p2p": A hierarchical implementation of context parallelism to attention. + It uses A2A communications in low-level CP groups (e.g., via NVLink), + and P2P communications in high-level CP groups (e.g., via IBLink). + """ + #################### # miscellaneous #################### @@ -323,6 +342,9 @@ class TransformerConfig(ModelParallelConfig): config_logger_dir: str = "" """When non-empty, dumps entry-point configs to config_logger_dir""" + flash_decode: bool = False + """ Use the optimized flash decoding kernel during inference. """ + def __post_init__(self): """Python dataclass method that is used to modify attributes after initialization. See https://docs.python.org/3/library/dataclasses.html#post-init-processing for more @@ -477,11 +499,24 @@ def __post_init__(self): "When bias_activation_fusion is True, gated_linear_unit is False, " "and activation function is gelu, add_bias_linear must also be True." 
) + if self.activation_func_fp8_input_store: if self.activation_func != F.silu or not self.gated_linear_unit: raise ValueError("Storing activation input in FP8 is supported only for SwiGLU.") - if self.apply_rope_fusion and self.rotary_interleaved: - raise ValueError('rotary_interleaved does not work with apply_rope_fusion.') + + if self.apply_rope_fusion: + if self.rotary_interleaved: + raise ValueError("rotary_interleaved does not work with apply_rope_fusion.") + + from megatron.core.models.common.embeddings.rope_utils import HAVE_APPLY_ROPE_FUSION + + if not HAVE_APPLY_ROPE_FUSION: + raise ValueError( + "apply_rope_fusion is not available. Please install TE >= 1.4 or Apex." + ) + + if self.multi_latent_attention and self.rotary_interleaved: + raise ValueError("rotary_interleaved does not work with multi_latent_attention.") if self.init_method is None: self.init_method = init_method_normal(self.init_method_std) @@ -491,17 +526,13 @@ def __post_init__(self): self.init_method_std, self.num_layers ) - if self.moe_extended_tp: - if self.moe_token_dispatcher_type != 'allgather': - raise ValueError( - "Moe extended TP parallelism only applies to allgather based token dispatcher." - ) - extended_tp_size = self.tensor_model_parallel_size * self.expert_model_parallel_size - if self.ffn_hidden_size % extended_tp_size != 0: - raise ValueError( - f'ffn_hidden_size: {self.ffn_hidden_size} must be divisible by ' - f'extended_tp_size {extended_tp_size}' - ) + if ( + self.moe_token_dispatcher_type == "alltoall_seq" + and self.tensor_model_parallel_size != self.expert_tensor_parallel_size + ): + raise ValueError( + "The alltoall_seq dispatcher does not support different TP sizes for MoE and dense layers." + ) if self.num_moe_experts and self.fp8: # TE version below 1.7.0 will raise Error when handle zeros tokens for expert @@ -511,8 +542,32 @@ def __post_init__(self): f"but your version is {get_te_version()}." ) - if self.moe_grouped_gemm: - raise ValueError("Grouped GEMM of MoE not support fp8 for now.") + if self.moe_grouped_gemm and not is_te_min_version("1.11.0"): + raise ValueError( + "Only transformer-engine>=1.11.0 supports FP8 grouped gemm, " + f"but your version is {get_te_version()}." + ) + + if self.flash_decode and self.fp8: + raise ValueError("FP8 inference is currently not supported with flash decoding.") + + if self.moe_token_dispatcher_type in ['allgather', 'alltoall_seq']: + if self.variable_seq_lengths is True: + raise ValueError( + f"Token dispatcher type: {self.moe_token_dispatcher_type} does not support " + f"variable sequence length; please use the alltoall dispatcher instead." + ) + + if self.cp_comm_type is not None: + if isinstance(self.cp_comm_type, list): + assert len(self.cp_comm_type) == self.num_layers, ( + f"Length of cp_comm_type ({len(self.cp_comm_type)}) should equal " + f"the total number of transformer layers ({self.num_layers})!" + ) + else: + assert isinstance( + self.cp_comm_type, str + ), "Unsupported communication type for context parallelism!"
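For readers of the cp_comm_type option validated above, a small sketch of the per-layer resolution that TransformerLayer.__init__ performs later in this diff (the helper name is invented for illustration):

from typing import List, Optional, Union

def resolve_cp_comm_type(
    cp_comm_type: Optional[Union[str, List[str]]], layer_number: int
) -> Optional[str]:
    # A single string applies to every layer; a list is indexed per layer.
    if cp_comm_type is None:
        return None
    if isinstance(cp_comm_type, list):
        return cp_comm_type[layer_number]
    return cp_comm_type

assert resolve_cp_comm_type(["p2p", "a2a", "all_gather", "a2a+p2p"], 2) == "all_gather"
assert resolve_cp_comm_type("p2p", 5) == "p2p"
assert resolve_cp_comm_type(None, 0) is None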
@dataclass diff --git a/megatron/core/transformer/transformer_layer.py b/megatron/core/transformer/transformer_layer.py index 7f5f14944..cf0bcb951 100644 --- a/megatron/core/transformer/transformer_layer.py +++ b/megatron/core/transformer/transformer_layer.py @@ -112,9 +112,19 @@ def __init__( eps=self.config.layernorm_epsilon, ) + attention_optional_kwargs = {} + if config.cp_comm_type is not None: + if isinstance(config.cp_comm_type, list): + attention_optional_kwargs["cp_comm_type"] = config.cp_comm_type[self.layer_number] + else: + attention_optional_kwargs["cp_comm_type"] = config.cp_comm_type + # [Module 2: SelfAttention] self.self_attention = build_module( - submodules.self_attention, config=self.config, layer_number=layer_number + submodules.self_attention, + config=self.config, + layer_number=layer_number, + **attention_optional_kwargs, ) # [Module 3: BiasDropoutFusion] @@ -130,7 +140,10 @@ def __init__( # [Module 5: CrossAttention] self.cross_attention = build_module( - submodules.cross_attention, config=self.config, layer_number=layer_number + submodules.cross_attention, + config=self.config, + layer_number=layer_number, + **attention_optional_kwargs, ) # [Module 6: BiasDropoutFusion] @@ -165,6 +178,10 @@ def __init__( def _get_layer_offset(self): """Get the index number of this layer, given the level of pipelining.""" pipeline_rank = parallel_state.get_pipeline_model_parallel_rank() + if not parallel_state.is_inside_encoder(): + pp_decoder_start = parallel_state.get_pipeline_model_parallel_decoder_start() + if pp_decoder_start is not None: + pipeline_rank = pipeline_rank - pp_decoder_start num_layers_per_pipeline_rank = ( self.config.num_layers // self.config.pipeline_model_parallel_size @@ -181,13 +198,13 @@ def _get_layer_offset(self): else: # Each stage gets a contiguous set of layers. - if parallel_state.get_pipeline_model_parallel_world_size() > 1: + if self.config.pipeline_model_parallel_size > 1: if ( self.config.first_pipeline_num_layers is not None or self.config.last_pipeline_num_layers is not None ): # Calculate number of pipelines for distributing layers - middle_pipeline_stages = parallel_state.get_pipeline_model_parallel_world_size() + middle_pipeline_stages = self.config.pipeline_model_parallel_size middle_pipeline_stages -= sum( [ 1 if x is not None else 0 @@ -206,7 +223,7 @@ def _get_layer_offset(self): ) last_pipeline_offset = ( 0 - if self.config.first_pipeline_num_layers is None + if self.config.last_pipeline_num_layers is None else self.config.last_pipeline_num_layers ) @@ -245,6 +262,9 @@ def forward( context=None, context_mask=None, rotary_pos_emb=None, + rotary_pos_cos=None, + rotary_pos_sin=None, + attention_bias=None, inference_params=None, packed_seq_params=None, ): @@ -261,6 +281,7 @@ def forward( context (Tensor, optional): Context tensor for cross-attention. context_mask (Tensor, optional): Mask tensor for cross-attention. rotary_pos_emb (Tensor, optional): Rotary positional embeddings. + attention_bias (Tensor, optional): Bias tensor for Q * K.T. inference_params (object, optional): Parameters for inference-time optimizations. packed_seq_params (object, optional): Parameters for packed sequence processing. 
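The `attention_optional_kwargs` logic in `TransformerLayer.__init__` above only forwards `cp_comm_type` when it is set, indexing into the list in the per-layer case. A standalone sketch of that selection is shown below; the names mirror the constructor but this is not the verbatim implementation, and the 0-based indexing is purely illustrative.

```python
from typing import Dict, List, Optional, Union


def resolve_cp_comm_kwargs(
    cp_comm_type: Optional[Union[str, List[str]]], layer_number: int
) -> Dict[str, str]:
    """Build the optional kwargs forwarded to build_module() for one attention block."""
    if cp_comm_type is None:
        # Field unset: build the attention module exactly as before.
        return {}
    if isinstance(cp_comm_type, list):
        # One entry per transformer layer; the config's __post_init__ checked the length.
        return {"cp_comm_type": cp_comm_type[layer_number]}
    return {"cp_comm_type": cp_comm_type}


# Example: the third layer of a per-layer configuration.
assert resolve_cp_comm_kwargs(["a2a", "p2p", "p2p", "a2a+p2p"], 2) == {"cp_comm_type": "p2p"}
```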
@@ -283,6 +304,9 @@ def forward( attention_mask=attention_mask, inference_params=inference_params, rotary_pos_emb=rotary_pos_emb, + rotary_pos_cos=rotary_pos_cos, + rotary_pos_sin=rotary_pos_sin, + attention_bias=attention_bias, packed_seq_params=packed_seq_params, ) diff --git a/megatron/core/utils.py b/megatron/core/utils.py index f3910926a..6b46f292d 100644 --- a/megatron/core/utils.py +++ b/megatron/core/utils.py @@ -22,15 +22,44 @@ import torch from packaging.version import Version as PkgVersion +try: + from torch.distributed._tensor import DTensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + from megatron.core import parallel_state from megatron.core.dist_checkpointing.mapping import ShardedTensor logger = logging.getLogger(__name__) +try: + _torch_version = PkgVersion(torch.__version__) +except: + # This is a WAR for building docs, where torch is not actually imported + _torch_version = PkgVersion("0.0.0") _te_version = None +def get_torch_version(): + """Get pytorch version from __version__; if not available use pip's. Use caching.""" + + def get_torch_version_str(): + import torch + + if hasattr(torch, '__version__'): + return str(torch.__version__) + else: + return version("torch") + + global _torch_version + if _torch_version is None: + _torch_version = PkgVersion(get_torch_version_str()) + return _torch_version + + def get_te_version(): """Get TE version from __version__; if not available use pip's. Use caching.""" @@ -55,6 +84,20 @@ def is_te_min_version(version, check_equality=True): return get_te_version() > PkgVersion(version) +def get_torch_version(): + """Get torch version from __version__.""" + + global _torch_version + return _torch_version + + +def is_torch_min_version(version, check_equality=True): + """Check if minimum version of `torch` is installed.""" + if check_equality: + return get_torch_version() >= PkgVersion(version) + return get_torch_version() > PkgVersion(version) + + def ensure_divisibility(numerator, denominator): """Ensure that numerator is divisible by the denominator.""" assert numerator % denominator == 0, "{} is not divisible by {}".format(numerator, denominator) @@ -204,8 +247,8 @@ def assert_viewless_tensor(tensor, extra_msg=None): assert tensor._base is None, ( "Ensure tensor._base is None before setting tensor.data or storing " "tensor to memory buffer. Otherwise, a memory leak will occur (and " - "likely accumulate over iterations). %s" - ) % extra_msg + f"likely accumulate over iterations). {extra_msg}" + ) return tensor @@ -301,8 +344,10 @@ def check_param_hashes_across_dp_replicas( """ # Compute per-parameter hashes on this rank. - params = [] - local_param_hashes = [] + # Keep track of expert and non-expert parameters separately since they need to be + # all-gathered across different sets of ranks. + non_expert_params, expert_params = [], [] + local_non_expert_param_hashes, local_expert_param_hashes = [], [] for model_chunk_id, model_chunk in enumerate(model): for param_name, param in model_chunk.named_parameters(): param_hash = torch.frombuffer( @@ -311,34 +356,54 @@ def check_param_hashes_across_dp_replicas( ), dtype=torch.uint8, ) - params.append((model_chunk_id, param_name, param)) - local_param_hashes.append(param_hash) - local_param_hashes = torch.stack(local_param_hashes) - - # Collect per-parameter hashes across all ranks in DP group. 
- all_param_hashes = [ - torch.zeros_like(local_param_hashes) - for _ in range(parallel_state.get_data_parallel_world_size()) - ] - torch.distributed.all_gather( - all_param_hashes, local_param_hashes, group=parallel_state.get_data_parallel_group_gloo() - ) + if getattr(param, 'allreduce', True): + non_expert_params.append((model_chunk_id, param_name, param)) + local_non_expert_param_hashes.append(param_hash) + else: + expert_params.append((model_chunk_id, param_name, param)) + local_expert_param_hashes.append(param_hash) + + # Use data-modulo-expert parallel group to all-gather expert param hashes, regular + # data-parallel group for non-expert param hashes. + all_param_hashes_match = True + for params, local_param_hashes, all_gather_group in zip( + [non_expert_params, expert_params], + [local_non_expert_param_hashes, local_expert_param_hashes], + [ + parallel_state.get_data_parallel_group_gloo(), + parallel_state.get_expert_data_parallel_group_gloo(), + ], + ): + # Collect per-parameter hashes across all ranks in group. + assert len(params) == len(local_param_hashes) + if len(params) == 0: + continue + local_param_hashes = torch.stack(local_param_hashes) + all_param_hashes = [ + torch.zeros_like(local_param_hashes) + for _ in range(torch.distributed.get_world_size(all_gather_group)) + ] + torch.distributed.all_gather(all_param_hashes, local_param_hashes, group=all_gather_group) + + # Make sure local per-parameter hash matches DP rank 0. + param_hashes_match = torch.equal(local_param_hashes, all_param_hashes[0]) + if not param_hashes_match: + for i, (model_chunk_id, param_name, param) in enumerate(params): + if not torch.equal(local_param_hashes[i], all_param_hashes[0][i]): + rank = torch.distributed.get_rank() + logger.info( + f"[Rank {rank}] Hash not matching for {param_name} in model chunk" + f"{model_chunk_id}" + ) + if cross_check: + # Make sure all ranks have the same hash. + all_param_hashes_match &= all( + map(lambda x: torch.equal(local_param_hashes, x), all_param_hashes) + ) + else: + all_param_hashes_match &= param_hashes_match - # Make sure local per-parameter hash matches DP rank 0. - param_hashes_match = torch.equal(local_param_hashes, all_param_hashes[0]) - if not param_hashes_match: - for i, (model_chunk_id, param_name, param) in enumerate(params): - if not torch.equal(local_param_hashes[i], all_param_hashes[0][i]): - rank = torch.distributed.get_rank() - logger.info( - f"[Rank {rank}] Hash not matching for {param_name} in model chunk" - f"{model_chunk_id}" - ) - if cross_check: - # Make sure all ranks have the same hash. - return all(map(lambda x: torch.equal(local_param_hashes, x), all_param_hashes)) - else: - return param_hashes_match + return all_param_hashes_match def make_tp_sharded_tensor_for_checkpoint( @@ -349,21 +414,39 @@ def make_tp_sharded_tensor_for_checkpoint( Optionally, can provide offsets which prepend new dimensions to the tensor. 
""" - prepend_axis_num = len(prepend_offsets) + new_offsets = [] + tp_rank = parallel_state.get_tensor_model_parallel_rank() + dp_rank = parallel_state.get_data_parallel_rank(with_context_parallel=True) + tp_size = parallel_state.get_tensor_model_parallel_world_size() + dp_size = parallel_state.get_data_parallel_world_size(with_context_parallel=True) + dp_replica_id = parallel_state.get_data_parallel_rank(with_context_parallel=True) + + new_offsets.append((tp_axis + prepend_axis_num, tp_rank, tp_size)) + + if HAVE_DTENSOR and isinstance(tensor, DTensor): + # TP + FSDP2 sharding + dp_replica_id = 0 + tensor = tensor._local_tensor + + if tp_axis == 0: + # both FSDP2 and TP shards axis 0 + # default MCore uses tp-cp-ep-dp-pp + # FSDP2 is compatibile with TP, CP + new_offsets[0] = (prepend_axis_num, tp_rank * dp_size + dp_rank, tp_size * dp_size) + else: + # FSDP2 shards axis 0 and TP shards some other axis + new_offsets.append((prepend_axis_num, dp_rank, dp_size)) + if replica_id is None: - replica_id = (0, 0, parallel_state.get_data_parallel_rank(with_context_parallel=True)) + replica_id = (0, 0, dp_replica_id) return ShardedTensor.from_rank_offsets( key, tensor, *prepend_offsets, - ( - tp_axis + prepend_axis_num, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_tensor_model_parallel_world_size(), - ), + *new_offsets, replica_id=replica_id, prepend_axis_num=prepend_axis_num, **kwargs, @@ -378,23 +461,48 @@ def make_sharded_tensor_for_checkpoint(tensor, key, prepend_offsets=(), replica_ prepend_axis_num = len(prepend_offsets) + new_offsets = [] + dp_rank = parallel_state.get_data_parallel_rank(with_context_parallel=True) + dp_size = parallel_state.get_data_parallel_world_size(with_context_parallel=True) + dp_replica_id = parallel_state.get_data_parallel_rank(with_context_parallel=True) + + if HAVE_DTENSOR and isinstance(tensor, DTensor): + # FSDP2 sharding + dp_replica_id = 0 + tensor = tensor._local_tensor + new_offsets.append((prepend_axis_num, dp_rank, dp_size)) + if replica_id is None: - replica_id = ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_data_parallel_rank(with_context_parallel=True), - ) + replica_id = (0, parallel_state.get_tensor_model_parallel_rank(), dp_replica_id) return ShardedTensor.from_rank_offsets( key, tensor, *prepend_offsets, + *new_offsets, replica_id=replica_id, prepend_axis_num=prepend_axis_num, **kwargs, ) +def to_local_if_dtensor(tensor: Union[torch.Tensor, "DTensor"]) -> torch.Tensor: + """Returns the local shard of the given tensor if it is a DTensor.""" + with torch.no_grad(): + return tensor.to_local() if HAVE_DTENSOR and isinstance(tensor, DTensor) else tensor + + +def get_data_parallel_group_if_dtensor( + tensor: Union[torch.Tensor, "DTensor"], data_parallel_group: "ProcessGroup" = None +) -> Optional["ProcessGroup"]: + """Gets the data parallel group of the given tensor if it is a DTensor.""" + if HAVE_DTENSOR and isinstance(tensor, DTensor): + current_group = tensor.device_mesh.get_group() + assert data_parallel_group is None or current_group == data_parallel_group + return current_group + return None + + def prepare_input_tensors_for_wgrad_compute(grad_output, all_gathered_input): """Ensure grad_output is stored in a contiguous buffer.""" # Doing gather + slicing during the NeMo forward pass can make this tensor @@ -414,6 +522,12 @@ def prepare_input_tensors_for_wgrad_compute(grad_output, all_gathered_input): return grad_output, all_gathered_input +if is_torch_min_version("1.13.0"): + 
dist_all_gather_func = torch.distributed.all_gather_into_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + + def drain_embedding_wgrad_compute(config, embedding_activation_buffer, grad_output_buffer, weight): """Helper for performing embedding wgrad GEMM's during the pipeline drain phase, pipelines the AllGather and GEMM's. @@ -442,7 +556,7 @@ def drain_embedding_wgrad_compute(config, embedding_activation_buffer, grad_outp all_gathered_input = [None, None] if config.sequence_parallel: all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, "mpu_0") - handle = torch.distributed._all_gather_base( + handle = dist_all_gather_func( all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=False ) @@ -480,7 +594,7 @@ def wgrad_compute(all_gathered_input, grad_output, weight): if config.sequence_parallel: name = "mpu_" + str((i + 1) % 2) all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, name) - handle = torch.distributed._all_gather_base( + handle = dist_all_gather_func( all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=True ) @@ -523,14 +637,8 @@ def local_multi_tensor_l2_norm(chunk_size, noop_flag, tensor_lists, per_tensor, # works as a drop-in replacement for amp_C.multi_tensor_scale def local_multi_tensor_scale(chunk_size, noop_flag, tensor_lists, scale): """Works as a drop-in replacement for amp_C.multi_tensor_scale.""" - inputs, targets = tensor_lists[0], tensor_lists[1] - if inputs == targets: - for i in range(len(targets)): - # for parity with apex implementation - targets[i] *= scale - else: - for i in range(len(targets)): - targets[i] = inputs[i] * scale + for src, dst in zip(tensor_lists[0], tensor_lists[1]): + dst.copy_(src * scale) class _ValueWithRank: @@ -752,7 +860,7 @@ def configure( amp (float, optional): Set to 3.0 if we only use timers in fwd pass. Defaults to 3.0. port (int, optional): Control port, useful only for rank-0. Defaults to 65535. - prefill (int, optional): Howmany Events to pre-populate. Defaults to 1024. + prefill (int, optional): How many Events to pre-populate. Defaults to 1024. enabled (bool, optional): Whether or not collection is enabled on startup. Defaults to False. """ @@ -1003,7 +1111,7 @@ def _check_toggle(self) -> None: indirectly from report() is the only way to activate the change that is made via rank-0 """ - # If no change just commnunicate the current + # If no change just communicate the current off = self._off if self.rank == 0 and self.toggle: off = not self._off @@ -1038,7 +1146,7 @@ def _handler(self) -> None: if self.rank == 0: state = "OFF" if self._off else "ON" logger.info( - f"Controller ready to recv " f"commands on port {self.port}. Current state {state}" + f"Controller ready to recv commands on port {self.port}. Current state {state}" ) while True and self.sock is not None: try: @@ -1209,7 +1317,7 @@ def enabled(self) -> bool: @property def configured(self) -> bool: - """Can be called to check if the the instance is already configured + """Can be called to check if the instance is already configured Returns: bool: returns True if configure was called and was a success, else False diff --git a/megatron/inference/algos/__init__.py b/megatron/inference/algos/__init__.py new file mode 100644 index 000000000..f8011007a --- /dev/null +++ b/megatron/inference/algos/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
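The `is_torch_min_version` helper and the `dist_all_gather_func` alias above follow a simple compatibility pattern: bind the public `all_gather_into_tensor` on newer PyTorch, fall back to the private `_all_gather_base` on older releases, and leave every call site unchanged. A self-contained sketch of the same pattern is below; the version handling mirrors `megatron/core/utils.py`, while `gather_sequence_parallel` is a hypothetical helper added only to show a call site.

```python
import torch
from packaging.version import Version as PkgVersion

try:
    _TORCH_VERSION = PkgVersion(torch.__version__)
except Exception:
    # Mirrors the workaround above for environments where torch is only stubbed (e.g. doc builds).
    _TORCH_VERSION = PkgVersion("0.0.0")

if _TORCH_VERSION >= PkgVersion("1.13.0"):
    dist_all_gather_func = torch.distributed.all_gather_into_tensor
else:
    dist_all_gather_func = torch.distributed._all_gather_base


def gather_sequence_parallel(local_chunk: torch.Tensor, group) -> torch.Tensor:
    """All-gather per-rank activation chunks into one contiguous buffer (hypothetical helper)."""
    world_size = torch.distributed.get_world_size(group)
    output = torch.empty(
        (world_size * local_chunk.shape[0], *local_chunk.shape[1:]),
        dtype=local_chunk.dtype,
        device=local_chunk.device,
    )
    dist_all_gather_func(output, local_chunk.contiguous(), group=group, async_op=False)
    return output
```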
diff --git a/megatron/inference/algos/distillation.py b/megatron/inference/algos/distillation.py new file mode 100644 index 000000000..540575033 --- /dev/null +++ b/megatron/inference/algos/distillation.py @@ -0,0 +1,454 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Distillation loss function(s).""" + +import logging +import types +from abc import ABCMeta +from typing import Any, Dict, Optional, Tuple + +import modelopt.torch.distill as mtd +import torch +import torch.nn as nn +import torch.nn.functional as F +import yaml +from torch import Tensor +from torch.nn.modules.loss import _Loss + +from megatron.core.dist_checkpointing.mapping import ShardedStateDict +from megatron.core.parallel_state import get_tensor_model_parallel_group +from megatron.core.tensor_parallel import gather_from_sequence_parallel_region +from megatron.core.transformer import TransformerConfig +from megatron.training import get_args, print_rank_0 + +logger = logging.getLogger(__name__) + + +def load_distillation_config( + config_path: Optional[str], student_cfg: TransformerConfig, teacher_cfg: TransformerConfig +) -> Dict[str, Any]: + """Read the distillation yaml config file specified by ``args.export_kd_cfg``. + + Args: + config_path: Path to user-defined distillation settings yaml file. + If `None`, uses default logits-only distillation mode for GPT models. + student_cfg: Model config for student model. + teacher_cfg: Model config for teacher model. + + WARNING: Assumes intermediate hidden sizes are always that found in the model config's ``hidden_size`` attribute. + """ + if not config_path: + logger.warning("Distillation config not provided. Using default.") + cfg = { + "logit_layers": ["output_layer", "output_layer"], + "intermediate_layer_pairs": [], + "skip_lm_loss": True, + "kd_loss_scale": 1.0, + } + else: + with open(config_path) as f: + cfg = yaml.safe_load(f) + + intermediate_pairs = cfg["intermediate_layer_pairs"] + logit_pair = cfg["logit_layers"] + skip_lm_loss = cfg["skip_lm_loss"] + loss_scale = cfg["kd_loss_scale"] + + hidden_size_student = student_cfg.hidden_size + hidden_size_teacher = teacher_cfg.hidden_size + + criterion = {tuple(logit_pair): LogitsKLLoss()} + for layer_names in intermediate_pairs: + print_rank_0( + "Distillation: Adding intermediate loss between" + f" `{layer_names[0]}` of student (hidden size {hidden_size_student}) and" + f" `{layer_names[1]}` of teacher (hidden size {hidden_size_teacher})." + ) + criterion[tuple(layer_names)] = HiddenStateCosineLoss( + hidden_size_student, hidden_size_teacher + ) + + loss_balancer = LogitsAndIntermediatesLossBalancer( + kd_loss_scale=loss_scale, skip_original_loss=skip_lm_loss + ) + + cfg["criterion"] = criterion + cfg["loss_balancer"] = loss_balancer + + return cfg + + +######################################################## + + +class BaseLoss(_Loss, metaclass=ABCMeta): + """Abstract base class for Megatron distillation losses.""" + + def __init__( + self, hidden_size_student: Optional[int] = None, hidden_size_teacher: Optional[int] = None + ): + """ + Constructor. + + Args: + hidden_size_student: Size of the student's hidden dimension. + hidden_size_teacher: Size of the teacher's hidden dimension. 
+ """ + super().__init__() + self._projection = ProjectionLayer(hidden_size_student, hidden_size_teacher) + args = get_args() + self._tensor_parallel = args.tensor_model_parallel_size > 1 + self._sequence_parallel = args.sequence_parallel + + def pre_forward(self, predictions: Tensor, targets: Tensor) -> Tuple[Tensor, Tensor]: + """Performs projection of student tensor to match teacher's size if necessary.""" + if isinstance(predictions, tuple): + # `ColumnParallelLinear` returns bias too + predictions, targets = predictions[0], targets[0] + + predictions = self._projection(predictions) + targets = targets.detach() + + return predictions, targets + + def post_forward(self, loss: Tensor, tp_reduce: bool = False) -> Tensor: + """Reshapes tensor from [s, b] to [b, s] for upcoming loss masking.""" + loss = loss.transpose(0, 1).contiguous() + return (loss, tp_reduce) + + +class MSELoss(BaseLoss): + """Calculates Mean Squared Error loss between two tensors without reducing the sequence dim.""" + + def forward(self, predictions: Tensor, targets: Tensor) -> Tensor: + """ + Forward function. + + Args: + predictions: Student model tensors (size [s, b, h]) + targets: Teacher model tensors (size [s, b, h]) + + Returns: + MSE loss of tensors (size [b, s]) + """ + predictions, targets = self.pre_forward(predictions, targets) + + # TP irrelevant since MSE loss gradients are per-input element. + loss = F.mse_loss(predictions, targets, reduction="none") + loss = loss.sum(dim=-1) + + return self.post_forward(loss) + + +class HiddenStateCosineLoss(BaseLoss): + """ + Calculates Cosine loss between two tensors without reducing the sequence dim. + + The tensors are assumed to be intermediate activations, so extra restrictions are in place. + """ + + def __init__( + self, hidden_size_student: Optional[int] = None, hidden_size_teacher: Optional[int] = None + ): + """ + Constructor. + + Args: + hidden_size_student: Size of the student's hidden dimension. + hidden_size_teacher: Size of the teacher's hidden dimension. + """ + super().__init__(hidden_size_student, hidden_size_teacher) + + if self._tensor_parallel and not self._sequence_parallel: + logger.warning( + "``HiddenStateCosineLoss`` only works with tensors with full hidden dim. Ensure the " + "tensor inputs meet this requirement or use `--sequence_parallel` if tensor parallel is enabled." + ) + if hidden_size_student is None or hidden_size_teacher is None: + logger.warning( + "Hidden sizes of teacher and student not provided. This assumes " + "they are the same shape, which may be a mistake." + ) + + def forward(self, predictions: Tensor, targets: Tensor) -> Tensor: + """ + Forward function. + + Args: + predictions: Student model tensors (size [s, b, h]) + targets: Teacher model tensors (size [s, b, h]) + + Returns: + Cosine loss of tensors (size [b, s]) + """ + predictions, targets = self.pre_forward(predictions, targets) + + loss = F.cosine_embedding_loss( + predictions.view(-1, predictions.size(-1)), + targets.view(-1, targets.size(-1)), + targets.new_ones(1), + reduction="none", + ) + loss = loss.view(*predictions.shape[:2]) + + if self._sequence_parallel: + # Can efficiently gather size [s, b] tensor now for loss-masking purposes. + # TODO(aanoosheh) Reconsider for memory savings by splitting loss mask instead. 
+ loss = gather_from_sequence_parallel_region(loss) + + return self.post_forward(loss) + + +class LogitsKLLoss(BaseLoss): + """Calculates KL-Divergence loss between two logits tensors without reducing the sequence dim.""" + + def __init__(self, temperature: float = 1.0, reverse: bool = False): + """ + Constructor. + + Args: + temperature: Divide tensors by this value prior to calculating loss. + reverse: Whether to reverse the loss as KLD(teacher, student) instead of KLD(student, teacher) + """ + super().__init__() + self._temperature = temperature + self._reverse = reverse + + def forward(self, predictions: Tensor, targets: Tensor) -> Tensor: + """ + Forward function. + + Args: + predictions: Student model tensors (size [s, b, h]) + targets: Teacher model tensors (size [s, b, h]) + + Returns: + KLD loss of tensors (size [b, s]) + """ + predictions, targets = self.pre_forward(predictions, targets) + + # Division by temp should happen prior to finding max for both student and teacher. + # Currently we don't use temperature in any of ours runs (temp=1.0) + output_teacher = targets.float() / self._temperature + output_student = predictions.float() / self._temperature + + # Compute local softmax, and the reweight to compute global softmax. + if self._tensor_parallel: + + # Maximum value along vocab dimension across all GPUs. + teacher_logits_max, _ = torch.max(output_teacher, dim=-1) + torch.distributed.all_reduce( + teacher_logits_max, + op=torch.distributed.ReduceOp.MAX, + group=get_tensor_model_parallel_group(), + ) + output_teacher = output_teacher - teacher_logits_max.unsqueeze(dim=-1) + + denom_teacher = torch.sum(torch.exp(output_teacher), dim=-1) + # We can't use `gather_from_tensor_model_parallel_region` here since it discards + # gradients from other ranks - we need to all_reduce the gradients as well. + denom_teacher = all_reduce_autograd( + denom_teacher, group=get_tensor_model_parallel_group() + ) + + # Maximum value along vocab dimension across all GPUs. 
+ student_logits_max, _ = torch.max(output_student, dim=-1) + torch.distributed.all_reduce( + student_logits_max, + op=torch.distributed.ReduceOp.MAX, + group=get_tensor_model_parallel_group(), + ) + output_student = output_student - student_logits_max.unsqueeze(dim=-1).detach() + + denom_student = torch.sum(torch.exp(output_student), dim=-1) + denom_student = all_reduce_autograd( + denom_student, group=get_tensor_model_parallel_group() + ) + + slen, bsz, sharded_vocab_size = output_student.shape + student_log_prob = output_student - torch.log(denom_student).view(slen, bsz, 1).expand( + slen, bsz, sharded_vocab_size + ) + teacher_log_prob = output_teacher - torch.log(denom_teacher).view(slen, bsz, 1).expand( + slen, bsz, sharded_vocab_size + ) + + if self._reverse: + loss = torch.sum( + F.kl_div(teacher_log_prob, student_log_prob, reduction="none", log_target=True), + dim=-1, + ) + else: + loss = torch.sum( + F.kl_div(student_log_prob, teacher_log_prob, reduction="none", log_target=True), + dim=-1, + ) + + else: + if self._reverse: + loss = torch.sum( + F.kl_div( + F.log_softmax(output_teacher, dim=-1), + F.softmax(output_student, dim=-1), + reduction="none", + ), + dim=-1, + ) + else: + loss = torch.sum( + F.kl_div( + F.log_softmax(output_student, dim=-1), + F.softmax(output_teacher, dim=-1), + reduction="none", + ), + dim=-1, + ) + + return self.post_forward(loss, tp_reduce=True) + + +######################################################## + + +class LogitsAndIntermediatesLossBalancer(mtd.DistillationLossBalancer): + """ + LossBalancer implementation for Logit and Intermediate losses. + + Dynamically weighs distillation and original losses to balance during training. + """ + + def __init__(self, kd_loss_scale: float = 1.0, skip_original_loss: bool = False): + """Constructor. + + Args: + kd_loss_scale: Multiply distillation losses by this before weighing. + (Not used when `skip_original_loss` is True.) + skip_original_loss: Used to signal whether the original loss should be used, regardless + of whether it was passed into ``mtd.DistillationModel.compute_kd_loss()`` or not. + """ + super().__init__() + self._kd_loss_scale = kd_loss_scale + self._skip_original_loss = skip_original_loss + + def forward(self, loss_dict: Dict[str, Tensor]) -> Tensor: + """Forward function. + + Args: + loss_dict: All individual scalar losses, passed in during ``mtd.DistillationModel.compute_kd_loss()`` + + Returns: + Aggregate total scalar loss. + """ + original_loss = loss_dict.pop(mtd.loss_balancers.STUDENT_LOSS_KEY) + for _key, _loss in loss_dict.items(): + if _key.startswith(LogitsKLLoss.__name__): + logits_loss = _loss # should only be one + intermediate_loss = sum(loss_dict.values()) + + if intermediate_loss > 0: + dynamic_scale = logits_loss.item() / intermediate_loss.item() + intermediate_loss *= dynamic_scale + kd_loss_scale = self._kd_loss_scale / 2.0 + else: + kd_loss_scale = self._kd_loss_scale + + if self._skip_original_loss: + kd_loss = logits_loss + intermediate_loss + total_loss = kd_loss + else: + kd_loss = (logits_loss + intermediate_loss) * kd_loss_scale + dynamic_scale = original_loss.item() / kd_loss.item() + total_loss = original_loss + kd_loss * dynamic_scale + + return total_loss + + +######################################################## + + +class ProjectionLayer(nn.Module): + """Module to project student layer activations to teacher's size.""" + + def __init__(self, hidden_size_student: int, hidden_size_teacher: int): + """ + Constructor. 
+ + Args: + hidden_size_student: Size of the student's hidden dimension. + hidden_size_teacher: Size of the teacher's hidden dimension. + """ + super().__init__() + if hidden_size_student == hidden_size_teacher: + self._fit = nn.Identity() + else: + self._fit = nn.Linear(hidden_size_student, hidden_size_teacher) + self.apply(self._init_weights) + setattr(self._fit.weight, 'sequence_parallel', get_args().sequence_parallel) + setattr(self._fit.bias, 'sequence_parallel', get_args().sequence_parallel) + + def forward(self, student_tensor: Tensor): + """ + Forward function. + + Args: + student_tensor: Tensor to be fit to teacher size. + """ + return self._fit(student_tensor) + + def _init_weights(self, module): + """Initialize the weights.""" + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=0.01) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + +class _AllReduce(torch.autograd.Function): + """Implementation from old PyTorch `torch.distributed.nn.parallel`.""" + + @staticmethod + def forward(ctx, op, group, tensor): + ctx.group, ctx.op = group, op + tensor = tensor.clone() + torch.distributed.all_reduce(tensor, op=op, group=group) + return tensor + + @staticmethod + def backward(ctx, grad_output): + return (None, None, _AllReduce.apply(ctx.op, ctx.group, grad_output)) + + +def all_reduce_autograd( + tensor, op=torch.distributed.ReduceOp.SUM, group=torch.distributed.group.WORLD +): + return _AllReduce.apply(op, group, tensor) + + +######################################################## + + +def adjust_distillation_model_for_mcore(model: mtd.DistillationModel, distill_cfg: Dict[str, Any]): + """Extra modifcations to ``mtd.DistillationModel`` requried for Megatron-Core.""" + + # HACK: Hide teacher during `sharded_state_dict` method. + def _sharded_state_dict(self, *args, **kwargs) -> ShardedStateDict: + with self.hide_teacher_model(): + return self._sharded_state_dict(*args, **kwargs) + + model._sharded_state_dict = model.sharded_state_dict + model.sharded_state_dict = types.MethodType(_sharded_state_dict, model) + + # HACK: Skip `lm_loss` bypassing it when training if not needed for backprop. 
+ def _compute_language_model_loss(self, labels, logits) -> Tensor: + if self.training: + return torch.zeros_like(labels) + return self._compute_language_model_loss(labels, logits) + + if distill_cfg["skip_lm_loss"]: + model._compute_language_model_loss = model.compute_language_model_loss + model.compute_language_model_loss = types.MethodType(_compute_language_model_loss, model) diff --git a/megatron/inference/arguments.py b/megatron/inference/arguments.py index 7fcd7a7dc..6c4618c0a 100644 --- a/megatron/inference/arguments.py +++ b/megatron/inference/arguments.py @@ -22,5 +22,21 @@ def add_modelopt_args(parser): choices=["int8", "int8_sq", "fp8", "int4_awq", "w4a8_awq", "int4", "None"], help="Specify a quantization config from the supported choices.", ) + group.add_argument( + '--export-kd-cfg', + type=str, + default=None, + help='Path to distillation configuration yaml file.', + ) + group.add_argument( + '--export-kd-teacher-load', + type=str, + help='Path to checkpoint to load as distillation teacher.', + ) + group.add_argument( + '--export-kd-finalize', + action="store_true", + help='Export original student class back from a loaded distillation model.', + ) return parser diff --git a/megatron/inference/checkpointing.py b/megatron/inference/checkpointing.py index f8d3e2dd5..4d9ae4fb1 100644 --- a/megatron/inference/checkpointing.py +++ b/megatron/inference/checkpointing.py @@ -2,7 +2,9 @@ import os from pathlib import Path -from typing import Optional, Dict +from typing import Dict, Optional + +import torch.nn as nn from megatron.core import dist_checkpointing from megatron.training import get_args @@ -18,8 +20,8 @@ raise ImportError("Required `\"nvidia-modelopt[torch]\"` is not installed!") from e -def load_modelopt_state(load_dir: Optional[str] = None) -> Dict: - """Loading modelopt_state without a model. +def load_modelopt_state(load_dir: Optional[str] = None, model: Optional[nn.Module] = None) -> Dict: + """Loading modelopt_state without loading the model. If --use-dist-ckpt, we try to load from the sharded modelopt_state. This will not load the model state_dict. Otherwise, if the checkpoint is not sharded, we load the base checkpoint (that @@ -27,6 +29,7 @@ def load_modelopt_state(load_dir: Optional[str] = None) -> Dict: Args: load_dir: optionally provide a different loading path + model: required when loading a sharded checkpoint """ args = get_args() @@ -34,6 +37,8 @@ def load_modelopt_state(load_dir: Optional[str] = None) -> Dict: load_dir = args.load if args.use_dist_ckpt: + assert model is not None, "`model` argument required when `args.use_dist_ckpt is True`" + # Read the tracker file and set the iteration. tracker_filename = os.path.join(load_dir, 'latest_checkpointed_iteration.txt') # If no tracker file, assuming that it is a .nemo checkpoint. 
@@ -52,7 +57,8 @@ def load_modelopt_state(load_dir: Optional[str] = None) -> Dict: print_rank_0("Loading sharded modelopt_state ({})".format(modelopt_state_dir)) modelopt_state = restore_modelopt_state_metadata( dist_checkpointing.load( - get_sharded_modelopt_state(args.num_layers), modelopt_state_dir, + get_sharded_modelopt_state(num_layers=args.num_layers, model=model), + modelopt_state_dir, ) ) return modelopt_state diff --git a/megatron/inference/docs/distillation.md b/megatron/inference/docs/distillation.md new file mode 100644 index 000000000..2e2a9c703 --- /dev/null +++ b/megatron/inference/docs/distillation.md @@ -0,0 +1,91 @@ +# Megatron-LM ModelOpt Distillation Integration + +## Table of Contents + +[[_TOC_]] + +## How To + +### Prerequisites + +In order to perform soft-label Knowledge Distillation between two models on a specific dataset, +we take a larger teacher model which has already been fully trained and use its logits as +labels for a smaller student model. + +We require the following pieces of data: +* Teacher model weights +* Student model weights (unless starting from scratch) +* NeMo-format config file for teacher model +* Distillation run config file +* Tokenizer +* Dataset + +It also requires the installation of the [NVIDIA Model Optimizer library](https://github.com/NVIDIA/TensorRT-Model-Optimizer) (minimum version 0.15) + +### Teacher checkpoint format + +We enforce the use of a config yaml in [NeMo](https://github.com/NVIDIA/NeMo) checkpoint-format style to define the arguments to the teacher model. +The normal command-line arguments go toward constructing the student, thus the values in this file +override the student arguments before being handed to the teacher constructor. This file must be +named `model_config.yaml` and be placed in the root of the teacher model checkpoint folder. +Unlike NeMo-generated checkpoints, Megatron-LM checkpoints do not contain these files by default and must be manually created. + +> NOTE: Not all keys in the NEMO-style yaml correspond 1:1 to the argument names for Megatron-LM. These +are converted in `megatron/inference/gpt/model_provider.py`. + +### Distillation config format + +Configuring the distillation run is done via a separate YAML file with the following fields: + +```yaml +logit_layers: ["output_layer", "output_layer"] +intermediate_layer_pairs: + - ["decoder.layers.0.input_layernorm", "decoder.layers.0.input_layernorm"] + - ["decoder.final_layernorm", "decoder.layers.30.input_layernorm"] +skip_lm_loss: true +kd_loss_scale: 10.0 +``` + +* `logit_layers` defines the names of the student and teacher submodules, respectively, whose outputs are the logits. +* `intermediate_layer_pairs` defines the potentially multiple – or zero – pairs of intermediate activation layers to also perform loss on. +* `skip_lm_loss` decides whether or not to compute and combine the original training LM loss with the KD loss +* `kd_loss_scale` will scale the KD loss before adding it to the LM loss, if `skip_lm_loss` is `True`. + +### Training + +Distillation is triggered by calling `megatron/inference/pretrain_gpt_modelopt.py` while the `--kd-teacher-load` argument is not empty. + +Use the regular arguments you would for `pretrain_gpt.py` in addition to the following: + +```bash +--kd-teacher-load +--kd-distill-cfg +--export-te-mcore-model +``` + +## Distillation API and design + +Knowledge Distillation is done via the [NVIDIA Model Optimizer library](https://github.com/NVIDIA/TensorRT-Model-Optimizer). 
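Condensed from the `model_provider` and `loss_func` changes later in this patch, the hedged sketch below shows how the student is wrapped and how the KD loss augments (or replaces) the LM loss. The factory tuple format and config keys follow this patch's usage; treat it as illustration rather than a Model Optimizer API reference.

```python
import modelopt.torch.distill as mtd


def wrap_student_for_distillation(student, teacher_factory_spec, distill_cfg):
    """Convert the student into a DistillationModel that also owns the teacher."""
    kd_config = {
        # (factory_fn, args, kwargs) tuple, as used by model_provider in this patch.
        "teacher_model": teacher_factory_spec,
        # e.g. {("output_layer", "output_layer"): LogitsKLLoss(), ...}
        "criterion": distill_cfg["criterion"],
        "loss_balancer": distill_cfg["loss_balancer"],
    }
    return mtd.convert(student, mode=[("kd_loss", kd_config)])


def distillation_loss(model, lm_loss, loss_mask, mask_fn):
    """Combine the masked LM loss with the recorded KD losses, as loss_func.py does."""
    return model.compute_kd_loss(
        student_loss=lm_loss, loss_reduction_fn=lambda t: mask_fn(t, loss_mask)
    )
```

When `--export-kd-finalize` is passed, `mtd.export(model)` later unwraps the trained student back into the original model class.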
+ +The model creation step wraps the base model as the student in a +`modelopt.torch.distill.DistillationModel` wrapper which also contains the teacher model. + +Model Optimizer modifies the model using the loss criterion present in the distillation config yaml file, which +defines a loss function between two module attribute names of the teacher and student model, respectively. + +Default loss function used between logits is a KL-Divergence Loss and loss used among intermediate tensors is Cosine-Similarity, +both defined in `megatron/inference/algos/distillation.py`. + +## Restrictions + +* Pipeline Parallel is currently unsupported for Distillation. + +* Only Megatron-Core (not legacy Megatron-LM) is supported for Distillation. + +## Known Issues + +* An unknown memory allocation (a few megabytes per microbatch) takes place when the model is converted to a +`modelopt.torch.distill.DistillationModel`. If `--manual-gc` is enabled, it can easily lead to an OOM after some iterations. + +* A CUDA kernel issue is occurring where student's forward latency is severly prolonged compared to running student forward +without a teacher model. This means the total time per iteration may be up to 40% longer than ideally expected. diff --git a/megatron/inference/endpoints/common.py b/megatron/inference/endpoints/common.py new file mode 100644 index 000000000..e430cc8f0 --- /dev/null +++ b/megatron/inference/endpoints/common.py @@ -0,0 +1,18 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import torch +import threading + +GENERATE_NUM = 0 +BEAM_NUM = 1 +LOCK = threading.Lock() + + +def send_do_generate(): + choice = torch.tensor([GENERATE_NUM], dtype=torch.long, device="cuda") + torch.distributed.broadcast(choice, 0) + + +def send_do_beam_search(): + choice = torch.tensor([BEAM_NUM], dtype=torch.long, device="cuda") + torch.distributed.broadcast(choice, 0) diff --git a/megatron/inference/endpoints/completions.py b/megatron/inference/endpoints/completions.py new file mode 100644 index 000000000..32dbc5dca --- /dev/null +++ b/megatron/inference/endpoints/completions.py @@ -0,0 +1,186 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""This endpoint is for mimicking the OpenAI completions API. +See https://platform.openai.com/docs/api-reference/completions/create +""" + +import torch +import numpy as np +from megatron.training import get_tokenizer +from megatron.inference.text_generation.api import generate_and_post_process +from megatron.inference.endpoints.common import send_do_generate, LOCK + +from flask import request, jsonify +from flask_restful import Resource + + +def detokenize(prompt, tok) -> list[str]: + if isinstance(prompt, str): + return [prompt] + elif isinstance(prompt, list): + if not prompt: # The list is empty, can't determine its intended type. 
+ raise ValueError(f"prompt contains no items: {prompt}") + if all(isinstance(item, str) for item in prompt): + return prompt + elif all(isinstance(item, int) for item in prompt): + return [tok.detokenize(prompt[0])] + elif all( # list[list[int]] + isinstance(item, list) and all(isinstance(subitem, int) for subitem in item) + for item in prompt + ): + return [tok.detokenize(item) for item in prompt] + else: + raise ValueError(f"Unknown prompt type: {type(prompt)}") + else: + raise ValueError(f"Unknown prompt type: {type(prompt)}") + + +class MegatronCompletions(Resource): + def __init__(self, model): + self.model = model + + def post(self): + req = request.get_json() + tok = get_tokenizer() + prompts = detokenize(req["prompt"], tok) + + # convert the openai-local-completions api to the format + # expected by the generate_and_post_process function + local_kwargs = { + "prompts": prompts, + "tokens_to_generate": int(req["max_tokens"]), + "temperature": float(req.get("temperature", 1.0)), + "top_p_sampling": float(req.get("top_p", 1.0)), + "return_topk_logprobs": int(req.get("logprobs", 0)), + "echo": bool(req.get("echo", False)), + "random_seed": int(req.get("seed", -1)), + "best_of": int(req.get("best_of", 1)), + "num_completions": int(req.get("n", 1)), + "stop": req.get("stop", [tok.detokenize([tok.eod])]), + "return_output_log_probs": True, + } + + if isinstance(local_kwargs["stop"], str): + local_kwargs["stop"] = [local_kwargs["stop"]] + + if local_kwargs["temperature"] == 0: + # temperature = 0 is openai api's way of specifying greedy + # deterministic sampling but actually passing temperature=0 + # is undefined and leads to div by zero, so set top-k = 1 + local_kwargs["top_k_sampling"] = 1 + local_kwargs["top_p_sampling"] = 0 + + echo = local_kwargs.pop("echo") + if (not echo) and (local_kwargs["tokens_to_generate"] == 0): + return "echo=False not supported when tokens_to_generate=0", 400 + + if local_kwargs.pop("best_of") > 1: + return "best_of > 1 not supported", 400 + + if local_kwargs.pop("num_completions") > 1: + return "num_completions > 1 not supported", 400 + + if local_kwargs["tokens_to_generate"] > 0 and local_kwargs["return_topk_logprobs"] > 0: + return "cannot return top-k unless tokens_to_generate=0 at this time", 400 + + if local_kwargs["return_topk_logprobs"] > 10: + return "return_topk_logprobs > 10 not supported", 400 + + stop_until = local_kwargs.pop("stop") + + with LOCK: + send_do_generate() + result = generate_and_post_process( + self.model, + add_BOS=False, + use_eod_token_for_early_termination=True, + stop_on_double_eol=True, + stop_on_eol=False, + prevent_newline_after_colon=False, + **local_kwargs, + ) + + prompts_plus_generations, prompts_plus_generations_segments = result[:2] + output_log_probs, tokens = result[2:4] + + logprobs_topk, logprobs_topk_indices = None, None + if len(result) > 4: + logprobs_topk, logprobs_topk_indices = result[4] + + if "debug_fname" in req: + torch.save( + { + "args": local_kwargs, + "tokenizer": tok, + "prompts_plus_generations": prompts_plus_generations, + "prompts_plus_generations_segments": prompts_plus_generations_segments, + "output_log_probs": output_log_probs, + "tokens": tokens, + "logprobs_topk": logprobs_topk, + "logprobs_topk_indices": logprobs_topk_indices, + }, + f"completions_result_{req['debug_fname']}.pt", + ) + + batch_size = len(tokens) + ret_topk_logprobs = [[None] for _ in range(batch_size)] + if local_kwargs["return_topk_logprobs"] > 0: + assert echo, "echo=False not supported when return_topk_logprobs > 0" + 
logprobs_topk_indices = logprobs_topk_indices.cpu().numpy().tolist() + logprobs_topk = logprobs_topk.cpu().numpy().tolist() + + for batch_idx, segmented_response in enumerate(prompts_plus_generations_segments): + for t, _ in enumerate(segmented_response): + ret_topk_logprobs[batch_idx].append( + { + tok.detokenize([tk]): tk_ll + for tk, tk_ll in zip( + logprobs_topk_indices[batch_idx][t], logprobs_topk[batch_idx][t] + ) + } + ) + + results = [] + for batch_idx, (prompt_plus_generation, prompt) in enumerate( + zip(prompts_plus_generations, prompts) + ): + tok_offsets = tok.offsets(tokens[batch_idx], prompt_plus_generation) + if echo: + str_trunc_start_idx, tok_idx_start = 0, 0 + else: + str_trunc_start_idx = len(prompt) + tok_idx_start = np.searchsorted(tok_offsets, len(prompt)) + + # truncate the generation at the first stop token + trunc_idxs = [ + prompt_plus_generation.find(suffix, str_trunc_start_idx) + for suffix in stop_until + if suffix and suffix in prompt_plus_generation + ] + str_trunc_end_idx = min(filter(lambda x: x != -1, trunc_idxs), default=len(prompt_plus_generation)) + truncated_generation = prompt_plus_generation[str_trunc_start_idx:str_trunc_end_idx] + + # TODO(sasatheesh): handle cases where truncated_generation is not a full token + tok_idx_end = np.searchsorted(tok_offsets, len(truncated_generation)) + + truncated_generation_logprobs = output_log_probs[batch_idx][tok_idx_start:tok_idx_end] + truncated_generation_tokens = tokens[batch_idx][tok_idx_start:tok_idx_end] + truncated_generation_topk_logprobs = ret_topk_logprobs[batch_idx][ + tok_idx_start:tok_idx_end + ] + truncated_generation_tok_offsets = tok_offsets[tok_idx_start:tok_idx_end] + + results.append( + { + "index": batch_idx, + "text": truncated_generation, + "logprobs": { + "token_logprobs": [None] + truncated_generation_logprobs, + "tokens": [tok.detokenize([tk]) for tk in truncated_generation_tokens], + "text_offset": truncated_generation_tok_offsets, + "top_logprobs": truncated_generation_topk_logprobs, + }, + } + ) + + return jsonify({"choices": results}) diff --git a/megatron/inference/gpt/__init__.py b/megatron/inference/gpt/__init__.py index f8011007a..830c0d7fb 100644 --- a/megatron/inference/gpt/__init__.py +++ b/megatron/inference/gpt/__init__.py @@ -1 +1,4 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from .loss_func import loss_func +from .model_provider import model_provider diff --git a/megatron/inference/gpt/loss_func.py b/megatron/inference/gpt/loss_func.py new file mode 100644 index 000000000..bbc8670ad --- /dev/null +++ b/megatron/inference/gpt/loss_func.py @@ -0,0 +1,89 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Pretrain GPT loss function(s).""" + +import os + +import torch + +from megatron.core import mpu, tensor_parallel +from megatron.core.models.gpt import GPTModel +from megatron.training import get_args +from megatron.training.utils import average_losses_across_data_parallel_group, unwrap_model + + +def _mask_loss(output_tensor, loss_mask): + """Apply mask to the unreduced loss tensor.""" + args = get_args() + + if isinstance(output_tensor, tuple): + # Special distillation flag indicating whether to perform an additional tensor-parallel reduction. 
+ output_tensor, tp_reduce = output_tensor + else: + tp_reduce = False + + losses = output_tensor.float() + loss_mask = loss_mask.view(-1).float() + + if args.context_parallel_size > 1: + loss = torch.cat([torch.sum(losses.view(-1) * loss_mask).view(1), loss_mask.sum().view(1)]) + torch.distributed.all_reduce(loss, group=mpu.get_context_parallel_group()) + loss = loss[0] / loss[1] + else: + loss = torch.sum(losses.view(-1) * loss_mask) / loss_mask.sum() + + if tp_reduce and args.tensor_model_parallel_size > 1: + # Losses such as KL-Div require extra all-reduce to ensure same values across MP-TP partitions. + loss = torch.sum(tensor_parallel.gather_from_tensor_model_parallel_region(loss.reshape(1))) + + return loss + + +def _allreduce_loss(loss): + """Reduce loss for reporting purposes.""" + args = get_args() + + # Check individual rank losses are not NaN prior to DP all-reduce. + if args.check_for_nan_in_loss_and_grad: + global_rank = torch.distributed.get_rank() + assert not loss.isnan(), ( + f'Rank {global_rank}: found NaN in local forward loss calculation. ' + f'Device: {torch.cuda.current_device()}, node: {os.uname()[1]}' + ) + + # Reduce loss for logging. + averaged_loss = average_losses_across_data_parallel_group([loss]) + + return loss * args.context_parallel_size, averaged_loss[0] + + +def loss_func(loss_mask: torch.Tensor, model: GPTModel, output_tensor: torch.Tensor): + """Loss function (with KD Loss support). + + Args: + loss_mask (Tensor): Used to mask out some portions of the loss + model (GPTModel): The model (can be wrapped) + output_tensor (Tensor): The tensor with the losses + """ + args = get_args() + + # Unwrap for both Distillation and LANA + model = unwrap_model(model) + + # Standard lm loss + loss_lm = _mask_loss(output_tensor, loss_mask) + loss_lm, loss_lm_avg = _allreduce_loss(loss_lm) + + loss, report = loss_lm, {'lm loss': loss_lm_avg} + + if model.training and args.export_kd_teacher_load and not args.export_kd_finalize: + # [ModelOpt]: Handle knowledge distillation + loss_kd = model.compute_kd_loss( + student_loss=loss, loss_reduction_fn=lambda x: _mask_loss(x, loss_mask) + ) + loss_kd, loss_kd_avg = _allreduce_loss(loss_kd) + + # Still logs original loss for baseline-comparison purposes. 
+ loss, report["kd loss"] = loss_kd, loss_kd_avg + + return loss, report diff --git a/megatron/inference/gpt/model_provider.py b/megatron/inference/gpt/model_provider.py index 0df0168fa..97e03b20a 100644 --- a/megatron/inference/gpt/model_provider.py +++ b/megatron/inference/gpt/model_provider.py @@ -2,7 +2,14 @@ """ModelOpt GPT model provider.""" +import os +from argparse import Namespace +from typing import Any, Dict + +import modelopt.torch.distill as mtd import modelopt.torch.opt as mto +import yaml + from megatron.core.inference.modelopt_support.gpt.model_specs import get_gpt_layer_modelopt_spec from megatron.core.inference.modelopt_support.gpt.state_dict_hooks import ( mcore_gpt_load_legacy_state_dict_pre_hook, @@ -11,11 +18,107 @@ from megatron.core.models.gpt import GPTModel as MCoreGPTModel from megatron.core.parallel_state import get_tensor_model_parallel_rank from megatron.core.transformer.spec_utils import import_module -from megatron.inference.checkpointing import load_modelopt_state +from megatron.inference.algos import distillation +from megatron.inference.checkpointing import load_modelopt_checkpoint, load_modelopt_state from megatron.training import get_args, print_rank_0 from megatron.training.arguments import core_transformer_config_from_args +def _add_load_convert_hooks(model: MCoreGPTModel): + """Register some load_state_dict prehooks to handle some known state_dict key mismatch. + + (legacy <-> modelopt) and (default te <-> modelopt) + """ + args = get_args() + if args.export_legacy_megatron: + model._register_load_state_dict_pre_hook(mcore_gpt_load_legacy_state_dict_pre_hook) + if args.export_te_mcore_model: + model._register_load_state_dict_pre_hook(mcore_gpt_load_te_state_dict_pre_hook) + + +def _load_teacher_model_config(checkpoint_path: str) -> Namespace: + """Reads teacher config from a file. + + The file named ``model_config.yaml`` within the checkpoint directory should specify + (in NEMO format) any model architecture settings which differ from the main student model's. + This function will translate NEMO field names to MCore as needed. 
+ """ + required_teacher_fields = ( + "num_layers", + "hidden_size", + "ffn_hidden_size", + "num_attention_heads", + ) + + config_path = os.path.join(checkpoint_path, "model_config.yaml") + if not os.path.exists(config_path): + raise FileNotFoundError( + "Teacher checkpoint dir must contain a NEMO-format yaml config named 'model_config.yaml'" + ) + with open(config_path) as f: + config = yaml.safe_load(f) + + missing_keys = [k for k in required_teacher_fields if k not in config] + if missing_keys: + raise ValueError( + f"Teacher `model_config.yaml` file missing the following fields: {missing_keys}" + ) + + if "encoder_seq_length" in config: + config["seq_length"] = config["encoder_seq_length"] + if "bias" in config: + config["disable_bias_linear"] = not config["bias"] + if config.get("activation") == "swiglu": + config["swiglu"] = True + if config.get("position_embedding_type", False) is None: + config["use_rotary_position_embeddings"] = config["no_position_embedding"] = True + if "share_embeddings_and_output_weights" in config: + config["untie_embeddings_and_output_weights"] = not config[ + "share_embeddings_and_output_weights" + ] + if "tokenizer" in config: + config["tokenizer_type"] = config["tokenizer"]["type"] + config["tokenizer_model"] = config["tokenizer"]["model"] + if "masked_softmax_fusion" in config: + config["no_masked_softmax_fusion"] = not config["masked_softmax_fusion"] + if config.get("normalization") == "layernorm1p": + config["apply_layernorm_1p"] = True + if "precision" in config: + config[config["precision"]] = True + if "mcore_gpt" in config: + config["use_mcore_models"] = config["mcore_gpt"] + + args_dict = vars(get_args()).copy() + del args_dict["kv_channels"] # not recalculated if present + args_dict.update(config) + + return Namespace(**args_dict) + + +def _teacher_provider(config: Namespace, model_kwargs: Dict[str, Any]) -> MCoreGPTModel: + """Teacher model factory (must be a non-local function to pickle).""" + args = get_args() + + # Convert to `TransformerConfig` here to avoid ModelOpt pickling issues (contains local functions) + config = core_transformer_config_from_args(config) + config.non_homogeneous_layers = True + + teacher = MCoreGPTModel(config=config, **model_kwargs) + + _add_load_convert_hooks(teacher) + + print_rank_0("Loading teacher checkpoint...") + # [WAR]: load checkpoint will check checkpoint's saved args and rng state if not finetune. + # To avoid error out on loading teacher's checkpoint, we temporarily set args.finetune to + # True while loading the teacher checkpoint. + original_args_finetune = args.finetune + args.finetune = True + load_modelopt_checkpoint([teacher], load_arg='export_kd_teacher_load') + args.finetune = original_args_finetune + + return teacher + + def model_provider(pre_process=True, post_process=True, parallel_output=True) -> MCoreGPTModel: """Builds the model. 
@@ -47,12 +150,13 @@ def model_provider(pre_process=True, post_process=True, parallel_output=True) -> transformer_layer_spec = import_module(args.spec) else: transformer_layer_spec = get_gpt_layer_modelopt_spec( - remap_te_layernorm=args.export_te_mcore_model, qk_layernorm=False, + num_experts=args.num_experts, + moe_grouped_gemm=args.moe_grouped_gemm, + remap_te_layernorm=args.export_te_mcore_model, + qk_layernorm=False, ) - model_type = MCoreGPTModel model_kwargs = { - "config": config, "transformer_layer_spec": transformer_layer_spec, "vocab_size": args.padded_vocab_size, "max_sequence_length": args.max_position_embeddings, @@ -66,20 +170,49 @@ def model_provider(pre_process=True, post_process=True, parallel_output=True) -> "rotary_base": args.rotary_base, "rope_scaling": args.use_rope_scaling, } - - model = model_type(**model_kwargs) + model = MCoreGPTModel(config=config, **model_kwargs) # Load modelopt_state - modelopt_state = load_modelopt_state() if args.load else {} + modelopt_state = load_modelopt_state(model=model) if args.load else {} if modelopt_state: model = mto.restore_from_modelopt_state(model, modelopt_state) - # Register some load_state_dict prehooks to handle some known state_dict key mismatch. - # (legacy <-> modelopt) and (default te <-> modelopt) - if args.export_legacy_megatron: - model._register_load_state_dict_pre_hook(mcore_gpt_load_legacy_state_dict_pre_hook) - if args.export_te_mcore_model: - model._register_load_state_dict_pre_hook(mcore_gpt_load_te_state_dict_pre_hook) + _add_load_convert_hooks(model) + + # Distillation mode. + distill_cfg = None + if args.export_kd_teacher_load: + print_rank_0("Distillation: Enabled.") + + # NOTE: Unknown memory leak occuring per fwd-bwd pass if model + # is converted to a `modelopt.torch.opt.DynamicModule`. + # Argument `--manual-gc` can result in an eventual OOM. + assert ( + not args.manual_gc + ), "ModelOpt Distillation currently incompatible with `--manual-gc` option." + + teacher_config = _load_teacher_model_config(args.export_kd_teacher_load) + distill_cfg = distillation.load_distillation_config( + args.export_kd_cfg, student_cfg=config, teacher_cfg=teacher_config + ) + # Intialize DistillationModel if not already restored. + if str(mto.conversion.get_mode(model)) != "kd_loss" and not args.export_kd_finalize: + kd_config = { + "teacher_model": (_teacher_provider, [teacher_config, model_kwargs], {}), + "criterion": distill_cfg["criterion"], + "loss_balancer": distill_cfg["loss_balancer"], + } + model = mtd.convert(model, mode=[("kd_loss", kd_config)]) + + if isinstance(model, mtd.DistillationModel): + # Export the student model and create the distillation export mode. + if args.export_kd_finalize: + print_rank_0("Distillation: Exporting student model into original model...") + model = mtd.export(model) + else: + assert distill_cfg is not None + # Additional tweaks needed for MCore/Nemo. + distillation.adjust_distillation_model_for_mcore(model, distill_cfg) # Print models on all pp ranks. 
if get_tensor_model_parallel_rank() == 0: diff --git a/megatron/inference/text_generation/api.py b/megatron/inference/text_generation/api.py index 06dad2e51..d744ca769 100644 --- a/megatron/inference/text_generation/api.py +++ b/megatron/inference/text_generation/api.py @@ -32,9 +32,9 @@ def generate_and_post_process(model, stop_on_eol=False, prevent_newline_after_colon=False, random_seed=-1, - return_logits=False, detokenize_segments=True, - data_parallel=False): + data_parallel=False, + return_topk_logprobs=0): """Run inference and post-process outputs, i.e., detokenize, move to cpu and convert to list. @@ -45,7 +45,7 @@ def generate_and_post_process(model, """ # Main inference. - tokens, lengths, output_log_probs, logits = generate( + tokens, lengths, output_log_probs, logprobs_topk = generate( model, forward_step=forward_step, prompts=prompts, @@ -74,11 +74,10 @@ def generate_and_post_process(model, for i, (prob, seg) in enumerate(zip(output_log_probs, prompts_plus_generations_segments)): output_log_probs[i] = prob[:len(seg)-1] - if return_logits: - assert(tokens_to_generate == 0) - assert(mpu.get_pipeline_model_parallel_world_size() == 1) + if return_topk_logprobs > 0: + assert tokens_to_generate == 0 return prompts_plus_generations, prompts_plus_generations_segments, \ - output_log_probs, tokens, logits + output_log_probs, tokens, logprobs_topk else: return prompts_plus_generations, prompts_plus_generations_segments, \ output_log_probs, tokens diff --git a/megatron/inference/text_generation/communication.py b/megatron/inference/text_generation/communication.py index a67e0a5e4..c3d5dfefb 100644 --- a/megatron/inference/text_generation/communication.py +++ b/megatron/inference/text_generation/communication.py @@ -9,7 +9,6 @@ from megatron.core import mpu - # TODO: use functions from megatron/p2p def recv_from_prev_pipeline_rank_(recv_buffer=None): """Receive from previous pipeline stage and update the @@ -25,8 +24,6 @@ def recv_from_prev_pipeline_rank_(recv_buffer=None): # To protect against race condition when using batch_isend_irecv(). torch.cuda.synchronize() - - # TODO: use functions from megatron/p2p def send_to_next_pipeline_rank(tensor=None): """Send output to the next pipeline stage.""" @@ -80,6 +77,29 @@ def broadcast_from_last_pipeline_stage(size, dtype, tensor=None): return tensor +def _send_and_recv_from_last_to_first_pipeline_stage(tensor=None): + is_last_stage = mpu.is_pipeline_last_stage() + is_first_stage = mpu.is_pipeline_first_stage() + + if is_last_stage or is_first_stage: + if is_first_stage: + recv_prev_op = torch.distributed.P2POp( + torch.distributed.irecv, tensor, + mpu.get_pipeline_model_parallel_last_rank()) + reqs = torch.distributed.batch_isend_irecv([recv_prev_op]) + elif is_last_stage: + send_next_op = torch.distributed.P2POp( + torch.distributed.isend, tensor, + mpu.get_pipeline_model_parallel_first_rank()) + reqs = torch.distributed.batch_isend_irecv([send_next_op]) + + for req in reqs: + req.wait() + # To protect against race condition when using batch_isend_irecv(). 
+ torch.cuda.synchronize() + + return tensor + def broadcast_from_last_to_first_pipeline_stage(size, dtype, tensor=None): """Broadcast tensor values from last stage into the first stage.""" @@ -98,10 +118,7 @@ def broadcast_from_last_to_first_pipeline_stage(size, dtype, tensor=None): tensor = torch.empty(size, dtype=dtype, device=torch.cuda.current_device()) - src = mpu.get_pipeline_model_parallel_last_rank() - group = mpu.get_embedding_group() - # Broadcast from last stage into the first stage. - torch.distributed.broadcast(tensor, src, group) + tensor = _send_and_recv_from_last_to_first_pipeline_stage(tensor) else: tensor = None @@ -123,8 +140,6 @@ def copy_from_last_to_first_pipeline_stage(size, dtype, tensor=None): if is_last_stage or is_first_stage: _is_cuda(tensor) is_contiguous = tensor.is_contiguous() - src = mpu.get_pipeline_model_parallel_last_rank() - group = mpu.get_embedding_group() if is_contiguous: tensor_ = tensor else: @@ -134,8 +149,7 @@ def copy_from_last_to_first_pipeline_stage(size, dtype, tensor=None): tensor_ = torch.empty(size, dtype=dtype, device=torch.cuda.current_device()) - # Broadcast from last stage into the first stage. - torch.distributed.broadcast(tensor_, src, group) + tensor_ = _send_and_recv_from_last_to_first_pipeline_stage(tensor_) # Update the first stage tensor if is_first_stage and not is_contiguous: tensor[...] = tensor_ @@ -150,7 +164,7 @@ def broadcast_tensor(size, dtype, tensor=None, rank=0, data_parallel=False): data_parallel (bool): Broadcast across a single data parallel model replica. """ if data_parallel: - rank = parallel_state.get_tensor_model_parallel_src_rank() + rank = parallel_state.get_model_parallel_src_rank() if torch.distributed.get_rank() == rank: _is_cuda_contiguous(tensor) @@ -161,7 +175,7 @@ def broadcast_tensor(size, dtype, tensor=None, rank=0, data_parallel=False): group = None if data_parallel: - group = parallel_state.get_tensor_model_parallel_group() + group = parallel_state.get_model_parallel_group() torch.distributed.broadcast(tensor, rank, group=group) @@ -179,12 +193,11 @@ def broadcast_list(size, dtype, list_values=None, rank=0, data_parallel=False): tensor = None if data_parallel: - src_rank = parallel_state.get_data_parallel_src_rank() - if src_rank == 0: + if parallel_state.get_model_parallel_src_rank() == torch.distributed.get_rank(): tensor = torch.tensor(list_values, dtype=dtype, device=torch.cuda.current_device()) - rank = parallel_state.get_tensor_model_parallel_src_rank() + rank = parallel_state.get_model_parallel_src_rank() else: if torch.distributed.get_rank() == rank: tensor = torch.tensor(list_values, dtype=dtype, diff --git a/megatron/inference/text_generation/forward_step.py b/megatron/inference/text_generation/forward_step.py index 4d4878d33..0a89936ed 100644 --- a/megatron/inference/text_generation/forward_step.py +++ b/megatron/inference/text_generation/forward_step.py @@ -32,30 +32,40 @@ def __init__(self, model, max_batch_size, max_sequence_length): args = get_args() self.pipeline_size_larger_than_one = ( args.pipeline_model_parallel_size > 1) - # Threshold of pipelining. + # Threshold for whether we split up the batch for pipelining. 
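The `_send_and_recv_from_last_to_first_pipeline_stage` helper introduced in communication.py above replaces the embedding-group broadcast with direct point-to-point traffic between the last and first pipeline ranks. A minimal sketch of the same pattern, assuming two CPU ranks launched with `torchrun --nproc_per_node=2` and the gloo backend (the real helper operates on CUDA tensors within the pipeline-parallel group):

```python
import torch
import torch.distributed as dist

def main():
    # Launch with: torchrun --nproc_per_node=2 p2p_last_to_first.py
    dist.init_process_group(backend="gloo")  # CPU-only sketch
    rank, world = dist.get_rank(), dist.get_world_size()
    first, last = 0, world - 1

    tensor = torch.zeros(4)
    if rank == last:
        tensor = torch.arange(4, dtype=torch.float32)  # produced by the "last stage"
        ops = [dist.P2POp(dist.isend, tensor, first)]
    elif rank == first:
        ops = [dist.P2POp(dist.irecv, tensor, last)]
    else:
        ops = []  # intermediate stages are not involved

    if ops:
        for req in dist.batch_isend_irecv(ops):
            req.wait()

    if rank == first:
        print("first stage received:", tensor)  # tensor([0., 1., 2., 3.])
    dist.destroy_process_group()

if __name__ == "__main__":
    main()
```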
self.pipelining_batch_x_seqlen = \ args.inference_batch_times_seqlen_threshold def _forward(self, tokens, position_ids, attention_mask): return self.model(tokens, position_ids, attention_mask, inference_params=self.inference_params) - def __call__(self, tokens, position_ids, attention_mask): + def __call__(self, tokens, position_ids, attention_mask, recv_buffer_seq_length=None): """Invocation of the forward methods. Note that self.inference_params is being modified by the forward step.""" # Pipelining case. + # This runs only if current_batch_x_seqlen > args.inference_batch_times_seqlen_threshold + # and requires setting args.pipeline_model_parallel > 1. The batch will be split into + # smaller microbatches to be pipelined through the stages. if self.pipeline_size_larger_than_one: - current_batch_x_seqlen = tokens.size(0) * tokens.size(1) + seq_len = tokens.size(1) if recv_buffer_seq_length is None else recv_buffer_seq_length + current_batch_x_seqlen = tokens.size(0) * seq_len if current_batch_x_seqlen >= self.pipelining_batch_x_seqlen: micro_batch_size = \ - max(1, self.pipelining_batch_x_seqlen // tokens.size(1)) + max(1, self.pipelining_batch_x_seqlen // seq_len) return self._with_pipelining_forward_step(tokens, position_ids, attention_mask, - micro_batch_size) + micro_batch_size, + recv_buffer_seq_length=recv_buffer_seq_length) + + recv_buffer = None + if recv_buffer_seq_length is not None: + recv_buffer = _allocate_recv_buffer(tokens.size(0), recv_buffer_seq_length) return self._no_pipelining_forward_step(tokens, position_ids, - attention_mask) + attention_mask, + recv_buffer=recv_buffer) def _forward_step_helper(self, tokens, position_ids, attention_mask, recv_buffer=None): @@ -63,15 +73,20 @@ def _forward_step_helper(self, tokens, position_ids, attention_mask, recv_buffer only the first time the memory is allocated.""" batch_size = tokens.size(0) sequence_length = tokens.size(1) + if recv_buffer is None: recv_buffer = _allocate_recv_buffer(batch_size, sequence_length) # Receive from previous stage. - recv_from_prev_pipeline_rank_(recv_buffer) + if recv_buffer is not None and torch.numel(recv_buffer) > 0: + recv_from_prev_pipeline_rank_(recv_buffer) # Forward pass through the model. - self.model.set_input_tensor(recv_buffer) + if not mpu.is_pipeline_first_stage(): + self.model.set_input_tensor(recv_buffer) output_tensor = self._forward(tokens, position_ids, attention_mask) + if isinstance(output_tensor, tuple): + output_tensor = output_tensor[0] # Send output to the next stage. send_to_next_pipeline_rank(output_tensor) @@ -96,10 +111,10 @@ def _no_pipelining_forward_step(self, tokens, position_ids, attention_mask, return logits - def _with_pipelining_forward_step(self, tokens, position_ids, attention_mask, micro_batch_size): + def _with_pipelining_forward_step(self, tokens, position_ids, attention_mask, micro_batch_size, recv_buffer_seq_length=None): """No interleaving is supported.""" - sequence_length = tokens.size(1) batch_size = tokens.size(0) + sequence_length = tokens.size(1) if recv_buffer_seq_length is None else recv_buffer_seq_length # Divide the batch dimension into micro batches. num_micro_batches, last_chunk = divmod(batch_size, @@ -140,7 +155,7 @@ def _with_pipelining_forward_step(self, tokens, position_ids, attention_mask, mi # Once we are done with all the micro-batches, we can # adjust the sequence length offset. 
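The threshold logic above decides whether a request is split into micro-batches before being pushed through the pipeline. Reduced to plain Python with made-up sizes, the arithmetic in `__call__` and `_with_pipelining_forward_step` looks like this:

```python
def split_for_pipelining(batch_size: int, seq_len: int, threshold: int):
    """Sketch of the micro-batching arithmetic above; all sizes here are made up."""
    if threshold < 0 or batch_size * seq_len < threshold:
        # -1 disables splitting, per the new --inference-batch-times-seqlen-threshold help.
        return None
    micro_batch_size = max(1, threshold // seq_len)
    num_micro_batches, last_chunk = divmod(batch_size, micro_batch_size)
    return micro_batch_size, num_micro_batches, last_chunk

print(split_for_pipelining(16, 512, 4096))  # (8, 2, 0): two micro-batches of 8 prompts
print(split_for_pipelining(3, 8192, 4096))  # (1, 3, 0): long prompts fall back to micro-batch size 1
print(split_for_pipelining(4, 128, 4096))   # None: below the threshold, no splitting
```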
- self.inference_params.sequence_len_offset += sequence_length + self.inference_params.sequence_len_offset += tokens.size(1) # and reset the batch size offset self.inference_params.batch_size_offset = 0 diff --git a/megatron/inference/text_generation/generation.py b/megatron/inference/text_generation/generation.py index 5e4c23875..13e53b3c6 100644 --- a/megatron/inference/text_generation/generation.py +++ b/megatron/inference/text_generation/generation.py @@ -16,7 +16,10 @@ from .sampling import sample from .beam_utils import BeamHypotheses -def score_and_return_on_first_stage(model, tokens, lengths): +MAX_TOPK_LOGPROBS = 5 +NO_TOPK_LOGPROBS = None + +def score_and_return_on_first_stage(model, tokens: torch.Tensor, lengths: torch.Tensor): """Function for just scoring. Args: @@ -37,13 +40,17 @@ def score_and_return_on_first_stage(model, tokens, lengths): assert max_prompt_length == tokens.size(1) if max_prompt_length > args.max_position_embeddings: - raise ValueError("Length of prompt + tokens_to_generate longer than allowed") + raise ValueError( + f"Length of prompt + tokens_to_generate longer than allowed {max_prompt_length} > {args.max_position_embeddings}" + ) if max_prompt_length * batch_size > args.max_tokens_to_oom: - raise ValueError("Too many tokens. " + str(max_prompt_length*batch_size)+ " is greater than "+str(args.max_tokens_to_oom)) + raise ValueError( + f"Too many tokens. {max_prompt_length*batch_size} > {args.max_tokens_to_oom}" + ) # forward step. - forward_step = ForwardStep(model, batch_size, max_prompt_length) + forward_step = ForwardStep(model, batch_size, args.inference_max_seq_length) # =================== # Pre-allocate memory @@ -51,13 +58,22 @@ def score_and_return_on_first_stage(model, tokens, lengths): # Log probability of the sequence (prompt + generated tokens). output_log_probs = None + output_topk_log_probs, output_topk_log_indices = None, None output_log_probs_size = (batch_size, max_prompt_length - 1) + output_topk_log_probs_size = (batch_size, max_prompt_length, MAX_TOPK_LOGPROBS) if mpu.is_pipeline_last_stage(): - output_log_probs = torch.empty(output_log_probs_size, - dtype=torch.float32, - device=torch.cuda.current_device()) + output_log_probs = torch.empty( + output_log_probs_size, dtype=torch.float32, device=torch.cuda.current_device() + ) + + output_topk_log_probs = torch.empty( + output_topk_log_probs_size, dtype=torch.float32, device=torch.cuda.current_device() + ) + output_topk_log_indices = torch.empty( + output_topk_log_probs_size, dtype=torch.int64, device=torch.cuda.current_device() + ) # ============= # Run infernece # ============= @@ -78,14 +94,23 @@ def score_and_return_on_first_stage(model, tokens, lengths): # so shift by 1. indices = torch.unsqueeze(tokens[:, 1:], 2) output_log_probs = torch.gather(log_probs, 2, indices).squeeze(2) + torch.topk(log_probs, MAX_TOPK_LOGPROBS, dim=2, out=(output_topk_log_probs, output_topk_log_indices)) # ====================================== # Broadcast to the first pipeline stage. 
# ====================================== + output_topk_log_probs = broadcast_from_last_to_first_pipeline_stage( + output_topk_log_probs_size, torch.float32, output_topk_log_probs + ) + output_topk_log_indices = broadcast_from_last_to_first_pipeline_stage( + output_topk_log_probs_size, torch.int64, output_topk_log_indices + ) output_log_probs = broadcast_from_last_to_first_pipeline_stage( - output_log_probs_size, torch.float32, output_log_probs) + output_log_probs_size, torch.float32, output_log_probs + ) - return tokens, lengths, output_log_probs, logits + logprobs_topk = torch.return_types.topk((output_topk_log_probs, output_topk_log_indices)) + return tokens, lengths, output_log_probs, logprobs_topk def generate_tokens_probs_and_return_on_first_stage( model, forward_step, tokens, lengths, @@ -141,7 +166,7 @@ def generate_tokens_probs_and_return_on_first_stage( raise ValueError("Too many tokens. " + str(max_sequence_length*batch_size)+ " is greater than "+str(args.max_tokens_to_oom)) # forward step. - forward_step = forward_step(model, batch_size, max_sequence_length) + forward_step = forward_step(model, batch_size, args.inference_max_seq_length) # Added termination_id to support the case that we want to terminate the # generation once that id is generated. @@ -291,7 +316,7 @@ def generate_tokens_probs_and_return_on_first_stage( output_log_probs = broadcast_from_last_to_first_pipeline_stage( output_log_probs_size, torch.float32, output_log_probs) - return tokens, generated_sequence_lengths, output_log_probs, None + return tokens, generated_sequence_lengths, output_log_probs, NO_TOPK_LOGPROBS def beam_search_and_return_on_first_stage(model, forward_step, tokens, lengths, beam_size, stop_token, num_return_gen, length_penalty, prevent_newline_after_colon=True): args = get_args() diff --git a/megatron/inference/text_generation/tokenization.py b/megatron/inference/text_generation/tokenization.py index 32d3b50c6..7610cd4b3 100644 --- a/megatron/inference/text_generation/tokenization.py +++ b/megatron/inference/text_generation/tokenization.py @@ -16,7 +16,6 @@ def detokenize_generations(tokens_gpu_tensor, detokenize_segments): """Detokenize the generated tokens.""" - args = get_args() tokenizer = get_tokenizer() prompts_plus_generations = [] prompts_plus_generations_segments = [] @@ -101,7 +100,6 @@ def _tokenize_prompts_and_batch(prompts, tokens_to_generate, add_BOS): """ # Tokenize all the prompts. - args = get_args() tokenizer = get_tokenizer() if hasattr(tokenizer, 'eod'): eod_token = tokenizer.eod diff --git a/megatron/inference/text_generation_server.py b/megatron/inference/text_generation_server.py index 2eba2e259..df1e67242 100644 --- a/megatron/inference/text_generation_server.py +++ b/megatron/inference/text_generation_server.py @@ -1,42 +1,27 @@ # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
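The scoring path in generation.py above now also returns the top-k log-probabilities at every position. The core of that computation, shown standalone on random logits (shapes are illustrative only):

```python
import torch

MAX_TOPK_LOGPROBS = 5  # same constant as in generation.py above

batch_size, seq_len, vocab = 2, 7, 32000
logits = torch.randn(batch_size, seq_len, vocab)
tokens = torch.randint(vocab, (batch_size, seq_len))

log_probs = torch.nn.functional.log_softmax(logits, dim=2)

# Top-k log-probs and their vocabulary indices at every position.
topk_log_probs, topk_indices = torch.topk(log_probs, MAX_TOPK_LOGPROBS, dim=2)
print(topk_log_probs.shape, topk_indices.shape)  # torch.Size([2, 7, 5]) each

# Log-prob of each *observed* next token (shifted by one), as in the scoring loop.
observed = torch.gather(log_probs[:, :-1], 2, tokens[:, 1:].unsqueeze(2)).squeeze(2)
print(observed.shape)  # torch.Size([2, 6])
```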
import datetime -import torch import json -import threading -from flask import Flask, request, jsonify, current_app + +from flask import Flask, request, jsonify from flask_restful import Resource, Api -from megatron.training import get_args + from megatron.inference.text_generation import generate_and_post_process from megatron.inference.text_generation import beam_search_and_post_process +from megatron.inference.endpoints.common import send_do_generate, send_do_beam_search, LOCK +from megatron.inference.endpoints.completions import MegatronCompletions -GENERATE_NUM = 0 -BEAM_NUM = 1 -lock = threading.Lock() - class MegatronGenerate(Resource): def __init__(self, model): self.model = model - @staticmethod - def send_do_generate(): - choice = torch.tensor([GENERATE_NUM], dtype=torch.long, device='cuda') - torch.distributed.broadcast(choice, 0) - - @staticmethod - def send_do_beam_search(): - choice = torch.tensor([BEAM_NUM], dtype=torch.long, device='cuda') - torch.distributed.broadcast(choice, 0) - def put(self): - args = get_args() - if not "prompts" in request.get_json(): return "prompts argument required", 400 - + if "max_len" in request.get_json(): return "max_len is no longer used. Replace with tokens_to_generate", 400 - + if "sentences" in request.get_json(): return "sentences is no longer used. Replace with prompts", 400 @@ -46,10 +31,10 @@ def put(self): if len(prompts) == 0: return "prompts is empty", 400 - + if len(prompts) > 128: return "Maximum number of prompts is 128", 400 - + tokens_to_generate = 64 # Choosing hopefully sane default. Full sequence is slow if "tokens_to_generate" in request.get_json(): tokens_to_generate = request.get_json()["tokens_to_generate"] @@ -63,62 +48,62 @@ def put(self): logprobs = request.get_json()["logprobs"] if not isinstance(logprobs, bool): return "logprobs must be a boolean value" - + if tokens_to_generate == 0 and not logprobs: return "tokens_to_generate=0 implies logprobs should be True" - + temperature = 1.0 if "temperature" in request.get_json(): temperature = request.get_json()["temperature"] - if not (type(temperature) == int or type(temperature) == float): - return "temperature must be a positive number less than or equal to 100.0" + if not (isinstance(temperature, (int, float))): + return "temperature must be a positive number less than or equal to 1000.0" if not (0.0 < temperature <= 100.0): return "temperature must be a positive number less than or equal to 100.0" - - top_k = 0.0 + + top_k = 0 if "top_k" in request.get_json(): top_k = request.get_json()["top_k"] - if not (type(top_k) == int): + if not (isinstance(top_k, int)): return "top_k must be an integer equal to or greater than 0 and less than or equal to 1000" if not (0 <= top_k <= 1000): return "top_k must be equal to or greater than 0 and less than or equal to 1000" - + top_p = 0.0 if "top_p" in request.get_json(): top_p = request.get_json()["top_p"] - if not (type(top_p) == float): + if not (isinstance(top_p, float)): return "top_p must be a positive float less than or equal to 1.0" if top_p > 0.0 and top_k > 0.0: return "cannot set both top-k and top-p samplings." 
if not (0 <= top_p <= 1.0): return "top_p must be less than or equal to 1.0" - + top_p_decay = 0.0 if "top_p_decay" in request.get_json(): top_p_decay = request.get_json()["top_p_decay"] - if not (type(top_p_decay) == float): + if not (isinstance(top_p_decay, float)): return "top_p_decay must be a positive float less than or equal to 1.0" if top_p == 0.0: return "top_p_decay cannot be set without top_p" if not (0 <= top_p_decay <= 1.0): return "top_p_decay must be less than or equal to 1.0" - + top_p_bound = 0.0 if "top_p_bound" in request.get_json(): top_p_bound = request.get_json()["top_p_bound"] - if not (type(top_p_bound) == float): + if not (isinstance(top_p_bound, float)): return "top_p_bound must be a positive float less than or equal to top_p" if top_p == 0.0: return "top_p_bound cannot be set without top_p" if not (0.0 < top_p_bound <= top_p): return "top_p_bound must be greater than 0 and less than top_p" - + add_BOS = False if "add_BOS" in request.get_json(): add_BOS = request.get_json()["add_BOS"] if not isinstance(add_BOS, bool): return "add_BOS must be a boolean value" - + if any([len(prompt) == 0 for prompt in prompts]) and not add_BOS: return "Empty prompts require add_BOS=true" @@ -127,7 +112,7 @@ def put(self): stop_on_double_eol = request.get_json()["stop_on_double_eol"] if not isinstance(stop_on_double_eol, bool): return "stop_on_double_eol must be a boolean value" - + stop_on_eol = False if "stop_on_eol" in request.get_json(): stop_on_eol = request.get_json()["stop_on_eol"] @@ -145,7 +130,7 @@ def put(self): random_seed = request.get_json()["random_seed"] if not isinstance(random_seed, int): return "random_seed must be integer" - if random_seed < 0: + if random_seed < 0: return "random_seed must be a positive integer" no_log = False @@ -153,7 +138,7 @@ def put(self): no_log = request.get_json()["no_log"] if not isinstance(no_log, bool): return "no_log must be a boolean value" - + beam_width = None if "beam_width" in request.get_json(): beam_width = request.get_json()["beam_width"] @@ -164,48 +149,46 @@ def put(self): if len(prompts) > 1: return "When doing beam_search, batch size must be 1" - stop_token=50256 + stop_token = 50256 if "stop_token" in request.get_json(): stop_token = request.get_json()["stop_token"] if not isinstance(stop_token, int): return "stop_token must be an integer" - - length_penalty = 1 + + length_penalty = 1 if "length_penalty" in request.get_json(): length_penalty = request.get_json()["length_penalty"] if not isinstance(length_penalty, float): return "length_penalty must be a float" - - with lock: # Need to get lock to keep multiple threads from hitting code - + + with LOCK: # Need to get lock to keep multiple threads from hitting code + if not no_log: print("request IP: " + str(request.remote_addr)) - print(json.dumps(request.get_json()),flush=True) + print(json.dumps(request.get_json()), flush=True) print("start time: ", datetime.datetime.now()) - + try: if beam_width is not None: - MegatronGenerate.send_do_beam_search() # Tell other ranks we're doing beam_search - response, response_seg, response_scores = \ - beam_search_and_post_process( + send_do_beam_search() # Tell other ranks we're doing beam_search + response, response_seg, response_scores = beam_search_and_post_process( self.model, prompts=prompts, tokens_to_generate=tokens_to_generate, - beam_size = beam_width, + beam_size=beam_width, add_BOS=add_BOS, stop_token=stop_token, num_return_gen=beam_width, # Returning whole beam length_penalty=length_penalty, - 
prevent_newline_after_colon=prevent_newline_after_colon - ) - - return jsonify({"text": response, - "segments": response_seg, - "scores": response_scores}) + prevent_newline_after_colon=prevent_newline_after_colon, + ) + + return jsonify( + {"text": response, "segments": response_seg, "scores": response_scores} + ) else: - MegatronGenerate.send_do_generate() # Tell other ranks we're doing generate - response, response_seg, response_logprobs, _ = \ - generate_and_post_process( + send_do_generate() # Tell other ranks we're doing generate + result = generate_and_post_process( self.model, prompts=prompts, tokens_to_generate=tokens_to_generate, @@ -220,22 +203,29 @@ def put(self): stop_on_double_eol=stop_on_double_eol, stop_on_eol=stop_on_eol, prevent_newline_after_colon=prevent_newline_after_colon, - random_seed=random_seed) + random_seed=random_seed, + ) - return jsonify({"text": response, + response, response_seg, response_logprobs = result[:3] + response = { + "text": response, "segments": response_seg, - "logprobs": response_logprobs}) + "logprobs": response_logprobs, + } + + return jsonify(response) except ValueError as ve: return ve.args[0] print("end time: ", datetime.datetime.now()) - + class MegatronServer(object): def __init__(self, model): self.app = Flask(__name__, static_url_path='') api = Api(self.app) api.add_resource(MegatronGenerate, '/api', resource_class_args=[model]) - - def run(self, url, port): + api.add_resource(MegatronCompletions, '/completions', resource_class_args=[model]) + + def run(self, url, port): self.app.run(url, threaded=True, debug=False, port=port) diff --git a/megatron/legacy/model/transformer.py b/megatron/legacy/model/transformer.py index dda550551..db48d607e 100644 --- a/megatron/legacy/model/transformer.py +++ b/megatron/legacy/model/transformer.py @@ -20,14 +20,14 @@ from megatron.core.jit import jit_fuser from megatron.core.num_microbatches_calculator import get_num_microbatches from megatron.core.parallel_state import ( - get_tensor_and_expert_parallel_group, + get_expert_tensor_and_model_parallel_group, get_tensor_model_parallel_group, ) from megatron.core.tensor_parallel import ( - gather_from_sequence_parallel_region_to_moe, + gather_from_sequence_parallel_region, + reduce_scatter_to_sequence_parallel_region, get_cuda_rng_tracker, get_data_parallel_rng_tracker_name, - reduce_scatter_to_sequence_parallel_region_from_moe, ) from megatron.legacy.model.enums import AttnMaskType, AttnType, LayerType from megatron.legacy.model.fused_bias_gelu import bias_gelu_impl @@ -221,10 +221,11 @@ def __init__(self, config): for i in range(self.num_local_experts): self.local_experts.append(ParallelMLP(config, is_expert=True)) + self.tp_ep_group = get_expert_tensor_and_model_parallel_group() + def gather_indices(self, local_indices): """ Gather tensors and concatinate along the first dimension.""" - group = get_tensor_and_expert_parallel_group() - world_size = torch.distributed.get_world_size(group=group) + world_size = torch.distributed.get_world_size(group=self.tp_ep_group) # Bypass the function if we are using only 1 GPU. 
if world_size == 1: return local_indices @@ -236,7 +237,7 @@ def gather_indices(self, local_indices): output = torch.empty(dim_size, dtype=local_indices.dtype, device=torch.cuda.current_device()) torch.distributed._all_gather_base( - output, local_indices.contiguous(), group=group + output, local_indices.contiguous(), group=self.tp_ep_group ) return output @@ -269,7 +270,7 @@ def forward(self, hidden_states): # Each vector could be routed differently if self.sequence_parallel or (self.expert_parallel_size > 1): global_hidden_states = \ - gather_from_sequence_parallel_region_to_moe(hidden_states) + gather_from_sequence_parallel_region(hidden_states, group=self.tp_ep_group) global_indices = self.gather_indices(max_ind) else: global_hidden_states = hidden_states @@ -291,10 +292,10 @@ def forward(self, hidden_states): if self.sequence_parallel or (self.expert_parallel_size > 1): output_total = \ - reduce_scatter_to_sequence_parallel_region_from_moe(output_total) + reduce_scatter_to_sequence_parallel_region(output_total, group=self.tp_ep_group) if self.add_bias: output_bias_total = \ - reduce_scatter_to_sequence_parallel_region_from_moe(output_bias_total) + reduce_scatter_to_sequence_parallel_region(output_bias_total, group=self.tp_ep_group) # bias is duplicated across tensor parallelism ranks; # reduce scatter reduces bias across tensor parallel_ranks diff --git a/megatron/training/activations.py b/megatron/training/activations.py index c6ce9f1de..4d0fed14f 100644 --- a/megatron/training/activations.py +++ b/megatron/training/activations.py @@ -2,10 +2,7 @@ import torch import torch.nn.functional as F -try: - jit_fuser = torch.compile -except AttributeError: - jit_fuser = torch.jit.script +from megatron.core.jit import jit_fuser @jit_fuser diff --git a/megatron/training/arguments.py b/megatron/training/arguments.py index e3d876a5f..d86ea515c 100644 --- a/megatron/training/arguments.py +++ b/megatron/training/arguments.py @@ -5,11 +5,12 @@ import argparse import dataclasses import json -import logging import os -import torch import types +import warnings +from packaging.version import Version as PkgVersion +import torch import torch.nn.functional as F from megatron.core.dist_checkpointing.validation import StrictHandling @@ -18,6 +19,7 @@ get_gpt_data_dir as get_retro_data_dir, ) from megatron.core.transformer import TransformerConfig, MLATransformerConfig +from megatron.core.utils import get_torch_version, is_torch_min_version from megatron.training.activations import squared_relu from megatron.training.utils import update_use_dist_ckpt @@ -38,6 +40,7 @@ def parse_args(extra_args_provider=None, ignore_unknown_args=False): parser = _add_distributed_args(parser) parser = _add_validation_args(parser) parser = _add_data_args(parser) + parser = _add_tokenizer_args(parser) parser = _add_autoresume_args(parser) parser = _add_biencoder_args(parser) parser = _add_vision_args(parser) @@ -165,6 +168,10 @@ def validate_args(args, defaults={}): # Set args.use_dist_ckpt from args.ckpt_format. update_use_dist_ckpt(args) + + if args.encoder_pipeline_model_parallel_size == 0 and args.num_experts == 0: + assert args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size, "If non-MOE encoder shares first decoder pipeline rank it must have the same TP as the decoder." + if args.encoder_tensor_model_parallel_size > 0: assert args.encoder_pipeline_model_parallel_size > 0, "encoder_pipeline_model_parallel_size must be defined." 
assert args.num_attention_heads % args.encoder_tensor_model_parallel_size == 0 @@ -191,31 +198,41 @@ def validate_args(args, defaults={}): args.data_parallel_size = args.world_size // total_model_size - # Checks. if args.rank == 0: print('using world size: {}, data-parallel size: {}, ' 'context-parallel size: {}, ' + 'hierarchical context-parallel sizes: {}' 'tensor-model-parallel size: {}, ' 'encoder-tensor-model-parallel size: {}, ' 'pipeline-model-parallel size: {}, ' 'encoder-pipeline-model-parallel size: {}'.format( args.world_size, args.data_parallel_size, args.context_parallel_size, + args.hierarchical_context_parallel_sizes, args.tensor_model_parallel_size, args.encoder_tensor_model_parallel_size, args.pipeline_model_parallel_size, args.encoder_pipeline_model_parallel_size), flush=True) - # backwards compatibility. + # Checks. + + # Backwards compatibility. if args.pipeline_model_parallel_split_rank is not None: args.encoder_pipeline_model_parallel_size = args.pipeline_model_parallel_split_rank args.pipeline_model_parallel_size -= args.encoder_pipeline_model_parallel_size assert args.pipeline_model_parallel_size > 0 - if args.tp_comm_overlap: - assert args.sequence_parallel == True, 'Tensor parallel communication/GEMM overlap can happen only when sequence parallelism is enabled' + if args.hierarchical_context_parallel_sizes: + from numpy import prod + assert args.context_parallel_size == prod(args.hierarchical_context_parallel_sizes) + if "a2a+p2p" in args.cp_comm_type: + assert args.hierarchical_context_parallel_sizes is not None, \ + "--hierarchical-context-parallel-sizes must be set when a2a+p2p is used in cp comm" - # Deprecated arguments + if args.expert_tensor_parallel_size is None: + args.expert_tensor_parallel_size = args.tensor_model_parallel_size + + # Deprecated arguments. assert args.batch_size is None, '--batch-size argument is no longer ' \ 'valid, use --micro-batch-size instead' del args.batch_size @@ -258,6 +275,20 @@ def validate_args(args, defaults={}): f'of "{legacy_default_split_value}"') args.split = legacy_default_split_value + use_data_path = (args.data_path is not None) or (args.data_args_path is not None) + if use_data_path: + # Exactly one of the two has to be None if we use it. + assert (args.data_path is None) or (args.data_args_path is None) + use_per_split_data_path = any( + elt is not None + for elt in [args.train_data_path, args.valid_data_path, args.test_data_path]) or \ + args.per_split_data_args_path is not None + if use_per_split_data_path: + # Exactly one of the two has to be None if we use it. + assert any(elt is not None + for elt in [args.train_data_path, args.valid_data_path, args.test_data_path]) is False or \ + args.per_split_data_args_path is None + # Batch size. assert args.micro_batch_size is not None assert args.micro_batch_size > 0 @@ -267,21 +298,29 @@ def validate_args(args, defaults={}): print('setting global batch size to {}'.format( args.global_batch_size), flush=True) assert args.global_batch_size > 0 + if args.decoder_first_pipeline_num_layers is None and args.decoder_last_pipeline_num_layers is None: + # Divisibility check not applicable for T5 models which specify encoder_num_layers + # and decoder_num_layers. 
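The hierarchical context-parallel check above only requires that the per-level sizes multiply out to `--context-parallel-size`, and that a hierarchy is configured whenever the `a2a+p2p` communication type is requested. A small sketch of the same checks (example sizes are made up; `math.prod` stands in for the `numpy.prod` used above):

```python
from math import prod

def check_hierarchical_cp(context_parallel_size, hierarchical_sizes, cp_comm_type):
    """Sketch of the validate_args() checks above; the inputs here are examples only."""
    if hierarchical_sizes:
        assert context_parallel_size == prod(hierarchical_sizes), (
            f"product of {hierarchical_sizes} must equal context-parallel size "
            f"{context_parallel_size}"
        )
    if "a2a+p2p" in cp_comm_type:
        assert hierarchical_sizes is not None, (
            "--hierarchical-context-parallel-sizes must be set when a2a+p2p is used"
        )

check_hierarchical_cp(8, [2, 4], ["p2p", "a2a+p2p"])  # passes: 2 * 4 == 8
check_hierarchical_cp(4, None, ["p2p"])               # passes: no hierarchy requested
```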
+ if args.num_layers is not None: + assert args.num_layers % args.transformer_pipeline_model_parallel_size == 0, \ + 'Number of layers should be divisible by the pipeline-model-parallel size' if args.num_layers_per_virtual_pipeline_stage is not None: if args.overlap_p2p_comm: assert args.pipeline_model_parallel_size > 1, \ - 'when interleaved schedule is used, pipeline-model-parallel size '\ + 'When interleaved schedule is used, pipeline-model-parallel size '\ 'should be greater than 1' else: assert args.pipeline_model_parallel_size > 2, \ - 'when interleaved schedule is used and p2p communication overlap is disabled, '\ + 'When interleaved schedule is used and p2p communication overlap is disabled, '\ 'pipeline-model-parallel size should be greater than 2 to avoid having multiple '\ 'p2p sends and recvs between same 2 ranks per communication batch' + assert args.num_layers is not None + # Double check divisibility check here since check above is if guarded. assert args.num_layers % args.transformer_pipeline_model_parallel_size == 0, \ - 'number of layers should be divisible by the pipeline parallel size' + 'Number of layers should be divisible by the pipeline-model-parallel size' num_layers_per_pipeline_stage = args.num_layers // args.transformer_pipeline_model_parallel_size assert num_layers_per_pipeline_stage % args.num_layers_per_virtual_pipeline_stage == 0, \ - 'number of layers per pipeline stage must be divisible number of layers per virtual pipeline stage' + 'Number of layers per pipeline stage must be divisible by number of layers per virtual pipeline stage' args.virtual_pipeline_model_parallel_size = num_layers_per_pipeline_stage // \ args.num_layers_per_virtual_pipeline_stage else: @@ -303,6 +342,24 @@ def validate_args(args, defaults={}): assert not args.use_legacy_models, \ '--overlap-param-gather only supported with MCore models' + if getattr(args, "use_torch_fsdp2", False): + assert get_torch_version() >= PkgVersion("2.4"), \ + 'FSDP2 requires PyTorch >= 2.4.0 with FSDP 2 support.' + assert args.pipeline_model_parallel_size == 1, \ + '--use-torch-fsdp2 is not supported with pipeline parallelism' + assert args.expert_model_parallel_size == 1, \ + '--use-torch-fsdp2 is not supported with expert parallelism' + assert not args.use_distributed_optimizer, \ + "--use-torch-fsdp2 is not supported with MCore's distributed optimizer" + assert not args.gradient_accumulation_fusion, \ + '--use-torch-fsdp2 is not supported with gradient accumulation fusion' + assert args.ckpt_format == 'torch_dist', \ + '--use-torch-fsdp2 requires --ckpt-format torch_dist' + assert args.untie_embeddings_and_output_weights, \ + '--use-torch-fsdp2 requires --untie-embeddings-and-output-weights' + assert not args.fp16, \ + '--use-torch-fsdp2 not supported with fp16 yet' + if args.overlap_param_gather_with_optimizer_step: assert args.use_distributed_optimizer, \ '--overlap-param-gather-with-optimizer-step only supported with distributed optimizer' @@ -465,10 +522,8 @@ def validate_args(args, defaults={}): assert args.start_weight_decay is not None assert args.end_weight_decay is not None - TORCH_MAJOR = int(torch.__version__.split('.')[0]) - TORCH_MINOR = int(torch.__version__.split('.')[1]) # Persistent fused layer norm. 
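The interleaved-schedule checks above reduce to simple divisibility arithmetic between the layer count, the pipeline size, and the per-virtual-stage layer count; a standalone sketch with made-up sizes:

```python
def virtual_pp_size(num_layers, pipeline_mp_size, layers_per_virtual_stage):
    """Sketch of the interleaved-schedule checks above; example sizes are made up."""
    assert num_layers % pipeline_mp_size == 0, (
        "Number of layers should be divisible by the pipeline-model-parallel size"
    )
    layers_per_stage = num_layers // pipeline_mp_size
    assert layers_per_stage % layers_per_virtual_stage == 0, (
        "Number of layers per pipeline stage must be divisible by "
        "number of layers per virtual pipeline stage"
    )
    return layers_per_stage // layers_per_virtual_stage

# 32 layers, PP=4 -> 8 layers per stage; 2 layers per virtual stage -> 4 model chunks per rank
print(virtual_pp_size(32, 4, 2))  # 4
```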
- if TORCH_MAJOR < 1 or (TORCH_MAJOR == 1 and TORCH_MINOR < 11): + if not is_torch_min_version("1.11.0a0"): args.no_persist_layer_norm = True if args.rank == 0: print('Persistent fused layer norm kernel is supported from ' @@ -486,10 +541,10 @@ def validate_args(args, defaults={}): assert args.recompute_method is not None, \ 'for distributed recompute activations to work you '\ 'need to use a recompute method ' - assert (TORCH_MAJOR, TORCH_MINOR) >= (1, 10), \ + assert is_torch_min_version("1.10.0a0"), \ 'distributed recompute activations are supported for pytorch ' \ 'v1.10 and above (Nvidia Pytorch container >= 21.07). Current ' \ - 'pytorch version is v%s.%s.' % (TORCH_MAJOR, TORCH_MINOR) + f'pytorch version is v{get_torch_version()}.' if args.recompute_granularity == 'selective': assert args.recompute_method is None, \ @@ -500,12 +555,24 @@ def validate_args(args, defaults={}): # to avoid change in numerics when # sequence_parallelism is enabled. if args.tensor_model_parallel_size == 1: + if args.sequence_parallel: + warnings.warn("Disabling sequence parallelism because tensor model parallelism is disabled") args.sequence_parallel = False + if args.tp_comm_overlap: + assert args.sequence_parallel == True, 'Tensor parallel communication/GEMM overlap can happen only when sequence parallelism is enabled' + # disable async_tensor_model_parallel_allreduce when # model parallel memory optimization is enabled if args.sequence_parallel: args.async_tensor_model_parallel_allreduce = False + if getattr(args, "use_torch_fsdp2", False): + warnings.warn( + "Using sequence parallelism with FSDP2 together. Try not to using them " + "together since they require different CUDA_MAX_CONNECTIONS settings " + "for best performance. sequence parallelism requires setting the " + "environment variable CUDA_DEVICE_MAX_CONNECTIONS to 1 while FSDP2 " + "requires not setting CUDA_DEVICE_MAX_CONNECTIONS=1 for better parallelization.") if os.environ.get('CUDA_DEVICE_MAX_CONNECTIONS') != "1": if args.sequence_parallel: @@ -521,6 +588,10 @@ def validate_args(args, defaults={}): if not args.add_bias_linear: args.bias_gelu_fusion = False + # Keep the 'add bias' args in sync; add_qkv_bias is more targeted. + if args.add_bias_linear: + args.add_qkv_bias = True + # Retro checks. if args.retro_add_retriever: @@ -547,6 +618,8 @@ def validate_args(args, defaults={}): raise RuntimeError('--rotary-interleaved does not work with rope_fusion.') if args.rotary_interleaved and args.use_legacy_models: raise RuntimeError('--rotary-interleaved is not supported in legacy models.') + if args.position_embedding_type != 'rope': + args.apply_rope_fusion = False # Would just need to add 'NoPE' as a position_embedding_type to support this, but for now # don't allow it to keep things simple @@ -620,6 +693,11 @@ def validate_args(args, defaults={}): print('--dist-ckpt-format is deprecated and has no effect.' ' Use --ckpt-format to select the checkpoint format.') + # Inference args + if args.inference_batch_times_seqlen_threshold > -1: + assert args.pipeline_model_parallel_size > 1, \ + "--inference-batch-times-seqlen-threshold requires setting --pipeline-model-parallel-size > 1." + # MoE upcycling check if args.moe_use_upcycling: assert args.save is not None, "When using upcycling, the --save option must be specified." @@ -656,7 +734,7 @@ def _check_arg_is_not_none(args, arg): def core_transformer_config_from_args(args, config_class=None): - + # Config class. 
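Version gating now goes through `is_torch_min_version` and `get_torch_version` from `megatron.core.utils` instead of hand-parsing major/minor integers. Those helpers are not part of this hunk; a rough standalone equivalent built on `packaging` (an approximation for illustration, not the actual implementation):

```python
from packaging.version import Version

def is_min_version(current: str, required: str) -> bool:
    """Approximate stand-in for is_torch_min_version(): full PEP 440 comparison."""
    return Version(current) >= Version(required)

print(is_min_version("2.5.1+cu124", "1.11.0a0"))  # True: persistent fused layer norm available
print(is_min_version("1.10.2", "1.11.0a0"))       # False: falls back to no_persist_layer_norm
```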
config_class = config_class or TransformerConfig @@ -696,6 +774,9 @@ def core_transformer_config_from_args(args, config_class=None): kw_args['num_query_groups'] = None kw_args['config_logger_dir'] = args.config_logger_dir + if len(args.cp_comm_type) == 1: + kw_args['cp_comm_type'] = args.cp_comm_type[0] + # Return config. return config_class(**kw_args) @@ -729,17 +810,17 @@ def _add_transformer_engine_args(parser): group.add_argument('--fp8-param-gather', action='store_true', help='Keep the compute param in fp8 (do not use any other intermediate ' 'dtype) and perform the param all-gather in fp8.') - return parser def _add_inference_args(parser): group = parser.add_argument_group(title='inference') group.add_argument('--inference-batch-times-seqlen-threshold', - type=int, default=512, - help='During inference, if batch-size times ' - 'sequence-length is smaller than this threshold ' - 'then we will not use pipelining, otherwise we will.') + type=int, default=-1, + help='If (batch-size * sequence-length) is smaller than this threshold' + 'then batches will not be split up for pipelining.' + 'Requires setting --pipeline-model-parallel-size > 1.' + 'Setting this to -1 indicates that batch pipelining is not used.') group.add_argument('--max-tokens-to-oom', type=int, default=12000, help='Maximum number of tokens during inference' @@ -753,7 +834,11 @@ def _add_inference_args(parser): choices=["megatron", "huggingface"], help='Select either Megatron or Huggingface as the ' 'Bert embedder.') - + group.add_argument('--flash-decode', default=False, action="store_true", + help='Whether to use the flash decoding kernel.') + group.add_argument('--inference-max-seq-length', type=int, default=2560, + help='Maximum sequence length allocated for prefill during inference.', + dest='inference_max_seq_length') return parser @@ -1074,7 +1159,7 @@ def _add_training_args(parser): ' ' ' ' 'For example:' - ' --rampup-batch-size 16 8 300000 \ ' + ' --rampup-batch-size 16 8 300000 \\ ' ' --global-batch-size 1024' 'will start with global batch size 16 and over ' ' (1024 - 16) / 8 = 126 intervals will increase' @@ -1139,6 +1224,10 @@ def _add_training_args(parser): dest='use_pytorch_profiler') group.add_argument('--profile-ranks', nargs='+', type=int, default=[0], help='Global ranks to profile.') + group.add_argument('--record-memory-history', action="store_true", default=False, + help='Record memory history in last rank.') + group.add_argument('--memory-snapshot-path', type=str, default="snapshot.pickle", + help='Specifies where to dump the memory history pickle.') group.add_argument('--tp-comm-overlap', action='store_true', help='Enables the ' ' overlap of Tensor parallel communication and GEMM kernels.') group.add_argument('--tp-comm-overlap-cfg', type=str, default=None, @@ -1418,8 +1507,12 @@ def _add_checkpointing_args(parser): 'checkpoint', dest='perform_initialization') group.add_argument('--use-checkpoint-args', action='store_true', - help='Override any command line arguments with arguments ' - 'from the checkpoint') + help='Override model-related command-line arguments with arguments from checkpoint') + group.add_argument('--use-mp-args-from-checkpoint-args', action='store_true', + help='Copy model parallelism command-line arguments from checkpoint') + group.add_argument('--no-use-tokenizer-model-from-checkpoint-args', action='store_false', + dest='use_tokenizer_model_from_checkpoint_args', + help='If set, do not use tokenizer model path from checkpoint') group.add_argument('--exit-on-missing-checkpoint', 
action='store_true', help="If '--load' is set, but checkpoint is not found " "(e.g., path typo), then exit instead of random " @@ -1540,9 +1633,15 @@ def _add_distributed_args(parser): '--tensor-model-parallel-size instead.') group.add_argument('--num-layers-per-virtual-pipeline-stage', type=int, default=None, help='Number of layers per virtual pipeline stage') + group.add_argument('--microbatch-group-size-per-virtual-pipeline-stage', type=int, default=None, + help='Number of contiguous microbatches per virtual pipeline stage', + dest='microbatch_group_size_per_vp_stage') group.add_argument('--no-overlap-p2p-communication', action='store_false', - help='overlap pipeline parallel communication with forward and backward chunks', + help='overlap pipeline parallel communication with forward and backward chunks in 1F1B', dest='overlap_p2p_comm') + group.add_argument('--overlap-p2p-communication-warmup-flush', action='store_true', + default=False, help='if set, overlap pipeline parallel communication in warmup and flush', + dest='overlap_p2p_comm_warmup_flush') group.add_argument('--distributed-backend', default='nccl', choices=['nccl', 'gloo'], help='Which backend to use for distributed training.') @@ -1595,8 +1694,25 @@ def _add_distributed_args(parser): 'affects the encoder embedding.)') group.add_argument('--use-distributed-optimizer', action='store_true', help='Use distributed optimizer.') + group.add_argument('--num-distributed-optimizer-instances', type=int, default=1, + help='Number of Distributed Optimizer copies across Data Parallel domain.') + group.add_argument('--use-torch-fsdp2', action='store_true', + help="Use the torch FSDP2 implementation. FSDP2 is not currently working with Pipeline Parallel." + "It is still not in a stable release stage, and may therefore contain bugs or other potential issues.") group.add_argument('--context-parallel-size', type=int, default=1, help='Degree of context parallelism.') + group.add_argument('--cp-comm-type', nargs='+', type=str, default=["p2p"], + help='Inter-gpu communication type for context parallelism: ' + 'p2p, a2a, allgather or a2a+p2p. If a single string is provided, ' + 'all layers will share the same communication type. Users can also ' + 'specify separated types for each layer like ' + '--cp-comm-type p2p p2p a2a a2a a2a+p2p a2a+p2p') + group.add_argument('--hierarchical-context-parallel-sizes', nargs='+', type=int, default=None, + help='Degrees of the hierarchical context parallelism. Users should ' + 'provide a list to specify the sizes for different levels. ' + '--hierarchical-context-parallel-sizes 2 4 indicates every two adjacent gpus ' + 'forms the first level of cp groups and the cp ranks with the same odevity ' + 'forms the second level of cp groups.') group.add_argument('--nccl-communicator-config-path', type=str, default=None, help='Path to the yaml file with NCCL communicator ' 'configurations. The number of min/max thread groups and thread ' @@ -1626,6 +1742,41 @@ def _add_validation_args(parser): return parser +def _add_tokenizer_args(parser): + group = parser.add_argument_group(title='tokenizer') + group.add_argument('--vocab-size', type=int, default=None, + help='Size of vocab before EOD or padding.') + group.add_argument('--vocab-file', type=str, default=None, + help='Path to the vocab file.') + group.add_argument('--merge-file', type=str, default=None, + help='Path to the BPE merge file.') + group.add_argument('--vocab-extra-ids', type=int, default=0, + help='Number of additional vocabulary tokens. 
' + 'They are used for span masking in the T5 model') + group.add_argument('--tokenizer-type', type=str, + default=None, + choices=['BertWordPieceLowerCase', + 'BertWordPieceCase', + 'GPT2BPETokenizer', + 'SentencePieceTokenizer', + 'GPTSentencePieceTokenizer', + 'HuggingFaceTokenizer', + 'Llama2Tokenizer', + 'TikTokenizer', + 'MultimodalTokenizer', + 'NullTokenizer'], + help='What type of tokenizer to use.') + group.add_argument('--tokenizer-model', type=str, default=None, + help='Sentencepiece tokenizer model.') + group.add_argument('--tiktoken-pattern', type=str, default=None, + help='Which tiktoken pattern to use. Options: [v1, v2]') + group.add_argument('--tiktoken-num-special-tokens', type=int, default=1000, + help='Number of special tokens in tiktoken tokenizer') + group.add_argument('--tiktoken-special-tokens', type=str, nargs='+', default=None, + help='List of tiktoken special tokens, needs to have ["", "", ""]') + return parser + + def _add_data_args(parser): group = parser.add_argument_group(title='data and dataloader') @@ -1656,6 +1807,17 @@ def _add_data_args(parser): group.add_argument('--test-data-path', nargs='*', default=None, help='The weight and prefix list for an independent test dataset. ' 'Follows the same pattern rules as --data-path.') + group.add_argument('--data-args-path', type=str, default=None, + help='Path to data-args. Instead of feeding `--data-path` ' + 'with weighted dataset, we pass in a file path from which ' + 'we read that argument. This is useful when the list of data is ' + 'too big.') + group.add_argument('--per-split-data-args-path', type=str, default=None, + help='Path to per-split-data-args. Instead of feeding ' + '`--(train|valid|test)-data-path` with weighted dataset, ' + 'we pass in a file path from which we read those arguments. ' + 'This is useful when the list of data is too big. Format is a ' + 'json file with `train`, `valid, `test` keys') group.add_argument('--data-cache-path', default=None, help='Path to a directory to hold cached index files.') group.add_argument('--no-mmap-bin-files', action='store_false', @@ -1664,15 +1826,6 @@ def _add_data_args(parser): group.add_argument('--mock-data', action='store_true', help='Skip data loading and validation and opt for artificial ' 'generation of mock data when an implementation is available.') - group.add_argument('--vocab-size', type=int, default=None, - help='Size of vocab before EOD or padding.') - group.add_argument('--vocab-file', type=str, default=None, - help='Path to the vocab file.') - group.add_argument('--merge-file', type=str, default=None, - help='Path to the BPE merge file.') - group.add_argument('--vocab-extra-ids', type=int, default=0, - help='Number of additional vocabulary tokens. 
' - 'They are used for span masking in the T5 model') group.add_argument('--seq-length', type=int, default=None, help='Maximum sequence length to process.') group.add_argument('--encoder-seq-length', type=int, default=None, @@ -1692,26 +1845,6 @@ def _add_data_args(parser): help='Probability of producing a short sequence.') group.add_argument('--num-workers', type=int, default=2, help="Dataloader number of workers.") - group.add_argument('--tokenizer-type', type=str, - default=None, - choices=['BertWordPieceLowerCase', - 'BertWordPieceCase', - 'GPT2BPETokenizer', - 'SentencePieceTokenizer', - 'GPTSentencePieceTokenizer', - 'HuggingFaceTokenizer', - 'Llama2Tokenizer', - 'TikTokenizer', - 'NullTokenizer'], - help='What type of tokenizer to use.') - group.add_argument('--tokenizer-model', type=str, default=None, - help='Sentencepiece tokenizer model.') - group.add_argument('--tiktoken-pattern', type=str, default=None, - help='Which tiktoken pattern to use. Options: [v1, v2]') - group.add_argument('--tiktoken-num-special-tokens', type=int, default=1000, - help='Number of special tokens in tiktoken tokenizer') - group.add_argument('--tiktoken-special-tokens', type=str, nargs='+', default=None, - help='List of tiktoken special tokens, needs to have ["", "", ""]') group.add_argument('--reset-position-ids', action='store_true', help='Reset posistion ids after end-of-document token.') group.add_argument('--reset-attention-mask', action='store_true', @@ -1873,6 +2006,8 @@ def _add_moe_args(parser): group = parser.add_argument_group(title="moe") group.add_argument('--expert-model-parallel-size', type=int, default=1, help='Degree of expert model parallelism.') + group.add_argument('--expert-tensor-parallel-size', type=int, default=None, + help='Degree of expert model parallelism. Default is None, which will be set to the value of --tensor-model-paralle-size.') group.add_argument('--num-experts', type=int, default=None, help='Number of Experts in MoE (None means no MoE)') group.add_argument('--moe-shared-expert-intermediate-size', type=int, default=None, @@ -1915,7 +2050,7 @@ def _add_moe_args(parser): group.add_argument('--moe-layer-recompute', action='store_true', help='Enable checkpointing for moe_layer, should be used when memory is not sufficient.') group.add_argument('--moe-extended-tp', action='store_true', - help='Alternative to expert parallelism, all experts are sharded across TPXEP domain.') + help='Deprecated. Use --expert-tensor-parallel-size instead.') group.add_argument('--moe-use-upcycling', action='store_true', help='Load a checkpoint of a dense model, convert it into an MoE model, and save the converted model to the path specified by --save. ' 'Upcycling is implemented on the top of distributed checkpointing, so it supports parallel modes different from the dense model.') @@ -1956,11 +2091,13 @@ def _add_experimental_args(parser): help='Ratio of mlp layers to total layers, in the ' 'range [0.0, 1.0].') group.add_argument('--hybrid-override-pattern', type=str, default=None, - help='Force a specific hybrid layer pattern. If a value' - 'greater than 0.0 is supplied to any of the hybrid ratio' - 'arguments, then the number of each type of layer in the' - 'override pattern must match number in the overidden' - 'pattern') + help='Force a specific hybrid layer pattern. The value' + 'should be a string of characters chosen from' + 'core.ssm.mamba_hybrid_layer_allocation.Symbols.' 
+ 'If a value greater than 0.0 is supplied to any of the ' + 'hybrid ratio arguments, then the number of each type' + 'of layer in the override pattern must match number in' + 'the overidden pattern') group.add_argument('--yaml-cfg', type=str, default=None, help = 'Config file to add additional arguments') return parser diff --git a/megatron/training/checkpointing.py b/megatron/training/checkpointing.py index 3de49f6c5..b2c175318 100644 --- a/megatron/training/checkpointing.py +++ b/megatron/training/checkpointing.py @@ -2,13 +2,14 @@ """Input/output checkpointing.""" -from enum import Enum, auto -from logging import getLogger +import contextlib import os import random import shutil import sys import threading +from enum import Enum, auto +from logging import getLogger from pathlib import Path import numpy as np @@ -303,7 +304,7 @@ class CheckpointType(Enum): def save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, checkpointing_context=None, pipeline_rank=None, expert_rank=None, tensor_rank=None, pipeline_parallel=None, expert_parallel=None, non_persistent_ckpt=False, - train_data_iterator=None, ft_client=None): + train_data_iterator=None, ft_client=None, preprocess_common_state_dict_fn = None): """Save a model, optimizer and optionally dataloader checkpoint. Checkpointing context is used to persist some checkpointing state @@ -390,7 +391,7 @@ def save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floati # Collect args, model, RNG. if not torch.distributed.is_initialized() \ - or mpu.get_data_modulo_expert_parallel_rank(with_context_parallel=True) == 0 \ + or mpu.get_expert_data_parallel_rank() == 0 \ or ckpt_type != CheckpointType.LEGACY: optim_sd_kwargs = {} if ckpt_type != CheckpointType.LEGACY and args.use_distributed_optimizer: @@ -435,7 +436,8 @@ def save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floati logger.debug(f"rank: {rank}, takes {end_ckpt - start_ckpt} to prepare state dict for ckpt ") async_save_request = dist_checkpointing.save(state_dict, checkpoint_name, save_strategy, async_sharded_save=args.async_save, - validate_access_integrity=validate_sharding_integrity) + validate_access_integrity=validate_sharding_integrity, + preprocess_common_before_consistancy_check=preprocess_common_state_dict_fn) # [ModelOpt]: save sharded modelopt_state if has_nvidia_modelopt: save_sharded_modelopt_state(model, checkpoint_name, (args.ckpt_format, 1)) @@ -479,8 +481,9 @@ def iter_finalize_fn(): def iter_finalize_fn(): with open(tracker_filename, 'w') as f: f.write(str(iteration)) - print_rank_0(' successfully saved checkpoint from iteration {:7d} to {}' - .format(iteration, args.save)) + print_rank_0(f' successfully saved checkpoint from iteration {int(iteration):7d} to {args.save} ' + f'[ t {(tensor_rank if tensor_rank is not None else mpu.get_tensor_model_parallel_rank()) + 1}/{mpu.get_tensor_model_parallel_world_size()}, ' + f'p {(pipeline_rank if pipeline_rank is not None else mpu.get_pipeline_model_parallel_rank()) + 1}/{mpu.get_pipeline_model_parallel_world_size()} ]') if args.log_progress and args.async_save: append_to_progress_log(f'Saved async checkpoint\tIteration: {iteration}', barrier=False) @@ -942,6 +945,7 @@ def _set_arg(arg_name, old_arg_name=None, force=False): else: print_rank_0(f"Checkpoint did not provide arguments {arg_name}") + # Model args. 
_set_arg('num_layers') _set_arg('hidden_size') _set_arg('ffn_hidden_size') @@ -954,24 +958,54 @@ def _set_arg(arg_name, old_arg_name=None, force=False): _set_arg('position_embedding_type', force=True) _set_arg('add_position_embedding', force=True) _set_arg('use_rotary_position_embeddings', force=True) + _set_arg('rotary_base', force=True) _set_arg('rotary_percent', force=True) _set_arg('rotary_interleaved', force=True) _set_arg('add_bias_linear', force=True) _set_arg('add_qkv_bias', force=True) + _set_arg('squared_relu', force=True) _set_arg('swiglu', force=True) _set_arg('untie_embeddings_and_output_weights', force=True) _set_arg('apply_layernorm_1p', force=True) _set_arg('normalization', force=True) - _set_arg('tokenizer_type') - _set_arg('padded_vocab_size') _set_arg('apply_query_key_layer_scaling', force=True) - if checkpoint_version < 3.0: - _set_arg('tensor_model_parallel_size', 'model_parallel_size') - else: - _set_arg('tensor_model_parallel_size', force=True) - _set_arg('pipeline_model_parallel_size', force=True) - _set_arg('virtual_pipeline_model_parallel_size', force=True) - _set_arg('num_layers_per_virtual_pipeline_stage') + _set_arg('attention_dropout', force=True) + _set_arg('hidden_dropout', force=True) + + _set_arg('hybrid_override_pattern', force=True) + _set_arg('spec', force=True) + _set_arg('hybrid_attention_ratio', force=True) + _set_arg('hybrid_mlp_ratio', force=True) + + _set_arg('num_experts', force=True) + _set_arg('moe_router_topk', force=True) + _set_arg('moe_token_dispatcher_type', force=True) + _set_arg('moe_router_pre_softmax', force=True) + _set_arg('moe_grouped_gemm', force=True) + _set_arg('moe_shared_expert_intermediate_size', force=True) + + # Tokenizer args. + _set_arg('tokenizer_type', force=True) + # Using checkpoint version might not always be safe (e.g., if running on different cluster). + if args.use_tokenizer_model_from_checkpoint_args: + _set_arg('tokenizer_model', force=True) + _set_arg('tiktoken_pattern', force=True) + _set_arg('padded_vocab_size') + + # Checkpoint args. + _set_arg('ckpt_format') + + # Model parallelism args. + if args.use_mp_args_from_checkpoint_args: + if checkpoint_version < 3.0: + _set_arg('tensor_model_parallel_size', 'model_parallel_size') + else: + _set_arg('tensor_model_parallel_size', force=True) + _set_arg('pipeline_model_parallel_size', force=True) + _set_arg('virtual_pipeline_model_parallel_size', force=True) + _set_arg('num_layers_per_virtual_pipeline_stage') + _set_arg('expert_model_parallel_size', force=True) + return args, checkpoint_args @@ -990,11 +1024,15 @@ def fix_fp8_params_lose_precision_when_loading_dist_ckpt(state_dict): def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', strict=True, - ft_client=None, checkpointing_context=None): + ft_client=None, checkpointing_context=None, skip_load_to_model_and_opt=False): """Load a model checkpoint and return the iteration. strict (bool): whether to strictly enforce that the keys in :attr:`state_dict` of the checkpoint match the names of parameters and buffers in model. + skip_load_to_model_and_opt (bool): whether to call `load_state_dict` + for :attr:`model` and :attr:`optimizer`. In case of running FSDP2 + or other torch features that uses DTensor in state dict, the tensors + are already loaded in-place by `_load_base_checkpoint`. 
""" args = get_args() load_dir = getattr(args, load_arg) @@ -1038,13 +1076,18 @@ def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', stri ckpt_tp_pp = ( state_dict['args'].tensor_model_parallel_size, state_dict['args'].pipeline_model_parallel_size, + getattr(state_dict['args'], 'encoder_tensor_model_parallel_size', 0), + getattr(state_dict['args'], 'encoder_pipeline_model_parallel_size', 0), ) run_tp_pp = ( - mpu.get_tensor_model_parallel_world_size(), - mpu.get_pipeline_model_parallel_world_size(), + args.tensor_model_parallel_size, + args.pipeline_model_parallel_size, + # TODO: change this to args.encoder_tensor_model_parallel_size after 30th Nov 24 + getattr(args, 'encoder_tensor_model_parallel_size', 0), + getattr(args, 'encoder_pipeline_model_parallel_size', 0), ) - mismatch_msg = "(TP, PP) mismatch after resume ({} vs {} from checkpoint)".format( - ckpt_tp_pp, run_tp_pp + mismatch_msg = "(TP, PP, encoder TP, encoder PP) mismatch after resume ({} vs {} from checkpoint)".format( + run_tp_pp, ckpt_tp_pp ) # Determine if RNG state will be loaded @@ -1081,8 +1124,17 @@ def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', stri else: gen_sd_optim = None gen_sd_opt_param_scheduler = None - load_kwargs['sharded_state_dict'] = generate_state_dict(args, model, gen_sd_optim, gen_sd_opt_param_scheduler, - gen_sd_rng_state, True, optim_sd_kwargs=optim_sd_kwargs) + + # [ModelOpt]: Initial loading from non-resume sharded checkpoint to a Distillation Model + # will result in key mismatch with loss modules potentially containing parameters, since + # it requires generating a state_dict before loading. Here we hide those modules if present. + with contextlib.ExitStack() as stack: # Allows multiple context managers for each model shard + if args.finetune and hasattr(model[0], "hide_loss_modules"): + for m in model: + stack.enter_context(m.hide_loss_modules()) + load_kwargs['sharded_state_dict'] = generate_state_dict(args, model, gen_sd_optim, gen_sd_opt_param_scheduler, + gen_sd_rng_state, True, optim_sd_kwargs=optim_sd_kwargs) + # When "--fp8-param-gather" is disabled, this function doesn't modify anything. fix_fp8_params_lose_precision_when_loading_dist_ckpt(load_kwargs['sharded_state_dict']) @@ -1148,12 +1200,13 @@ def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', stri # Model. strict = False if args.retro_add_retriever else strict - if len(model) == 1: - model[0].load_state_dict(state_dict['model'], strict=strict) - else: - for i in range(len(model)): - mpu.set_virtual_pipeline_model_parallel_rank(i) - model[i].load_state_dict(state_dict['model%d' % i], strict=strict) + if not skip_load_to_model_and_opt: + if len(model) == 1: + model[0].load_state_dict(state_dict['model'], strict=strict) + else: + for i in range(len(model)): + mpu.set_virtual_pipeline_model_parallel_rank(i) + model[i].load_state_dict(state_dict['model%d' % i], strict=strict) # Fix up query/key/value matrix ordering if needed. checkpoint_version = get_checkpoint_version() @@ -1164,7 +1217,7 @@ def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', stri if not release and not args.finetune and not args.no_load_optim: try: # Load state dict. - if optimizer is not None: + if not skip_load_to_model_and_opt and optimizer is not None: optimizer.load_state_dict(state_dict['optimizer']) # Load distributed optimizer's custom parameter state. 
@@ -1239,8 +1292,8 @@ def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', stri torch.distributed.barrier() print_rank_0(f' successfully loaded checkpoint from {load_dir} ' - f'[ t {mpu.get_tensor_model_parallel_rank()}, ' - f'p {mpu.get_pipeline_model_parallel_rank()} ] ' + f'[ t {mpu.get_tensor_model_parallel_rank() + 1}/{mpu.get_tensor_model_parallel_world_size()}, ' + f'p {mpu.get_pipeline_model_parallel_rank() + 1}/{mpu.get_pipeline_model_parallel_world_size()} ] ' f'at iteration {iteration}') torch.cuda.empty_cache() diff --git a/megatron/training/global_vars.py b/megatron/training/global_vars.py index 6c1b551d1..70701341e 100644 --- a/megatron/training/global_vars.py +++ b/megatron/training/global_vars.py @@ -7,7 +7,7 @@ import torch from megatron.core import Timers -from megatron.core.num_microbatches_calculator import init_num_microbatches_calculator +from megatron.core.num_microbatches_calculator import init_num_microbatches_calculator, unset_num_microbatches_calculator from megatron.training import dist_signal_handler from megatron.training.tokenizer import build_tokenizer @@ -101,6 +101,35 @@ def set_global_variables(args, build_tokenizer=True): _set_signal_handler() +def unset_global_variables(): + """Unset global vars. + + Useful for multiple runs. See `tests/unit_tests/ckpt_converter/test_ckpt_converter.py` for an example. + """ + + global _GLOBAL_ARGS + global _GLOBAL_NUM_MICROBATCHES_CALCULATOR + global _GLOBAL_TOKENIZER + global _GLOBAL_TENSORBOARD_WRITER + global _GLOBAL_WANDB_WRITER + global _GLOBAL_ONE_LOGGER + global _GLOBAL_ADLR_AUTORESUME + global _GLOBAL_TIMERS + global _GLOBAL_SIGNAL_HANDLER + + _GLOBAL_ARGS = None + _GLOBAL_NUM_MICROBATCHES_CALCULATOR = None + _GLOBAL_TOKENIZER = None + _GLOBAL_TENSORBOARD_WRITER = None + _GLOBAL_WANDB_WRITER = None + _GLOBAL_ONE_LOGGER = None + _GLOBAL_ADLR_AUTORESUME = None + _GLOBAL_TIMERS = None + _GLOBAL_SIGNAL_HANDLER = None + + unset_num_microbatches_calculator() + + def set_args(args): global _GLOBAL_ARGS _GLOBAL_ARGS = args diff --git a/megatron/training/initialize.py b/megatron/training/initialize.py index ad68ce8cb..dbb00c88c 100644 --- a/megatron/training/initialize.py +++ b/megatron/training/initialize.py @@ -5,6 +5,7 @@ import random import os import time +import warnings import numpy as np import torch @@ -22,7 +23,7 @@ from megatron.core.fusions.fused_bias_dropout import bias_dropout_add_fused_train from megatron.core.fusions.fused_bias_gelu import bias_gelu from megatron.core.fusions.fused_bias_swiglu import bias_swiglu -from megatron.core.utils import get_te_version, is_te_min_version +from megatron.core.utils import get_te_version, is_te_min_version, is_torch_min_version logger = logging.getLogger(__name__) @@ -110,6 +111,7 @@ def finish_mpu_init(): _compile_dependencies() if args.tp_comm_overlap: + #TODO: Should this be activated with just decoder-tp-comm-overlap too? 
_initialize_tp_communicators() # No continuation function @@ -210,7 +212,10 @@ def _initialize_tp_communicators(): else: ub_cfgs = {} - input_shape = [(args.seq_length * args.micro_batch_size) // args.context_parallel_size , args.hidden_size] + if getattr(args, 'decoder_tp_comm_overlap', False): + input_shape = [(args.decoder_seq_length * args.micro_batch_size) // args.context_parallel_size , args.hidden_size] + else: + input_shape = [(args.seq_length * args.micro_batch_size) // args.context_parallel_size , args.hidden_size] if is_te_min_version("1.9.0"): # The process group with the target bootstrap backend is created in Transformer Engine. @@ -277,7 +282,10 @@ def _initialize_distributed(get_embedding_ranks, get_position_embedding_ranks): args.virtual_pipeline_model_parallel_size, args.pipeline_model_parallel_split_rank, context_parallel_size=args.context_parallel_size, + hierarchical_context_parallel_sizes=args.hierarchical_context_parallel_sizes, expert_model_parallel_size=args.expert_model_parallel_size, + num_distributed_optimizer_instances=args.num_distributed_optimizer_instances, + expert_tensor_parallel_size=args.expert_tensor_parallel_size, distributed_timeout_minutes=args.distributed_timeout_minutes, nccl_communicator_config_path=args.nccl_communicator_config_path, order='tp-cp-ep-dp-pp' if not args.use_tp_pp_dp_mapping else 'tp-pp-dp', @@ -320,7 +328,7 @@ def _set_random_seed(seed_, data_parallel_random_init=False): if torch.cuda.device_count() > 0: tensor_parallel.model_parallel_cuda_manual_seed(seed) else: - raise ValueError("Seed ({}) should be a positive integer.".format(seed)) + raise ValueError("Seed ({}) should be a positive integer.".format(seed_)) def write_args_to_tensorboard(): @@ -335,9 +343,9 @@ def write_args_to_tensorboard(): def set_jit_fusion_options(): """Set PyTorch JIT layer fusion options.""" # flags required to enable jit fusion kernels - TORCH_MAJOR = int(torch.__version__.split(".")[0]) - TORCH_MINOR = int(torch.__version__.split(".")[1]) - if (TORCH_MAJOR > 1) or (TORCH_MAJOR == 1 and TORCH_MINOR >= 10): + if is_torch_min_version("2.2.0a0"): + pass # we're using torch.compile for jit fusion + elif is_torch_min_version("1.10.0a0"): # nvfuser torch._C._jit_set_profiling_executor(True) torch._C._jit_set_profiling_mode(True) diff --git a/megatron/training/tokenizer/gpt2_tokenization.py b/megatron/training/tokenizer/gpt2_tokenization.py index 4080abeeb..55b95b8ed 100644 --- a/megatron/training/tokenizer/gpt2_tokenization.py +++ b/megatron/training/tokenizer/gpt2_tokenization.py @@ -142,7 +142,8 @@ def from_pretrained(cls, pretrained_model_name_or_path, cache_dir=None, *inputs, kwargs['max_len'] = min(kwargs.get('max_len', int(1e12)), max_len) # Instantiate tokenizer. 
if special_tokens_file and 'special_tokens' not in kwargs: - special_tokens = open(special_tokens_file, encoding='utf-8').read().split('\n')[:-1] + with open(special_tokens_file, encoding='utf-8') as f: + special_tokens = f.read().split('\n')[:-1] else: special_tokens = kwargs.pop('special_tokens', []) tokenizer = cls( @@ -156,12 +157,14 @@ def from_pretrained(cls, pretrained_model_name_or_path, cache_dir=None, *inputs, def __init__(self, vocab_file, merges_file, errors='replace', special_tokens=None, max_len=None): self.max_len = max_len if max_len is not None else int(1e12) - self.encoder = json.load(open(vocab_file)) + with open(vocab_file) as f: + self.encoder = json.load(f) self.decoder = {v: k for k, v in self.encoder.items()} self.errors = errors # how to handle errors in decoding self.byte_encoder = bytes_to_unicode() self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} - bpe_data = open(merges_file, encoding='utf-8').read().split('\n')[1:-1] + with open(merges_file, encoding='utf-8') as f: + bpe_data = f.read().split('\n')[1:-1] bpe_merges = [tuple(merge.split()) for merge in bpe_data] self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges)))) self.cache = {} diff --git a/megatron/training/tokenizer/multimodal_tokenizer.py b/megatron/training/tokenizer/multimodal_tokenizer.py new file mode 100644 index 000000000..c5ea95c06 --- /dev/null +++ b/megatron/training/tokenizer/multimodal_tokenizer.py @@ -0,0 +1,274 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Multimodal tokenizer.""" +from dataclasses import dataclass +from typing import Dict, List, Union + +import numpy as np + +from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer + +# Mark tokens that will be ignored in the loss function with this value. +# Same ignore_index in https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html +from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN + +IMAGE_TAGS = { + "nvlm": ("", ""), + "internvl": ("", ""), + "": None, # Image tag not used. +} + + +# The default mistral template raises exceptions so we use a custom one. +mistral_custom_template = """ +{{- bos_token }} +{%- for message in messages %} + {%- if message['role'] == 'user' %} + {{- '[INST] ' + message['content'] + '[/INST]' }} + {%- elif message['role'] == 'assistant' %} + {{- ' ' + message['content'] + eos_token}} + {%- endif %} +{%- endfor %} +{% if add_generation_prompt %}{{ ' ' }}{% endif %} +""" + + +nvlm_yi_34b_template = "{{- bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}" + + +qwen2p0_custom_template = "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}" + + + +@dataclass +class PromptConfig: + """Config options for different prompt formats.""" + + # How many tokens are used for the assistant prefix, e.g. "<|im_start|>assistant\n". + # Used for masking the assistant prefix. + assistant_prefix_len: int + # Padding token ID. + pad_token_id: int + # For overriding the default chat format template. + custom_chat_template: str + # If the tokenizer inserts BOS token by default. + has_bos: bool + # If the tokenizer supports a separate role for system messages. 
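# The Jinja chat templates defined above (mistral_custom_template, nvlm_yi_34b_template,
# qwen2p0_custom_template) are consumed through HuggingFace's apply_chat_template, as the
# MultimodalTokenizer defined below does. Illustrative stand-alone usage; the checkpoint name
# is only an example and any chat-capable HuggingFace tokenizer would do:
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
conversation = [
    {"role": "user", "content": "Describe the image."},
    {"role": "assistant", "content": "A cat sitting on a sofa."},
]
text = tok.apply_chat_template(
    conversation,
    tokenize=False,                         # return the formatted string rather than token ids
    chat_template=mistral_custom_template,  # override the tokenizer's built-in template
    add_generation_prompt=False,
)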
+ has_system_role: bool + + +class MultimodalTokenizer(MegatronTokenizer): + """Multimodal Tokenizer.""" + + def __init__( + self, + tokenizer: MegatronTokenizer, + prompt_format: str, + special_tokens: List[str], + image_tag_type: str, + ): + """Tokenizer with a support for non-text inputs. + + Note: Currently, only HuggingFaceTokenizer is supported as the underlying text tokenizer. + + Args: + tokenizer (MegatronTokenizer): Underlying tokenizer. + prompt_format (str): Prompt format for the tokenizer. + special_tokens (List[str]): Non-text tokens. + image_tag_type (str): Image tag to apply, if any. For example . + """ + self._vocab_size = len(tokenizer) + + num_added_tokens = tokenizer.add_tokens(special_tokens, special_tokens=True) + assert num_added_tokens == len( + special_tokens + ), f"failed to add {len(special_tokens)} special tokens; only added {num_added_tokens}" + + self._tokenizer = tokenizer + + if prompt_format == "mistral": + # Mistral format doesn't have prefix for the assistant message. + self._prompt_config = PromptConfig( + assistant_prefix_len=0, + pad_token_id=tokenizer.unk_token_id, + custom_chat_template=mistral_custom_template, + has_bos=True, + has_system_role=False, + ) + elif prompt_format == "llama3": + # "<|start_header_id|>assistant<|end_header|>\n\n" is the prefix for assistant messages. + self._prompt_config = PromptConfig( + assistant_prefix_len=4, + pad_token_id=tokenizer.convert_tokens_to_ids("<|end_of_text|>"), + custom_chat_template=None, + has_bos=True, + has_system_role=True, + ) + elif prompt_format == "nvlm-yi-34b": + self._prompt_config = PromptConfig( + assistant_prefix_len=4, + pad_token_id=tokenizer.pad_token_id, + custom_chat_template=nvlm_yi_34b_template, + has_bos=True, + has_system_role=True, + ) + elif prompt_format == "chatml": + # "<|im_start|>assistant\n" is the prefix for assistant messages + self._prompt_config = PromptConfig( + assistant_prefix_len=3, + pad_token_id=tokenizer.pad_token_id, + custom_chat_template=None, + has_bos=False, + has_system_role=True, + ) + elif prompt_format == "qwen2p0": + # "<|im_start|>assistant\n" is the prefix for assistant messages + self._prompt_config = PromptConfig( + assistant_prefix_len=3, + pad_token_id=tokenizer.pad_token_id, + custom_chat_template=qwen2p0_custom_template, + has_bos=False, + has_system_role=True, + ) + else: + raise NotImplementedError("unknown multimodal tokenizer type", prompt_format) + + self._image_tag = IMAGE_TAGS[image_tag_type] + + def _apply_image_tag(self, text: Union[str, List[Dict]]): + """Surround with image tags such as and .""" + if self._image_tag is None: + return text + + replacement = f"{self._image_tag[0]}{IMAGE_TOKEN}{self._image_tag[1]}" + + if isinstance(text, list): + for turn in text: + turn["content"] = turn["content"].replace(IMAGE_TOKEN, replacement) + else: + text = text.replace(IMAGE_TOKEN, replacement) + + return text + + def tokenize(self, text: Union[str, List[Dict]]): + """Tokenize conversation or string input.""" + if isinstance(text, list): + # This code path is used by the inference code currently. + return self.tokenize_conversation(text, False, True).tolist() + + return self._encode(text) + + def _encode(self, text: str): + """Tokenize text input.""" + text = self._apply_image_tag(text) + return self._tokenizer.encode(text) + + def tokenize_conversation( + self, conversation: List[Dict], return_target: bool, add_generation_prompt: bool + ): + """Convert a conversation to tokens. 
+ + Args: + conversation (List[Dict]): Sequence of system/user/assistant messages. + Must be in the following format: + [ + {"role": "user", "content": "something"}, + {"role": "assistant", "content": "something2"}, + ] + return_target (bool): Return target tokens with system and assistant masked. + add_generation_prompt (bool): Add assistant prefix to the end. + """ + # Skip system message if the tokenizer doesn't have a system role. + if not self._prompt_config.has_system_role and conversation[0]["role"] == "system": + conversation = conversation[1:] + + # Apply possible image tag. + conversation = self._apply_image_tag(conversation) + + tokens = self._tokenizer.apply_chat_template( + conversation, + tokenize=True, + add_generation_prompt=add_generation_prompt, + return_assistant_token_mask=False, + return_tensors="np", + chat_template=self._prompt_config.custom_chat_template, + )[0] + + if not return_target: + return tokens + + target = tokens.copy() + + # Mask system and user tokens in the target. + idx = 0 + for turn_idx, turn in enumerate(conversation): + if len(turn["content"]) == 0: + raise ValueError(f"empty turn in conversation: {conversation}. Skipping.") + + turn_tokens = self._tokenizer.apply_chat_template( + [turn], tokenize=True, chat_template=self._prompt_config.custom_chat_template + ) + + # There should be only one BOS at the very beginning. + # After the first turn, skip BOS token. + if self._prompt_config.has_bos and turn_idx > 0: + turn_tokens = turn_tokens[1:] + + turn_len = len(turn_tokens) + + role = turn["role"] + if role in ("system", "user"): + target[idx : idx + turn_len] = IGNORE_INDEX + elif role == "assistant": + if IMAGE_TOKEN in turn["content"]: + raise RuntimeError(f"{IMAGE_TOKEN} not allowed in assistant content!") + + if self._prompt_config.assistant_prefix_len > 0: + target[idx : idx + self._prompt_config.assistant_prefix_len] = IGNORE_INDEX + + assert np.allclose( + tokens[idx : idx + turn_len], turn_tokens + ), f"expected turn tokens to match tokens in conversation {conversation}" + + idx += turn_len + + assert idx == len(tokens), f"mismatch in target masking the conversation {conversation}" + + return tokens, target + + def convert_tokens_to_ids(self, tokens: List[str]): + """Convert tokens to IDs.""" + return self._tokenizer.convert_tokens_to_ids(tokens) + + def detokenize(self, tokens: List[int]): + """Detokenize tokens.""" + return self._tokenizer.decode(tokens) + + def get_special_tokens(self): + """Get special tokens.""" + return self._tokenizer.get_added_vocab() + + @property + def pad(self): + """Pad token ID.""" + return self._prompt_config.pad_token_id + + @property + def eod(self): + """End of sentence token ID.""" + return self._tokenizer.eos_token_id + + @property + def vocab(self): + """Vocab.""" + return NotImplementedError("not used") + + @property + def inv_vocab(self): + """Inverse vocab.""" + return NotImplementedError("not used") + + @property + def vocab_size(self): + """Vocabulary size.""" + return self._vocab_size diff --git a/megatron/training/tokenizer/tokenizer.py b/megatron/training/tokenizer/tokenizer.py index 1ddc7a237..d50f772e0 100644 --- a/megatron/training/tokenizer/tokenizer.py +++ b/megatron/training/tokenizer/tokenizer.py @@ -14,6 +14,7 @@ from .bert_tokenization import FullTokenizer as FullBertTokenizer from .gpt2_tokenization import GPT2Tokenizer +from megatron.training.tokenizer.multimodal_tokenizer import MultimodalTokenizer def build_tokenizer(args, **kwargs): @@ -64,6 +65,33 @@ def build_tokenizer(args, 
**kwargs): elif args.tokenizer_type == 'NullTokenizer': assert args.vocab_size is not None tokenizer = _NullTokenizer(args.vocab_size) + elif args.tokenizer_type == "MultimodalTokenizer": + try: + import transformers + except ImportError: + raise ImportError( + "MultimodalTokenizer currently requires transformers library to be installed" + ) + + kwargs = dict() + if args.tokenizer_prompt_format == "nvlm-yi-34b": + kwargs = { + "from_slow": True, + "legacy": False, + "add_bos_token": True, + } + + # Currently, only HuggingFace tokenizers are supported. + underlying_tokenizer = transformers.AutoTokenizer.from_pretrained( + pretrained_model_name_or_path=args.tokenizer_model, **kwargs + ) + + tokenizer = MultimodalTokenizer( + underlying_tokenizer, + args.tokenizer_prompt_format, + args.special_tokens, + args.image_tag_type, + ) else: raise NotImplementedError('{} tokenizer is not ' 'implemented.'.format(args.tokenizer_type)) diff --git a/megatron/training/training.py b/megatron/training/training.py index d5ee16be5..09d7cfce9 100644 --- a/megatron/training/training.py +++ b/megatron/training/training.py @@ -32,6 +32,13 @@ from megatron.legacy.model import Float16Module from megatron.core.distributed import DistributedDataParallelConfig from megatron.core.distributed import DistributedDataParallel as DDP +try: + from megatron.core.distributed import TorchFullyShardedDataParallel as torch_FSDP + + HAVE_FSDP2 = True +except ImportError: + HAVE_FSDP2 = False + from megatron.core.distributed import finalize_model_grads from megatron.core.enums import ModelType from megatron.core.optimizer import get_megatron_optimizer, OptimizerConfig @@ -92,7 +99,7 @@ def print_datetime(string): """Note that this call will sync across all ranks.""" torch.distributed.barrier() time_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S') - print_rank_0('[' + string + '] datetime: {} '.format(time_str)) + print_rank_0(f'[{string}] datetime: {time_str} ') def num_floating_point_operations(args, batch_size): @@ -196,6 +203,17 @@ def _get_field(string, type): start_num_floating_point_operations +def preprocess_common_state_dict(common_state_dict): + import copy + # Convert args key of type namespace to dictionary + preprocessed_common_state_dict = copy.deepcopy(common_state_dict) + preprocessed_common_state_dict['args'] = vars(preprocessed_common_state_dict['args']) + # Remove rank and local rank from state dict if it exists, since they are expected to be different + preprocessed_common_state_dict['args'].pop('local_rank', None) + preprocessed_common_state_dict['args'].pop('rank', None) + return preprocessed_common_state_dict + + def pretrain( train_valid_test_dataset_provider, model_provider, @@ -214,7 +232,7 @@ def pretrain( 1) initialize Megatron. 2) setup model, optimizer and lr schedule using the model_provider. 3) call train_val_test_data_provider to get train/val/test datasets. - 4) train the modle using the forward_step_func. + 4) train the model using the forward_step_func. 
Args: train_valid_test_dataset_provider: a function that takes the size of @@ -278,9 +296,6 @@ def pretrain( print_datetime('after megatron is initialized') app_metrics['app_model_init_finish_time'] = one_logger_utils.get_timestamp_in_ms() - args = get_args() - timers = get_timers() - # Track E2E metrics on pretrain start one_logger_utils.on_pretrain_start() @@ -372,7 +387,7 @@ def pretrain( num_floating_point_operations_so_far, checkpointing_context, train_data_iterator=train_data_iterator, ft_client=ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.SAVE_CHECKPOINT)) + ft_integration.StateMachineActions.SAVE_CHECKPOINT), preprocess_common_state_dict_fn=preprocess_common_state_dict) one_logger and one_logger.log_metrics({ 'app_train_loop_finish_time': one_logger_utils.get_timestamp_in_ms() @@ -438,7 +453,7 @@ def update_train_iters(args): args.global_batch_size args.train_iters = iterations - print_rank_0('setting training iterations to {}'.format(args.train_iters)) + print_rank_0(f'setting training iterations to {args.train_iters}') def get_model(model_provider_func, model_type=ModelType.encoder_or_decoder, wrap_with_ddp=True): @@ -533,6 +548,12 @@ def get_model(model_provider_func, model_type=ModelType.encoder_or_decoder, wrap fp8_meta.amax_history[0][fp8_meta_index] = 0 if wrap_with_ddp: + if getattr(args, "use_torch_fsdp2", False): + assert HAVE_FSDP2, "Torch FSDP2 requires torch>=2.4.0" + DP = torch_FSDP + else: + DP = DDP + config = get_model_config(model[0]) kwargs = {} @@ -546,9 +567,9 @@ def get_model(model_provider_func, model_type=ModelType.encoder_or_decoder, wrap ddp_config = DistributedDataParallelConfig(**kwargs) overlap_param_gather_with_optimizer_step = getattr(args, 'overlap_param_gather_with_optimizer_step', False) - model = [DDP(config, - ddp_config, - model_chunk, + model = [DP(config=config, + ddp_config=ddp_config, + module=model_chunk, # Turn off bucketing for model_chunk 2 onwards, since communication for these # model chunks is overlapped with compute anyway. disable_bucketing=(model_chunk_idx > 0) or overlap_param_gather_with_optimizer_step) @@ -679,7 +700,8 @@ def setup_model_and_optimizer(model_provider_func, args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( model, optimizer, opt_param_scheduler, - ft_client=ft_integration.get_rank_monitor_client(), checkpointing_context=checkpointing_context) + ft_client=ft_integration.get_rank_monitor_client(), checkpointing_context=checkpointing_context, + skip_load_to_model_and_opt=HAVE_FSDP2 and getattr(args, "use_torch_fsdp2", False)) timers('load-checkpoint').stop(barrier=True) timers.log(['load-checkpoint']) one_logger and one_logger.log_metrics({ @@ -706,7 +728,8 @@ def setup_model_and_optimizer(model_provider_func, update_use_dist_ckpt(args) save_checkpoint(args.iteration, model, optimizer, opt_param_scheduler, - args.num_floating_point_operations_so_far) + args.num_floating_point_operations_so_far, + preprocess_common_state_dict_fn=preprocess_common_state_dict) print_rank_0("> converted checkpoint: %s -> %s." 
% (load_ckpt_format, args.ckpt_format)) torch.distributed.barrier() @@ -877,6 +900,12 @@ def training_log(loss_dict, total_loss_dict, learning_rate, decoupled_learning_r timers.write(timers_to_log, writer, iteration, normalizer=total_iterations) if writer and (iteration % args.tensorboard_log_interval == 0): + if args.record_memory_history and is_last_rank(): + snapshot = torch.cuda.memory._snapshot() + from pickle import dump + with open(args.memory_snapshot_path , 'wb') as f: + dump(snapshot, f) + if wandb_writer: wandb_writer.log({'samples vs steps': args.consumed_train_samples}, iteration) @@ -988,14 +1017,14 @@ def training_log(loss_dict, total_loss_dict, learning_rate, decoupled_learning_r wandb_writer.log({'throughput': throughput}, iteration) assert learning_rate is not None # Decoupled_learning_rate should be not None only on first and last pipeline stage. - log_string += ' learning rate: {:.6E} |'.format(learning_rate) + log_string += f' learning rate: {learning_rate:.6E} |' if args.decoupled_lr is not None and (mpu.is_pipeline_first_stage(ignore_virtual=True) or mpu.is_pipeline_last_stage(ignore_virtual=True)): assert decoupled_learning_rate is not None - log_string += ' decoupled learning rate: {:.6E} |'.format(decoupled_learning_rate) + log_string += f' decoupled learning rate: {decoupled_learning_rate:.6E} |' else: assert decoupled_learning_rate is None - log_string += ' global batch size: {:5d} |'.format(batch_size) + log_string += f' global batch size: {batch_size:5d} |' for key in total_loss_dict: if key not in [advanced_iters_key, skipped_iters_key, nan_iters_key]: @@ -1004,13 +1033,13 @@ def training_log(loss_dict, total_loss_dict, learning_rate, decoupled_learning_r if avg > 0.0: log_string += ' {}: {:.6E} |'.format(key, avg) total_loss_dict[key] = torch.tensor([0.0], dtype=torch.float, device='cuda') - log_string += ' loss scale: {:.1f} |'.format(loss_scale) + log_string += f' loss scale: {loss_scale:.1f} |' if grad_norm is not None: - log_string += ' grad norm: {:.3f} |'.format(grad_norm) + log_string += f' grad norm: {grad_norm:.3f} |' if num_zeros_in_grad is not None: - log_string += ' num zeros: {:.1f} |'.format(num_zeros_in_grad) + log_string += f' num zeros: {num_zeros_in_grad} |' if params_norm is not None: - log_string += ' params norm: {:.3f} |'.format(params_norm) + log_string += f' params norm: {params_norm:.3f} |' log_string += ' number of skipped iterations: {:3d} |'.format( total_loss_dict[skipped_iters_key]) log_string += ' number of nan iterations: {:3d} |'.format( @@ -1024,7 +1053,7 @@ def training_log(loss_dict, total_loss_dict, learning_rate, decoupled_learning_r if torch.distributed.get_rank() == 0: num_microbatches = get_num_microbatches() report_theoretical_memory(args, num_microbatches=num_microbatches, verbose=True) - report_memory('(after {} iterations)'.format(iteration)) + report_memory(f'(after {iteration} iterations)') report_memory_flag = False timers.log(timers_to_log, normalizer=args.log_interval) @@ -1065,6 +1094,18 @@ def compute_throughputs_and_append_to_progress_log(iteration, f"Tokens (in billions): {tokens_so_far / 10**9:.2f}") +def enable_forward_pre_hook(model_chunks): + for model_chunk in model_chunks: + assert isinstance(model_chunk, DDP) + model_chunk.enable_forward_pre_hook() + + +def disable_forward_pre_hook(model_chunks): + for model_chunk in model_chunks: + assert isinstance(model_chunk, DDP) + model_chunk.disable_forward_pre_hook() + + def save_checkpoint_and_time(iteration, model, optimizer, opt_param_scheduler, 
num_floating_point_operations_so_far, checkpointing_context, non_persistent_ckpt=False, train_data_iterator=None): @@ -1081,14 +1122,14 @@ def save_checkpoint_and_time(iteration, model, optimizer, opt_param_scheduler, # Log E2E metrics before save-checkpoint one_logger_utils.track_e2e_metrics() if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.disable_pre_hook() + disable_forward_pre_hook(model) save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, checkpointing_context, non_persistent_ckpt=non_persistent_ckpt, train_data_iterator=train_data_iterator, ft_client=ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.SAVE_CHECKPOINT)) + ft_integration.StateMachineActions.SAVE_CHECKPOINT), preprocess_common_state_dict_fn=preprocess_common_state_dict) if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.enable_pre_hook() + enable_forward_pre_hook(model) timers(timer_key).stop(barrier=True) timers.log([timer_key]) save_checkpoint_finish_time = timers('save-checkpoint').active_time() @@ -1106,10 +1147,150 @@ def save_checkpoint_and_time(iteration, model, optimizer, opt_param_scheduler, timers('interval-time', log_level=0).start(barrier=True) +def post_training_step_callbacks(model, optimizer, opt_param_scheduler, iteration, prof, + num_floating_point_operations_since_last_log_event): + """Run all post-training-step functions (e.g., FT heartbeats, GC).""" + args = get_args() + + # Send heartbeat to FT package and update timeouts. + if args.enable_ft_package: + ft_client = ft_integration.get_rank_monitor_client( + ft_integration.StateMachineActions.TRAIN_HEARTBEAT) + if ft_client is not None: + ft_client.send_heartbeat() + # TODO: We are always calculating timeouts in the current implementation. + # If we want to rely on manually setting these, then we need to add additional + # arguments to training and pass it here. + if ft_integration.can_update_timeouts(): + ft_integration.get_rank_monitor_client( + ft_integration.StateMachineActions.UPDATE_TIMEOUT).calculate_and_set_timeouts() + print_rank_0(f'Updated FT timeouts. New values: \ + {ft_integration.get_rank_monitor_client().timeouts}') + + # Bring CPU and GPU back in sync if on right iteration. + if args.train_sync_interval and iteration % args.train_sync_interval == 0: + torch.cuda.synchronize() + + # Straggler detector. + if iteration % args.log_interval == 0 and args.log_straggler: + stimer.report(num_floating_point_operations_since_last_log_event, args.log_interval) + num_floating_point_operations_since_last_log_event = 0.0 + + # Check weight hash across DP replicas. + if args.check_weight_hash_across_dp_replicas_interval is not None and \ + iteration % args.check_weight_hash_across_dp_replicas_interval == 0: + if args.use_distributed_optimizer and args.overlap_param_gather: + disable_forward_pre_hook(model) + assert check_param_hashes_across_dp_replicas(model, cross_check=True), \ + "Parameter hashes not matching across DP replicas" + torch.distributed.barrier() + print_rank_0(f">>> Weight hashes match after {iteration} iterations...") + if args.use_distributed_optimizer and args.overlap_param_gather: + enable_forward_pre_hook(model) + + # Autoresume. + if args.adlr_autoresume and \ + (iteration % args.adlr_autoresume_interval == 0): + check_adlr_autoresume_termination(iteration, model, optimizer, + opt_param_scheduler) + + # Profiling. 
+ if args.profile and \ + iteration == args.profile_step_end and \ + torch.distributed.get_rank() in args.profile_ranks: + if args.use_pytorch_profiler: + assert prof is not None + prof.stop() + else: + torch.cuda.cudart().cudaProfilerStop() + + # Manual garbage collection. + if args.manual_gc: + if args.manual_gc_interval != 0 and iteration % args.manual_gc_interval == 0: + gc.collect() + + +def checkpoint_and_decide_exit(model, optimizer, opt_param_scheduler, iteration, + num_floating_point_operations_so_far, checkpointing_context, + train_data_iterator): + """Save checkpoint and decide whether to exit based on arguments (e.g., if + --exit-duration-in-mins is set). Actual exit happens in main training loop + based on the return value of this function.""" + args = get_args() + timers = get_timers() + + # Exit based on signal handler. + saved_checkpoint = False + if args.exit_signal_handler: + signal_handler = get_signal_handler() + if any(signal_handler.signals_received()): + if args.save: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + print_datetime('exiting program after receiving SIGTERM.') + + return True + + # Regular save (persistent and non-persistent). + if args.save and args.save_interval and \ + iteration % args.save_interval == 0: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + saved_checkpoint = True + + elif args.save and args.non_persistent_save_interval and \ + iteration % args.non_persistent_save_interval == 0: + timers('interval-time').stop() + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, + non_persistent_ckpt=True, train_data_iterator=train_data_iterator) + saved_checkpoint = True + timers('interval-time', log_level=0).start(barrier=True) + + # Exit based on duration. + if args.exit_duration_in_mins: + train_time = (time.time() - _TRAIN_START_TIME) / 60.0 + done_cuda = torch.tensor( + [train_time > args.exit_duration_in_mins], + dtype=torch.int, device='cuda') + torch.distributed.all_reduce( + done_cuda, op=torch.distributed.ReduceOp.MAX) + done = done_cuda.item() + if done: + if args.save and not saved_checkpoint: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + print_datetime(f'exiting program after {train_time} minutes') + + return True + + # Exit based on iterations. 
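# Why the duration-based exit above all-reduces with MAX before deciding: wall clocks drift, so
# individual ranks can disagree on whether --exit-duration-in-mins has elapsed, and any rank that
# keeps training while others exit would hang in its next collective. A minimal sketch of the
# agreement pattern (assumes torch.distributed is initialized and a CUDA device is selected):
import time
import torch

def all_ranks_agree_to_stop(start_time: float, limit_minutes: float) -> bool:
    elapsed_min = (time.time() - start_time) / 60.0
    done = torch.tensor([elapsed_min > limit_minutes], dtype=torch.int, device='cuda')
    # If any rank hit the limit, MAX makes every rank see 1 and take the same branch.
    torch.distributed.all_reduce(done, op=torch.distributed.ReduceOp.MAX)
    return bool(done.item())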
+ if args.exit_interval and iteration % args.exit_interval == 0: + if args.save and not saved_checkpoint: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + torch.distributed.barrier() + print_datetime(f'exiting program at iteration {iteration}') + + return True + + return False + + def train(forward_step_func, model, optimizer, opt_param_scheduler, train_data_iterator, valid_data_iterator, process_non_loss_data_func, config, checkpointing_context, non_loss_data_func): - """Train the model function.""" + """Training function: run train_step desired number of times, run validation, checkpoint.""" args = get_args() timers = get_timers() one_logger = get_one_logger() @@ -1127,7 +1308,7 @@ def train(forward_step_func, model, optimizer, opt_param_scheduler, # Iterations. iteration = args.iteration - # Track E2E metrics at the start of training + # Track E2E metrics at the start of training. one_logger_utils.on_train_start(iteration=iteration, consumed_train_samples=args.consumed_train_samples, train_samples=args.train_samples, seq_length=args.seq_length, train_iters=args.train_iters, save=args.save, async_save=args.async_save, @@ -1136,7 +1317,7 @@ def train(forward_step_func, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far = args.num_floating_point_operations_so_far - # Setup some training config params + # Setup some training config params. config.grad_scale_func = optimizer.scale_loss config.timers = timers if isinstance(model[0], DDP) and args.overlap_grad_reduce: @@ -1159,17 +1340,17 @@ def train(forward_step_func, model, optimizer, opt_param_scheduler, timers('interval-time', log_level=0).start(barrier=True) print_datetime('before the start of training step') report_memory_flag = True - exit = False + should_exit = False if args.manual_gc: # Disable the default garbage collector and perform the collection manually. # This is to align the timing of garbage collection across ranks. assert args.manual_gc_interval >= 0, \ - 'Manual garbage collection interval should be laerger than or equal to 0.' + 'Manual garbage collection interval should be larger than or equal to 0' gc.disable() gc.collect() - # Singleton Initialization + # Singleton initialization of straggler detector. if args.log_straggler: global stimer world = torch.distributed.get_world_size() @@ -1179,7 +1360,7 @@ def train(forward_step_func, model, optimizer, opt_param_scheduler, mmcnt = mmcnt, enabled = not args.disable_straggler_on_startup, port = args.straggler_ctrlr_port) - total_flops = 0.0 + num_floating_point_operations_since_last_log_event = 0.0 num_microbatches = get_num_microbatches() eval_duration = 0.0 @@ -1193,17 +1374,18 @@ def get_e2e_base_metrics(): 'train_duration': timers('interval-time').active_time(), 'eval_duration': eval_duration, 'eval_iterations': eval_iterations, - 'total_flops': total_flops, + 'total_flops': num_floating_point_operations_since_last_log_event, 'num_floating_point_operations_so_far': num_floating_point_operations_so_far, 'consumed_train_samples': args.consumed_train_samples, 'world_size': args.world_size, 'seq_length': args.seq_length } - # Cache into one-logger for callback + # Cache into one-logger for callback. 
if one_logger: with one_logger.get_context_manager(): one_logger.store_set('get_e2e_base_metrics', get_e2e_base_metrics) + prof = None if args.profile and torch.distributed.get_rank() in args.profile_ranks and args.use_pytorch_profiler: prof = torch.profiler.profile( schedule=torch.profiler.schedule( @@ -1216,6 +1398,7 @@ def get_e2e_base_metrics(): with_stack=True) prof.start() + # Run training iterations till done. while iteration < args.train_iters: if args.profile and torch.distributed.get_rank() in args.profile_ranks: if args.use_pytorch_profiler: @@ -1224,7 +1407,7 @@ def get_e2e_base_metrics(): torch.cuda.cudart().cudaProfilerStart() torch.autograd.profiler.emit_nvtx(record_shapes=True).__enter__() - maybe_finalize_async_save(False) + maybe_finalize_async_save(blocking=False) # Update number of microbatches first without consistency check to decide if a # checkpoint should be saved. If the number of microbatches is different @@ -1233,7 +1416,8 @@ def get_e2e_base_metrics(): update_num_microbatches(args.consumed_train_samples, consistency_check=False, verbose=True) if get_num_microbatches() != num_microbatches and iteration != 0: assert get_num_microbatches() > num_microbatches, \ - "number of microbatches should be increasing due to batch size rampup ... %d -> %d." % (num_microbatches, get_num_microbatches()) + (f"Number of microbatches should be increasing due to batch size rampup; " + f"instead going from {num_microbatches} to {get_num_microbatches()}") if args.save is not None: save_checkpoint_and_time(iteration, model, optimizer, opt_param_scheduler, @@ -1242,6 +1426,7 @@ def get_e2e_base_metrics(): num_microbatches = get_num_microbatches() update_num_microbatches(args.consumed_train_samples, consistency_check=True, verbose=True) + # Run training step. args.curr_iteration = iteration loss_dict, skipped_iter, grad_norm, num_zeros_in_grad = \ train_step(forward_step_func, @@ -1262,38 +1447,15 @@ def get_e2e_base_metrics(): else: assert num_skipped_samples_in_batch == 0 args.skipped_train_samples += num_skipped_samples_in_batch - num_fp_ops = num_floating_point_operations(args, batch_size) - num_floating_point_operations_so_far += num_fp_ops - total_flops += num_fp_ops - - # Send heartbeat to FT package and update timeouts. - if args.enable_ft_package: - ft_client = ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.TRAIN_HEARTBEAT) - if ft_client is not None: - ft_client.send_heartbeat() - # TODO we are always calculating timeouts in the current implementation - # if we want to rely on manually setup then we need to add additional argument - # to training and pass it here - if ft_integration.can_update_timeouts(): - ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.UPDATE_TIMEOUT).calculate_and_set_timeouts() - print_rank_0(f'Updated FT timeouts. New values: \ - {ft_integration.get_rank_monitor_client().timeouts}') - - # Bring CPU and GPU back in sync if on right iteration. - if ( - args.train_sync_interval - and iteration % args.train_sync_interval == 0 - ): - torch.cuda.synchronize() + num_floating_point_operations_in_batch = num_floating_point_operations(args, batch_size) + num_floating_point_operations_so_far += num_floating_point_operations_in_batch + num_floating_point_operations_since_last_log_event += num_floating_point_operations_in_batch # Logging. 
loss_scale = optimizer.get_loss_scale().item() params_norm = None if args.log_params_norm: params_norm = calc_params_l2_norm(model) - learning_rate = None decoupled_learning_rate = None for param_group in optimizer.param_groups: @@ -1308,38 +1470,16 @@ def get_e2e_base_metrics(): report_memory_flag, skipped_iter, grad_norm, params_norm, num_zeros_in_grad) - # StragglerDetector - if iteration % args.log_interval == 0 and args.log_straggler: - stimer.report(total_flops, args.log_interval) - total_flops = 0.0 - - if args.check_weight_hash_across_dp_replicas_interval is not None and \ - iteration % args.check_weight_hash_across_dp_replicas_interval == 0: - if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.disable_pre_hook() - assert check_param_hashes_across_dp_replicas(model, cross_check=True), \ - "Parameter hashes not matching across DP replicas" - torch.distributed.barrier() - print_rank_0(f">>> Weight hashes match after {iteration} iterations...") - if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.enable_pre_hook() - - # Autoresume - if args.adlr_autoresume and \ - (iteration % args.adlr_autoresume_interval == 0): - check_adlr_autoresume_termination(iteration, model, optimizer, - opt_param_scheduler) - - # Evaluation + # Evaluation. if args.eval_interval and iteration % args.eval_interval == 0 and \ - args.do_valid: + args.do_valid: timers('interval-time').stop() if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.disable_pre_hook() + disable_forward_pre_hook(model) if args.manual_gc and args.manual_gc_eval: # Collect all objects. gc.collect() - prefix = 'iteration {}'.format(iteration) + prefix = f'iteration {iteration}' timers('eval-time', log_level=0).start(barrier=True) evaluate_and_print_results(prefix, forward_step_func, valid_data_iterator, model, @@ -1355,108 +1495,43 @@ def get_e2e_base_metrics(): # Collect only the objects created and used in evaluation. gc.collect(generation=0) if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.enable_pre_hook() + enable_forward_pre_hook(model) timers('interval-time', log_level=0).start(barrier=True) - if args.enable_ft_package and ft_integration.get_rank_monitor_client() is not None: ft_integration.get_rank_monitor_client( ft_integration.StateMachineActions.EVAL_HEARTBEAT).send_heartbeat() - # Checkpointing - saved_checkpoint = False - if args.exit_signal_handler: - signal_handler = get_signal_handler() - if any(signal_handler.signals_received()): - if args.save: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - print_datetime('exiting program after receiving SIGTERM.') - exit = True - break - - if args.save and args.save_interval and \ - iteration % args.save_interval == 0: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - saved_checkpoint = True + # Miscellaneous post-training-step functions (e.g., FT heartbeats, GC). + # Some of these only happen at specific iterations. 
+ post_training_step_callbacks(model, optimizer, opt_param_scheduler, iteration, prof, + num_floating_point_operations_since_last_log_event) - elif args.save and args.non_persistent_save_interval and \ - iteration % args.non_persistent_save_interval == 0: - timers('interval-time').stop() - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, - non_persistent_ckpt=True, train_data_iterator=train_data_iterator) - saved_checkpoint = True - timers('interval-time', log_level=0).start(barrier=True) - - # Exiting based on duration - if args.exit_duration_in_mins: - train_time = (time.time() - _TRAIN_START_TIME) / 60.0 - done_cuda = torch.tensor( - [train_time > args.exit_duration_in_mins], - dtype=torch.int, device='cuda') - torch.distributed.all_reduce( - done_cuda, op=torch.distributed.ReduceOp.MAX) - done = done_cuda.item() - if done: - if args.save and not saved_checkpoint: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - print_datetime('exiting program after {} minutes'.format(train_time)) - exit = True - break - - # Exiting based on iterations - if args.exit_interval and iteration % args.exit_interval == 0: - if args.save and not saved_checkpoint: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - torch.distributed.barrier() - print_datetime('exiting program at iteration {}'.format(iteration)) - exit = True + # Checkpoint and decide whether to exit. + should_exit = checkpoint_and_decide_exit(model, optimizer, opt_param_scheduler, iteration, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator) + if should_exit: break - if args.profile and \ - iteration == args.profile_step_end and \ - torch.distributed.get_rank() in args.profile_ranks: - if args.use_pytorch_profiler: - prof.stop() - else: - torch.cuda.cudart().cudaProfilerStop() - - if args.manual_gc: - if args.manual_gc_interval != 0 and iteration % args.manual_gc_interval == 0: - gc.collect() - one_logger_utils.track_e2e_metrics() - # Flush TensorBoard, WandB writers and one-logger + # Flush TensorBoard, WandB writers and one-logger. writer = get_tensorboard_writer() if writer: writer.flush() # Close out pre-hooks if using distributed optimizer and overlapped param gather. if args.use_distributed_optimizer and args.overlap_param_gather: - optimizer.disable_pre_hook() + disable_forward_pre_hook(model) if args.enable_ft_package and ft_integration.get_rank_monitor_client() is not None: ft_integration.get_rank_monitor_client().shutdown_workload_monitoring() - maybe_finalize_async_save(True) + maybe_finalize_async_save(blocking=True) # If any exit conditions (signal handler, duration, iterations) have been reached, exit. 
- if exit: + if should_exit: wandb_writer = get_wandb_writer() if wandb_writer: wandb_writer.finish() @@ -1595,7 +1670,7 @@ def evaluate_and_print_results(prefix, forward_step_func, # Timelimit hit during evaluation if timelimit: return - string = ' validation loss at {} | '.format(prefix) + string = f' validation loss at {prefix} | ' for key in total_loss_dict: string += '{} value: {:.6E} | '.format(key, total_loss_dict[key].item()) ppl = math.exp(min(20, total_loss_dict[key].item())) @@ -1676,7 +1751,7 @@ def build_train_valid_test_data_loaders( # Backward compatibility, assume fixed batch size. if args.iteration > 0 and args.consumed_train_samples == 0: assert args.train_samples is None, \ - 'only backward compatiblity support for iteration-based training' + 'Only backward compatiblity support for iteration-based training' args.consumed_train_samples = args.iteration * args.global_batch_size if args.iteration > 0 and args.consumed_valid_samples == 0: if args.train_samples is None: diff --git a/megatron/training/utils.py b/megatron/training/utils.py index 4c3223d0d..6c4143609 100644 --- a/megatron/training/utils.py +++ b/megatron/training/utils.py @@ -1,6 +1,7 @@ # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. """General utilities.""" +import json import os import sys from datetime import datetime @@ -11,13 +12,10 @@ from transformer_engine.pytorch.optimizers import multi_tensor_applier, multi_tensor_l2norm except ImportError: try: + from amp_C import multi_tensor_l2norm from apex.multi_tensor_apply import multi_tensor_applier except ImportError: - multi_tensor_applier = None - try: - from amp_C import multi_tensor_l2norm - except ImportError: import warnings warnings.warn( f'Transformer Engine and Apex are not installed. ' @@ -36,12 +34,17 @@ ) from megatron.core import DistributedDataParallel as DDP from megatron.core import mpu +from megatron.core.datasets.utils import get_blend_from_list from megatron.core.tensor_parallel import param_is_not_tensor_parallel_duplicate +from megatron.core.utils import get_data_parallel_group_if_dtensor, to_local_if_dtensor from megatron.legacy.model import Float16Module from megatron.legacy.model.module import param_is_not_shared - -ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, Float16Module) +try: + from megatron.core.distributed import TorchFullyShardedDataParallel as torch_FSDP + ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, torch_FSDP, Float16Module) +except ImportError: + ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, Float16Module) def unwrap_model(model, module_instances=ALL_MODULE_WRAPPER_CLASSNAMES): @@ -64,21 +67,28 @@ def calc_params_l2_norm(model): args = get_args() if not isinstance(model, list): model = [model] - # Remove duplicate params. 
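# The rewritten calc_params_l2_norm that follows splits parameters into dense and MoE groups,
# takes a fused L2 norm of each with multi_tensor_l2norm, and sums the squared norms over the
# matching parallel groups before combining them. A slower, un-fused reference for one group of
# local shards, showing the same arithmetic (the process group is assumed to come from mpu):
import torch

def reference_param_l2_norm(params_data, model_parallel_group):
    norm_2 = torch.zeros(1, device='cuda')
    for p in params_data:
        norm_2 += p.float().pow(2).sum()       # squared L2 norm of the local shard
    torch.distributed.all_reduce(norm_2, op=torch.distributed.ReduceOp.SUM,
                                 group=model_parallel_group)
    return norm_2.item() ** 0.5                # full norm, identical on every rank in the group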
+ # Separate moe and dense params params_data = [] - for model_ in model: - for param in model_.parameters(): + moe_params_data = [] + data_parallel_group = None + + for model_chunk in model: + for i, param in enumerate(model_chunk.parameters()): + data_parallel_group = get_data_parallel_group_if_dtensor(param, data_parallel_group) is_not_tp_duplicate = param_is_not_tensor_parallel_duplicate(param) - if mpu.get_expert_model_parallel_rank() > 0: - if not getattr(param, 'allreduce', True) and is_not_tp_duplicate: - assert param_is_not_shared(param) - params_data.append(param.data.float() if args.bf16 else param.data) + if not (param.requires_grad and is_not_tp_duplicate): + continue + assert is_not_tp_duplicate + if not getattr(param, 'allreduce', True): + assert param_is_not_shared(param) + param = to_local_if_dtensor(param) + moe_params_data.append(param.data.float() if args.bf16 else param.data) else: - is_not_shared = param_is_not_shared(param) - if is_not_shared and is_not_tp_duplicate: + if param_is_not_shared(param): + param = to_local_if_dtensor(param) params_data.append(param.data.float() if args.bf16 else param.data) - # Calculate norm + # Calculate dense param norm dummy_overflow_buf = torch.tensor([0], dtype=torch.int, device='cuda') norm, _ = multi_tensor_applier( multi_tensor_l2norm, @@ -87,19 +97,34 @@ def calc_params_l2_norm(model): False # no per-parameter norm ) norm_2 = norm * norm - if mpu.get_expert_model_parallel_world_size() == 1: - # Sum across all model-parallel GPUs(tensor + pipeline). - torch.distributed.all_reduce(norm_2, - op=torch.distributed.ReduceOp.SUM, - group=mpu.get_model_parallel_group()) - else: - # Sum across tensor, pipeline and expert model-parallel GPUs. - torch.distributed.all_reduce(norm_2, - op=torch.distributed.ReduceOp.SUM, - group=mpu.get_tensor_and_expert_parallel_group()) + + if data_parallel_group is not None: torch.distributed.all_reduce(norm_2, op=torch.distributed.ReduceOp.SUM, - group=mpu.get_pipeline_model_parallel_group()) + group=data_parallel_group) + + # Sum across all model-parallel GPUs (tensor + pipeline). + torch.distributed.all_reduce( + norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_model_parallel_group() + ) + # Calculate moe norm + if len(moe_params_data) > 0: + moe_norm, _ = multi_tensor_applier( + multi_tensor_l2norm, + dummy_overflow_buf, + [moe_params_data], + False # no per-parameter norm + ) + moe_norm_2 = moe_norm * moe_norm + # Sum across expert tensor, model and pipeline parallel GPUs. + torch.distributed.all_reduce( + moe_norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_expert_tensor_model_pipeline_parallel_group() + ) + norm_2 += moe_norm_2 return norm_2.item() ** 0.5 @@ -284,7 +309,7 @@ def print_rank_last(message): def append_to_progress_log(string, barrier=True): - """ Append given string to progress log.
""" + """Append given string to progress log.""" args = get_args() if args.save is None: return @@ -299,6 +324,54 @@ def append_to_progress_log(string, barrier=True): f"# GPUs: {num_gpus}\t{string}\n") +def get_blend_and_blend_per_split(args): + """Get blend and blend_per_split from passed-in arguments.""" + use_data_path = args.data_path is not None or \ + args.data_args_path is not None + use_per_split_data_path = any( + elt is not None + for elt in [args.train_data_path, + args.valid_data_path, + args.test_data_path]) or \ + args.per_split_data_args_path is not None + + blend = None + blend_per_split = None + if use_data_path: + if args.data_args_path is not None: + assert args.data_path is None + with open(args.data_args_path, 'r') as f: + blend = get_blend_from_list(f.read().split()) + else: + assert args.data_path is not None + blend = get_blend_from_list(args.data_path) + elif use_per_split_data_path: + if args.per_split_data_args_path is not None: + with open(args.per_split_data_args_path, 'r') as f: + per_split_data_args = json.load(f) + # Each element in blend_per_split should be a list of files (and optional + # weights), so split string if needed. + for split in ["train", "valid", "test"]: + if isinstance(per_split_data_args[split], str): + per_split_data_args[split] = per_split_data_args[split].split() + + blend_per_split = [ + get_blend_from_list(per_split_data_args["train"]), + get_blend_from_list(per_split_data_args["valid"]), + get_blend_from_list(per_split_data_args["test"]) + ] + else: + blend_per_split = [ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path) + ] + else: + blend, blend_per_split = None, None + + return blend, blend_per_split + + def get_batch_on_this_tp_rank(data_iterator): args = get_args() diff --git a/megatron/training/yaml_arguments.py b/megatron/training/yaml_arguments.py index 3c6c39b07..622c917ee 100644 --- a/megatron/training/yaml_arguments.py +++ b/megatron/training/yaml_arguments.py @@ -17,6 +17,7 @@ import torch.nn.functional as F from megatron.core.transformer import TransformerConfig, MLATransformerConfig +from megatron.core.utils import get_torch_version, is_torch_min_version # Taken from https://stackoverflow.com/questions/65414773/parse-environment-variable-from-yaml-with-pyyaml # Allows for yaml to use environment variables @@ -274,10 +275,8 @@ def validate_yaml(args, defaults={}): assert args.start_weight_decay is not None assert args.end_weight_decay is not None - TORCH_MAJOR = int(torch.__version__.split('.')[0]) - TORCH_MINOR = int(torch.__version__.split('.')[1]) # Persistent fused layer norm. - if TORCH_MAJOR < 1 or (TORCH_MAJOR == 1 and TORCH_MINOR < 11): + if not is_torch_min_version("1.11.0a0"): args.language_model.persist_layer_norm = False if args.rank == 0: print('Persistent fused layer norm kernel is supported from ' @@ -295,10 +294,10 @@ def validate_yaml(args, defaults={}): assert args.language_model.recompute_method is not None, \ 'for distributed recompute activations to work you '\ 'need to use a recompute method ' - assert (TORCH_MAJOR, TORCH_MINOR) >= (1, 10), \ + assert is_torch_min_version("1.10.0a0"), \ 'distributed recompute activations are supported for pytorch ' \ 'v1.10 and above (Nvidia Pytorch container >= 21.07). Current ' \ - 'pytorch version is v%s.%s.' % (TORCH_MAJOR, TORCH_MINOR) + f'pytorch version is v{get_torch_version()}.' 
if args.language_model.recompute_granularity == 'selective': assert args.language_model.recompute_method is None, \ diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..ab82d9108 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,11 @@ +[mypy] +ignore_missing_imports = True +check_untyped_defs = False +disallow_untyped_calls = False +disallow_untyped_defs = False +disallow_incomplete_defs = False + +disable_error_code = call-arg,operator,var-annotated,union-attr,import-untyped + +# Enable only `assignment` error checking +enable_error_code = assignment \ No newline at end of file diff --git a/pretrain_gpt.py b/pretrain_gpt.py index 3b7f8db01..77314a1df 100644 --- a/pretrain_gpt.py +++ b/pretrain_gpt.py @@ -7,7 +7,7 @@ from contextlib import nullcontext import inspect -from typing import Union +from typing import List, Optional, Tuple, Union from megatron.training import get_args from megatron.training import print_rank_0 from megatron.training import get_timers @@ -15,7 +15,6 @@ from megatron.core import mpu from megatron.core.enums import ModelType from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.utils import get_blend_from_list from megatron.core.datasets.gpt_dataset import GPTDatasetConfig from megatron.core.datasets.gpt_dataset import MockGPTDataset, GPTDataset import megatron.legacy.model @@ -26,6 +25,7 @@ from megatron.training.utils import ( get_batch_on_this_cp_rank, get_batch_on_this_tp_rank, + get_blend_and_blend_per_split, ) from megatron.training.arguments import core_transformer_config_from_args from megatron.training.yaml_arguments import core_transformer_config_from_yaml @@ -53,6 +53,14 @@ def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megat args = get_args() use_te = args.transformer_impl == "transformer_engine" + if args.record_memory_history: + torch.cuda.memory._record_memory_history(True, + # keep 100,000 alloc/free events from before the snapshot + trace_alloc_max_entries=100000, + + # record stack information for the trace events + trace_alloc_record_context=True) + print_rank_0('building GPT model ...') # Experimental loading arguments from yaml if args.yaml_cfg is not None: @@ -73,9 +81,13 @@ def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megat transformer_layer_spec = import_module(args.spec) else: if use_te: - transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec(args.num_experts, args.moe_grouped_gemm, args.qk_layernorm, args.multi_latent_attention, args.fp8) + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + args.num_experts, args.moe_grouped_gemm, + args.qk_layernorm, args.multi_latent_attention, args.fp8) else: - transformer_layer_spec = get_gpt_layer_local_spec(args.num_experts, args.moe_grouped_gemm, args.qk_layernorm, args.multi_latent_attention) + transformer_layer_spec = get_gpt_layer_local_spec( + args.num_experts, args.moe_grouped_gemm, + args.qk_layernorm, args.multi_latent_attention) build_model_context = nullcontext build_model_context_args = {} @@ -205,15 +217,16 @@ def is_dataset_built_on_rank(): def core_gpt_dataset_config_from_args(args): tokenizer = get_tokenizer() + # Sometimes --data-path is too long, instead we parse it from a file. 
+ blend: Optional[Tuple[List[str], Optional[List[float]]]] + blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] + blend, blend_per_split = get_blend_and_blend_per_split(args) + return GPTDatasetConfig( random_seed=args.seed, sequence_length=args.seq_length, - blend=get_blend_from_list(args.data_path), - blend_per_split=[ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) - ], + blend=blend, + blend_per_split=blend_per_split, renormalize_blend_weights=args.renormalize_blend_weights, split=args.split, num_dataset_builder_threads=args.num_dataset_builder_threads, @@ -224,7 +237,7 @@ def core_gpt_dataset_config_from_args(args): reset_attention_mask=args.reset_attention_mask, eod_mask_loss=args.eod_mask_loss, create_attention_mask=args.create_attention_mask_in_dataloader, - s3_cache_path = args.s3_cache_path + s3_cache_path=args.s3_cache_path, ) diff --git a/pretrain_mamba.py b/pretrain_mamba.py index f8202b6ea..6b9b86a03 100644 --- a/pretrain_mamba.py +++ b/pretrain_mamba.py @@ -4,16 +4,15 @@ import os import torch from functools import partial +from typing import List, Optional, Tuple, Union from megatron.training import get_args from megatron.training import print_rank_0 from megatron.training import get_timers from megatron.training import get_tokenizer from megatron.core import mpu -# from megatron.core import parallel_state from megatron.core.enums import ModelType from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.utils import get_blend_from_list from megatron.core.datasets.gpt_dataset import GPTDatasetConfig from megatron.core.datasets.gpt_dataset import MockGPTDataset, GPTDataset from megatron.core.models.mamba import MambaModel @@ -23,6 +22,7 @@ from megatron.training.utils import ( get_batch_on_this_cp_rank, get_batch_on_this_tp_rank, + get_blend_and_blend_per_split, ) from megatron.training.arguments import core_transformer_config_from_args from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec @@ -179,15 +179,16 @@ def is_dataset_built_on_rank(): def core_gpt_dataset_config_from_args(args): tokenizer = get_tokenizer() + # Sometimes --data-path is too long, instead we parse it from a file. 
+ blend: Optional[Tuple[List[str], Optional[List[float]]]] + blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] + blend, blend_per_split = get_blend_and_blend_per_split(args) + return GPTDatasetConfig( random_seed=args.seed, sequence_length=args.seq_length, - blend=get_blend_from_list(args.data_path), - blend_per_split=[ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) - ], + blend=blend, + blend_per_split=blend_per_split, renormalize_blend_weights=args.renormalize_blend_weights, split=args.split, num_dataset_builder_threads=args.num_dataset_builder_threads, @@ -198,6 +199,7 @@ def core_gpt_dataset_config_from_args(args): reset_attention_mask=args.reset_attention_mask, eod_mask_loss=args.eod_mask_loss, create_attention_mask=args.create_attention_mask_in_dataloader, + s3_cache_path=args.s3_cache_path, ) diff --git a/pretrain_t5.py b/pretrain_t5.py index 253d4b19c..21e5d4d06 100644 --- a/pretrain_t5.py +++ b/pretrain_t5.py @@ -8,30 +8,24 @@ import torch -from megatron.training import ( - get_args, - get_timers, - get_tokenizer, - print_rank_0 -) +import megatron from megatron.core import mpu, tensor_parallel from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder from megatron.core.datasets.t5_dataset import ( T5MaskedWordPieceDataset, T5MaskedWordPieceDatasetConfig, ) +from megatron.core.datasets.utils import get_blend_from_list from megatron.core.enums import ModelType from megatron.core.models.T5 import T5Model -from megatron.training import pretrain +from megatron.core.models.T5.t5_spec import ( + get_t5_decoder_with_local_block_spec, + get_t5_decoder_with_transformer_engine_block_spec, + get_t5_encoder_with_local_block_spec, + get_t5_encoder_with_transformer_engine_block_spec, +) +from megatron.training import get_args, get_timers, get_tokenizer, pretrain, print_rank_0 from megatron.training.arguments import core_transformer_config_from_args -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.t5_dataset import T5MaskedWordPieceDataset, T5MaskedWordPieceDatasetConfig -from megatron.core.datasets.utils import get_blend_from_list -from megatron.core.models.T5.t5_spec import (get_t5_encoder_with_transformer_engine_block_spec, - get_t5_decoder_with_transformer_engine_block_spec, - get_t5_encoder_with_local_block_spec, - get_t5_decoder_with_local_block_spec) -from megatron.legacy.model import T5Model as LegacyT5Model from pretrain_gpt import loss_func """ @@ -71,12 +65,14 @@ def model_provider( pre_process=True, post_process=True, add_encoder=True, add_decoder=True -) -> Union[LegacyT5Model, T5Model]: +) -> Union[megatron.legacy.model.T5Model, T5Model]: """Builds the model. Args: - pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. - post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. + pre_process (bool, optional): Set to true if you need to + compute embedings. Defaults to True. + post_process (bool, optional): Set to true if you need to want to + compute output logits/loss. Defaults to True. 
add_encoder (bool, optional): Defaults to True add_decoder (bool, optional): Defaults to True Returns: @@ -86,13 +82,14 @@ def model_provider( args = get_args() assert ( - args.encoder_tensor_model_parallel_size == 0 or - args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size - ), f"Because word embeddings are shared between the encoder & decoder, these have to have the same tensor parallel size." + args.encoder_tensor_model_parallel_size == 0 + or args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size + ), f"Because word embeddings are shared between the encoder & decoder, these \ + have to have the same tensor parallel size." config = core_transformer_config_from_args(args) if args.use_legacy_models: - model = LegacyT5Model( + model = megatron.legacy.model.T5Model( config=config, num_tokentypes=0, parallel_output=True, @@ -106,12 +103,16 @@ def model_provider( encoder_config.num_layers = args.encoder_num_layers if args.pipeline_model_parallel_size > 1: - assert args.encoder_pipeline_model_parallel_size > 0, "Need to know how to shard the encoder & decoder." + assert ( + args.encoder_pipeline_model_parallel_size > 0 + ), "Need to know how to shard the encoder & decoder." if args.encoder_pipeline_model_parallel_size > 0: encoder_config.pipeline_model_parallel_size = args.encoder_pipeline_model_parallel_size - encoder_layers_per_pipeline = encoder_config.num_layers // encoder_config.pipeline_model_parallel_size + encoder_layers_per_pipeline = ( + encoder_config.num_layers // encoder_config.pipeline_model_parallel_size + ) decoder_layers_per_pipeline = config.num_layers // config.pipeline_model_parallel_size if args.transformer_impl == "local": @@ -141,16 +142,16 @@ def model_provider( position_embedding_type=args.position_embedding_type, rotary_percent=args.rotary_percent, add_encoder=add_encoder, - add_decoder=add_decoder + add_decoder=add_decoder, ) return model -def get_batch(data_iterator): +def get_batch(data_iterator, use_local): """Build the batch.""" - keys = ['text_enc', 'text_dec', 'labels', 'loss_mask', 'enc_mask', 'dec_mask', 'enc_dec_mask'] + keys = ['text_enc', 'text_dec', 'labels', 'loss_mask', 'enc_mask', 'dec_mask'] datatype = torch.int64 # Broadcast data. @@ -165,10 +166,14 @@ def get_batch(data_iterator): tokens_dec = data_b['text_dec'].long() labels = data_b['labels'].long() loss_mask = data_b['loss_mask'].float() - enc_mask = data_b['enc_mask'] < 0.5 dec_mask = data_b['dec_mask'] < 0.5 - enc_dec_mask = data_b['enc_dec_mask'] < 0.5 + + # Configure attention mask based on different conditions + # (e.g., transformer-impl, TE versions, TE backends) + enc_mask, dec_mask, enc_dec_mask = T5MaskedWordPieceDataset.config_attention_mask( + tokens_enc, tokens_dec, enc_mask, dec_mask, use_local + ) return tokens_enc, tokens_dec, loss_mask, labels, enc_mask, dec_mask, enc_dec_mask @@ -186,8 +191,9 @@ def forward_step(data_iterator, model: T5Model): # Get the batch. timers('batch generator', log_level=2).start() + use_local = args.transformer_impl == "local" tokens_enc, tokens_dec, loss_mask, lm_labels, enc_mask, dec_mask, enc_dec_mask = get_batch( - data_iterator + data_iterator, use_local ) timers('batch generator').stop() @@ -203,7 +209,8 @@ def train_valid_test_datasets_provider(train_val_test_num_samples: int): """Build the train test and validation datasets. Args: - train_val_test_num_samples : A list containing the number of samples in train test and validation. 
+ train_val_test_num_samples : A list containing the number of samples + in train test and validation. """ args = get_args() @@ -217,7 +224,7 @@ def train_valid_test_datasets_provider(train_val_test_num_samples: int): blend_per_split=[ get_blend_from_list(args.train_data_path), get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) + get_blend_from_list(args.test_data_path), ], renormalize_blend_weights=args.renormalize_blend_weights, split=args.split, @@ -247,7 +254,8 @@ def train_valid_test_datasets_provider(train_val_test_num_samples: int): def t5_embedding_ranks(pp_ranks): - """T5's embedding ranks consist of the encoder's first rank, and the decoder's first & last ranks. + """T5's embedding ranks consist of the encoder's first rank, and + the decoder's first & last ranks. Args: pp_ranks: A list of global ranks that constitute a pipeline group. """ diff --git a/pretrain_vlm.py b/pretrain_vlm.py index 6b1848e96..605634060 100644 --- a/pretrain_vlm.py +++ b/pretrain_vlm.py @@ -11,7 +11,8 @@ from megatron.core.datasets.multimodal_dataset import MockMultimodalDataset, MultimodalDatasetConfig from megatron.core.enums import ModelType from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings -from megatron.core.models.multimodal.llava_model import LLaVAModel, IMAGE_TOKEN_INDEX +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.models.multimodal.llava_model import LLaVAModel, DEFAULT_IMAGE_TOKEN_INDEX from megatron.core.models.multimodal.llava_spec import ( decoder_model_with_transformer_engine_default_spec, decoder_model_with_local_default_spec, @@ -21,10 +22,42 @@ get_vit_layer_with_local_spec, ) from megatron.core.transformer.spec_utils import import_module +from megatron.core.packed_seq_params import PackedSeqParams from megatron.training import get_args, get_timers, get_tokenizer, pretrain, print_rank_0 from megatron.training.arguments import core_transformer_config_from_args +from megatron.training.utils import get_batch_on_this_cp_rank +from megatron.core import mpu from pretrain_gpt import loss_func +def calculate_model_parallel_padding(decoder_seq_len, text_only=False): + args = get_args() + cp_size = args.context_parallel_size + tp_size = args.tensor_model_parallel_size + + mp_padding_needed = 0 + # TP Comm overlap is performed with combined text+image embeddings. 
+ # text_only flag skips using the full sequence length to calculate padding and uses + # the provided decoder_seq_len + if args.sequence_parallel and args.decoder_tp_comm_overlap and not text_only: + # If TP Comm Overlap is enabled for combined text+image embedding in LM backbone, + # user needs to provide decoder_seq_length with any potential padding needed for SP+CP + assert args.decoder_seq_length is not None, \ + "Please provide --decoder-seq-length when using TP Comm overlap for LM backbone" + mp_padding_needed = args.decoder_seq_length - decoder_seq_len + elif args.sequence_parallel or cp_size > 1: + if args.sequence_parallel and cp_size > 1: + # Padding to multiple of tp_size * cp_size*2 when using sequence parallel and context parallel + padding_factor = tp_size * cp_size * 2 + elif cp_size > 1: + padding_factor = cp_size * 2 + elif args.sequence_parallel: + padding_factor = tp_size + mp_padding_needed = int((decoder_seq_len + padding_factor - 1) // (padding_factor) * (padding_factor)) - decoder_seq_len + args.decoder_seq_length = decoder_seq_len + mp_padding_needed + else: + args.decoder_seq_length = decoder_seq_len + + return mp_padding_needed def model_provider( pre_process=True, post_process=True, add_encoder=True, add_decoder=True, parallel_output=True @@ -48,13 +81,26 @@ def model_provider( args = get_args() vision_model_type = "clip" + assert args.ckpt_format == 'torch', "Only ckpt-format torch is supported for VLM training currently." + + if args.pipeline_model_parallel_size > 1: + assert not args.freeze_LM, "Freezing a pipeline parallel language model is not currently supported" + + if args.encoder_pipeline_model_parallel_size == 1: + assert not args.freeze_ViT, "Freezing a vision encoder on its own pipeline rank is not currently supported" + num_image_embeddings = get_num_image_embeddings( - args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, 1 + args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, + class_token_len=1, pixel_shuffle=False, use_tile_tags=False ) old_seq_length = args.seq_length - # decoder_seq_length denotes the language model sequence length. - args.decoder_seq_length = args.seq_length + num_image_embeddings + # dataloader-seq-length is required to determine the length of text seq len + if args.dataloader_seq_length is None: + args.dataloader_seq_length = args.seq_length + + # decoder_seq_len denotes the language model sequence length. + decoder_seq_len = args.dataloader_seq_length + num_image_embeddings # seq_length and encoder_seq_length denote the vision model sequence length. Override if the user provided something else. 
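The padding rule in calculate_model_parallel_padding comes down to rounding the decoder sequence length up to a multiple of a parallelism-dependent factor. A minimal standalone sketch of that arithmetic with invented sizes follows; it is not the shipped function, which also handles the --decoder-tp-comm-overlap branch shown above.

def mp_padding(decoder_seq_len, tp_size, cp_size, sequence_parallel):
    # Pick the alignment factor the same way the VLM script does.
    if sequence_parallel and cp_size > 1:
        padding_factor = tp_size * cp_size * 2
    elif cp_size > 1:
        padding_factor = cp_size * 2
    elif sequence_parallel:
        padding_factor = tp_size
    else:
        return 0
    # Round up to the next multiple of padding_factor.
    padded = (decoder_seq_len + padding_factor - 1) // padding_factor * padding_factor
    return padded - decoder_seq_len

# Example: 4100 tokens, TP=2, CP=2 with sequence parallelism -> align to 8.
print(mp_padding(4100, tp_size=2, cp_size=2, sequence_parallel=True))  # 4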
args.seq_length = args.encoder_seq_length = num_image_embeddings @@ -62,11 +108,16 @@ def model_provider( warnings.warn( f"Changed seq_length and encoder_seq_length (vision model sequence length) from {old_seq_length} to num_image_tokens ({num_image_embeddings})" ) + mp_padding_needed = calculate_model_parallel_padding(decoder_seq_len) args.max_position_embeddings = max(args.max_position_embeddings, args.decoder_seq_length) print_rank_0('building a multimodal model ...') language_transformer_config = core_transformer_config_from_args(get_args()) + if args.decoder_tp_comm_overlap: + assert args.transformer_impl == "transformer_engine", \ + "TransformerEngine is needed to support Decoder TP Comm overlap" + language_transformer_config.tp_comm_overlap = args.decoder_tp_comm_overlap if args.spec is not None: language_transformer_layer_spec = import_module(args.spec) @@ -78,6 +129,13 @@ def model_provider( language_transformer_layer_spec = decoder_model_with_local_default_spec( args.num_experts, args.moe_grouped_gemm ) + + # Prepare mask type for any required padding to support CP/SP sequence sharding. + if mp_padding_needed > 0: + if language_transformer_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') == AttnMaskType.causal: + language_transformer_layer_spec.submodules.self_attention.params['attn_mask_type'] = AttnMaskType.padding_causal + elif language_transformer_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') == AttnMaskType.no_mask: + language_transformer_layer_spec.submodules.self_attention.params['attn_mask_type'] = AttnMaskType.padding if args.transformer_impl == "transformer_engine": vision_transformer_layer_spec = get_vit_layer_with_transformer_engine_spec() @@ -90,9 +148,23 @@ def model_provider( vision_transformer_config.first_pipeline_num_layers = None vision_transformer_config.last_pipeline_num_layers = None vision_transformer_config.vision_model_type = vision_model_type + vision_transformer_config.context_parallel_size = 1 # Force CP=1 for Vision Transformer + if vision_transformer_config.sequence_parallel: + print_rank_0("> Disabling Sequence parallelism in Vision Transformer. Not yet supported") + vision_transformer_config.sequence_parallel = False + if vision_transformer_config.tp_comm_overlap: + print_rank_0("> Disabling TP Comm overlap in Vision Transformer. Not yet supported") + vision_transformer_config.tp_comm_overlap = False vision_projection_type = "mlp" vision_projection_config = deepcopy(language_transformer_config) + vision_projection_config.context_parallel_size = 1 # Force CP=1 for Vision Projection + if vision_projection_config.sequence_parallel: + print_rank_0("> Disabling Sequence parallelism in Vision Projection. Not yet supported") + vision_projection_config.sequence_parallel = False + if vision_projection_config.tp_comm_overlap: + print_rank_0("> Disabling TP Comm overlap in Vision Projection. 
Not yet supported") + vision_projection_config.tp_comm_overlap = False if args.encoder_pipeline_model_parallel_size > 0: assert ( @@ -121,7 +193,7 @@ def model_provider( language_transformer_config=language_transformer_config, language_transformer_layer_spec=language_transformer_layer_spec, language_vocab_size=args.padded_vocab_size, - language_max_sequence_length=args.max_position_embeddings, + language_max_sequence_length=args.decoder_seq_length, vision_transformer_config=vision_transformer_config, vision_transformer_layer_spec=vision_transformer_layer_spec, drop_vision_class_token=args.disable_vision_class_token, @@ -164,7 +236,7 @@ def train_valid_test_datasets_provider(train_val_test_num_samples): config = MultimodalDatasetConfig( random_seed=args.seed, split=args.split, - sequence_length=args.decoder_seq_length - args.seq_length, + sequence_length=args.dataloader_seq_length, tokenizer=get_tokenizer(), reset_position_ids=args.reset_position_ids, reset_attention_mask=args.reset_attention_mask, @@ -202,7 +274,7 @@ def _preprocess_data_for_llava(data): # Prepend image token index to tokens. data["tokens"] = torch.cat( [ - IMAGE_TOKEN_INDEX + DEFAULT_IMAGE_TOKEN_INDEX * torch.ones(1, dtype=data["tokens"].dtype, device=data["tokens"].device), data["tokens"], ] @@ -223,7 +295,6 @@ def _preprocess_data_for_llava(data): return data - def get_batch(data_iterator): """Generate a batch. @@ -233,6 +304,35 @@ def get_batch(data_iterator): Returns: sample: A data sample with images, tokens, etc. """ + def _get_packed_seq_params(tokens, img_seq_len, mp_padding_needed): + batch_size = tokens.shape[0] + # Calculate the valid token seq len that LM backbone should compute on + combined_valid_seqlen = tokens.shape[1] + img_seq_len - mp_padding_needed + cu_seqlens = torch.arange( + 0, (batch_size + 1) * (combined_valid_seqlen), step=(combined_valid_seqlen), dtype=torch.int32, device=tokens.device) + # Calculate the total padded token seq len + combined_padded_seqlen = tokens.shape[1] + img_seq_len + cu_seqlens_padded = None + qkv_format = 'sbhd' + if cp_size > 1: + # Provide cu_seqlens__padded for CP support + cu_seqlens_padded = torch.arange( + 0, (batch_size + 1) * (combined_padded_seqlen), step=(combined_padded_seqlen), dtype=torch.int32, device=tokens.device) + # CP with padding mask type requires THD format + qkv_format = 'thd' + packed_seq_params = PackedSeqParams( + cu_seqlens_q=cu_seqlens, + cu_seqlens_kv=cu_seqlens, + cu_seqlens_q_padded=cu_seqlens_padded, + cu_seqlens_kv_padded=cu_seqlens_padded, + max_seqlen_q=combined_padded_seqlen, + max_seqlen_kv=combined_padded_seqlen, + qkv_format=qkv_format, + ) + return packed_seq_params + + args = get_args() + cp_size = args.context_parallel_size # Broadcast data. if data_iterator is not None: data = next(data_iterator) @@ -242,14 +342,37 @@ def get_batch(data_iterator): data_i = tensor_parallel.broadcast_data(["tokens", "position_ids", "labels"], data, torch.int64) data_f = tensor_parallel.broadcast_data(["image", "loss_mask"], data, torch.float32) + batch = dict() + packed_seq_params = None + image_token_mask = None + # Create batch with tokens and position_ids for CP sharding. 
tokens = data_i["tokens"].long() position_ids = data_i["position_ids"].long() labels = data_i["labels"].long() - images = data_f["image"].float() loss_mask = data_f["loss_mask"].float() + images = data_f["image"].float() + + if cp_size > 1 or args.sequence_parallel: + vision_model_type = "clip" + # Calculate the number of image embedding tokens will be added to text tokens + num_image_embeddings_per_tile = get_num_image_embeddings( + args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, 1 + ) + # Pad to make sure the text sequence can be sharded equally by CP chunks. + mp_padding_needed_for_text = calculate_model_parallel_padding(tokens.shape[1], text_only=True) + if mp_padding_needed_for_text > 0: + tokens, position_ids, labels, loss_mask = [torch.nn.functional.pad(item, (0, mp_padding_needed_for_text)) for item in (tokens, position_ids, labels, loss_mask)] + # Image token mask must be supplied before distributed sequence to CP ranks. + image_token_mask = tokens == DEFAULT_IMAGE_TOKEN_INDEX + num_images_per_sample = torch.sum(image_token_mask, dim=-1) + img_seq_len = (num_image_embeddings_per_tile * num_images_per_sample - num_images_per_sample).max() + packed_seq_params = _get_packed_seq_params(tokens, img_seq_len, mp_padding_needed_for_text) + + # slice batch along sequence dimension for context parallelism + batch = get_batch_on_this_cp_rank({"tokens": tokens, "position_ids": position_ids}) attention_mask = None # Use the attention mask type defined in layer spec. Typically no mask for the vision model and causal mask for the vision model. - return tokens, position_ids, labels, images, loss_mask, attention_mask + return batch["tokens"], batch["position_ids"], labels, images, loss_mask, attention_mask, image_token_mask, packed_seq_params def forward_step(data_iterator, model: LLaVAModel): @@ -267,11 +390,11 @@ def forward_step(data_iterator, model: LLaVAModel): # Get the batch. timers('batch-generator', log_level=2).start() - tokens, position_ids, labels, images, loss_mask, attention_mask = get_batch(data_iterator) + tokens, position_ids, labels, images, loss_mask, attention_mask, image_token_mask, packed_seq_params = get_batch(data_iterator) timers('batch-generator').stop() output_tensor, loss_mask = model( - images, tokens, position_ids, attention_mask, labels, loss_mask + images, tokens, position_ids, attention_mask, labels, loss_mask, image_token_mask=image_token_mask, packed_seq_params=packed_seq_params ) return output_tensor, partial(loss_func, loss_mask) @@ -292,6 +415,10 @@ def add_vlm_extra_args(parser): default=False, help="Drop vision model class token", ) + group.add_argument("--dataloader-seq-length", type=int, help="Make dataloader to produce sequences of specific length.") + group.add_argument("--decoder-tp-comm-overlap", action="store_true", default=False, help="Enables the overlap of " + "Tensor parallel communication and GEMM kernels in Decoder only. 
" + "Please provide decoder-seq-length when using this feature.") return parser diff --git a/pyproject.toml b/pyproject.toml index a4fb32980..7e27c2a69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,9 +49,6 @@ classifiers = [ "Topic :: Utilities", ] -[tool.setuptools.dynamic] -dependencies = { file = ["megatron/core/requirements.txt"] } - [project.urls] Download = "https://github.com/NVIDIA/Megatron-LM/releases" Homepage = "https://github.com/NVIDIA/Megatron-LM/megatron/core" diff --git a/requirements/pytorch:24.01/requirements.txt b/requirements/pytorch:24.01/requirements.txt new file mode 100644 index 000000000..0fe7b926d --- /dev/null +++ b/requirements/pytorch:24.01/requirements.txt @@ -0,0 +1,15 @@ +einops +flask-restful +nltk +pytest +pytest-cov +pytest_mock +pytest-random-order +sentencepiece +tiktoken +wrapt +zarr +wandb +triton==2.1.0 +tensorstore==0.1.45 +nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" \ No newline at end of file diff --git a/requirements/pytorch:24.07/requirements.txt b/requirements/pytorch:24.07/requirements.txt new file mode 100644 index 000000000..2fe096fb2 --- /dev/null +++ b/requirements/pytorch:24.07/requirements.txt @@ -0,0 +1,14 @@ +einops +flask-restful +nltk +pytest +pytest-cov +pytest_mock +pytest-random-order +sentencepiece +tiktoken +wrapt +zarr +wandb +tensorstore==0.1.45 +nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" \ No newline at end of file diff --git a/setup.py b/setup.py index adb00629a..73f20775a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ import importlib.util import subprocess - +import os import setuptools from setuptools import Extension @@ -27,17 +27,23 @@ long_description = fh.read() long_description_content_type = "text/markdown" + +def req_file(filename, folder="requirements"): + environment = os.getenv("PY_ENV", "pytorch:24.07") + + with open(os.path.join(folder, environment, filename), encoding='utf-8') as f: + content = f.readlines() + # you may also want to remove whitespace characters + # Example: `\n` at the end of each line + return [x.strip() for x in content] + + +install_requires = req_file("requirements.txt") + ############################################################################### # Extension Making # # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # -extra_compile_args = ( - subprocess.check_output(["python3", "-m", "pybind11", "--includes"]) - .decode("utf-8") - .strip() - .split() -) - ############################################################################### setuptools.setup( @@ -99,11 +105,19 @@ "megatron.core.datasets.helpers", sources=["megatron/core/datasets/helpers.cpp"], language="c++", - extra_compile_args=extra_compile_args, + extra_compile_args=( + subprocess.check_output(["python3", "-m", "pybind11", "--includes"]) + .decode("utf-8") + .strip() + .split() + ) + + ['-O3', '-Wall', '-std=c++17'], + optional=True, ) ], # Add in any packaged data. include_package_data=True, # PyPI package information. 
keywords=__keywords__, + install_requires=install_requires, ) diff --git a/tests/functional_tests/jet_recipes/multimodal-llava.yaml b/tests/functional_tests/jet_recipes/multimodal-llava.yaml deleted file mode 100644 index 3149f5664..000000000 --- a/tests/functional_tests/jet_recipes/multimodal-llava.yaml +++ /dev/null @@ -1,36 +0,0 @@ -type: basic -format_version: 1 -maintainers: [mcore] -loggers: [stdout] -spec: - name: "{test_case}" - model: multimodal-llava - build: mcore-pyt - nodes: 1 - gpus: 8 - platforms: dgx_a100 - time_limit: 12000 - scope: null - script: |- - ls - cd /opt/megatron-lm - - ARGUMENTS=( - "DATA_PATH='-'" - "DATA_CACHE_PATH='-'" - "OUTPUT_PATH={assets_dir}" - "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" - "TRAINING_SCRIPT_PATH=pretrain_vlm.py" - "TEST_CASE_PATH=./tests/functional_tests/test_cases/{model}/{test_case}" - ) - - bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} - -products: - - scope: [mr] - test_case: - - multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G - - multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G - - multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G - - multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G \ No newline at end of file diff --git a/tests/functional_tests/python_test_utils/common.py b/tests/functional_tests/python_test_utils/common.py index 3a9fd359a..1b21fa81d 100644 --- a/tests/functional_tests/python_test_utils/common.py +++ b/tests/functional_tests/python_test_utils/common.py @@ -26,7 +26,7 @@ class TypeOfTest(enum.Enum): } METRIC_TO_THRESHOLD = { - "iteration-time": 0.5, + "iteration-time": 0.8, "mem-allocated-bytes": 3 * 1000 * 1000, # 3MB "lm loss": 0.05, } @@ -53,18 +53,30 @@ def read_tb_logs_as_list(path, index=0): return summaries files.sort(key=lambda x: os.path.getmtime(os.path.join(path, x))) - - event_file = files[index] - ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) - ea.Reload() - - for scalar_name in ea.Tags()["scalars"]: - summaries[scalar_name] = [round(x.value, 5) for x in ea.Scalars(scalar_name)] - - print( - f"Extracted {len(summaries[scalar_name])} values of {scalar_name} from Tensorboard \ -logs. Here are the first 5 values: {summaries[scalar_name][:5]}" - ) + accumulators = [] + + if index == -1: + for event_file in files: + ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) + ea.Reload() + accumulators.append(ea) + else: + event_file = files[index] + ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) + ea.Reload() + accumulators.append(ea) + + for ea in accumulators: + for scalar_name in ea.Tags()["scalars"]: + if scalar_name in summaries: + summaries[scalar_name] += [round(x.value, 5) for x in ea.Scalars(scalar_name)] + else: + summaries[scalar_name] = [round(x.value, 5) for x in ea.Scalars(scalar_name)] + + print( + f"Extracted {len(summaries[scalar_name])} values of {scalar_name} from Tensorboard \ + logs. 
Here are the first 5 values: {summaries[scalar_name][:5]}" + ) return summaries @@ -72,6 +84,9 @@ def read_tb_logs_as_list(path, index=0): def load_expected_data(): expected_metrics_file = os.getenv("EXPECTED_METRICS_FILE") + if expected_metrics_file is None: + raise ValueError("Unknown EXPECTED_METRICS_FILE") + with open(expected_metrics_file) as f: if os.path.exists(expected_metrics_file): with open(expected_metrics_file) as f: diff --git a/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py b/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py index 3c0b67ed3..d046b2534 100644 --- a/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py +++ b/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py @@ -11,8 +11,14 @@ @click.command() @click.option("--logs-dir", required=True, type=str, help="Path to Tensorboard logs") @click.option("--output-path", required=False, type=str, help="Path to write golden values") -def collect_train_test_metrics(logs_dir: str, output_path: str): - summaries = common.read_tb_logs_as_list(logs_dir) +@click.option( + "--is-convergence-test/--is-normal-test", + type=bool, + help="Tensorboard index to extract", + default=False, +) +def collect_train_test_metrics(logs_dir: str, output_path: str, is_convergence_test: bool): + summaries = common.read_tb_logs_as_list(logs_dir, index=-1 if is_convergence_test else 0) train_metrics = { metric_name: { diff --git a/tests/functional_tests/python_test_utils/jet/generate_jet_trigger_job.py b/tests/functional_tests/python_test_utils/jet/generate_jet_trigger_job.py deleted file mode 100644 index 436b08444..000000000 --- a/tests/functional_tests/python_test_utils/jet/generate_jet_trigger_job.py +++ /dev/null @@ -1,113 +0,0 @@ -import pathlib -from typing import Optional - -import click -import yaml - -from tests.functional_tests.python_test_utils.jet import common - -BASE_PATH = pathlib.Path(__file__).parent.resolve() - - -@click.command() -@click.option("--scope", required=True, type=str, help="Test scope") -@click.option("--a100-cluster", required=True, type=str, help="A100 Cluster to run on") -@click.option("--h100-cluster", required=True, type=str, help="H100 Cluster to run on") -@click.option("--output-path", required=True, type=str, help="Path to write GitLab job to") -@click.option("--container-image", required=True, type=str, help="LTS Container tag to use") -@click.option("--container-image-dev", required=True, type=str, help="Dev Container tag to use") -@click.option("--container-tag", required=True, type=str, help="Container tag to use") -@click.option( - "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" -) -@click.option( - "--wandb-experiment", - required=False, - type=str, - help="Wandb experiment (only relevant for release tests)", -) -def main( - scope: str, - a100_cluster: str, - h100_cluster: str, - output_path: str, - container_image: str, - container_image_dev: str, - container_tag: str, - run_name: Optional[str] = None, - wandb_experiment: Optional[str] = None, -): - test_cases = [ - test_case - for test_case in common.load_workloads(scope=scope, container_tag=container_tag) - if test_case.type != "build" - ] - - gitlab_pipeline = { - "stages": list(set([test_case.spec.model for test_case in test_cases])), - "default": {"interruptible": True}, - } - - for test_case in test_cases: - if test_case.spec.platforms == "dgx_a100": - cluster = a100_cluster - 
elif test_case.spec.platforms == "dgx_h100": - cluster = h100_cluster - else: - raise ValueError(f"Platform {test_case.spec.platforms} unknown") - - script = [ - "export PYTHONPATH=$(pwd); " - "python tests/functional_tests/python_test_utils/jet/launch_jet_workload.py", - f"--model {test_case.spec.model}", - f"--test-case {test_case.spec.test_case}", - f"--container-tag {container_tag}", - f"--cluster {cluster}", - ] - - with open( - pathlib.Path( - BASE_PATH - / ".." - / ".." - / "test_cases" - / test_case.spec.model - / test_case.spec.test_case - / "model_config.yaml" - ) - ) as stream: - try: - test_case_dict = yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - - if 'EXPERIMENTAL' in test_case_dict and test_case_dict['EXPERIMENTAL']: - script.append(f"--container-image {container_image_dev}") - - if run_name is not None and wandb_experiment is not None: - script.append(f"--run-name {run_name}") - test_case.spec.model - script.append( - f"--wandb-experiment {wandb_experiment}-{test_case.spec.model}-{test_case.spec.test_case}" - ) - - gitlab_pipeline[test_case.spec.test_case] = { - "stage": f"{test_case.spec.model}", - "image": f"{container_image}:{container_tag}", - "tags": ["mcore-docker-node-jet"], - "rules": [ - {"if": '$CI_PIPELINE_SOURCE == "parent_pipeline"'}, - {"if": '$CI_MERGE_REQUEST_ID'}, - ], - "timeout": "7 days", - "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": "jet-generate"}], - "script": [" ".join(script)], - "artifacts": {"paths": ["results/"], "when": "always"}, - } - - with open(output_path, 'w') as outfile: - yaml.dump(gitlab_pipeline, outfile, default_flow_style=False) - - -if __name__ == "__main__": - main() diff --git a/tests/functional_tests/python_test_utils/jet/launch_jet_workload.py b/tests/functional_tests/python_test_utils/jet/launch_jet_workload.py deleted file mode 100644 index 3e243c542..000000000 --- a/tests/functional_tests/python_test_utils/jet/launch_jet_workload.py +++ /dev/null @@ -1,216 +0,0 @@ -import os -import pathlib -import re -import signal -import sys -import tempfile -from typing import List, Optional, Tuple - -import click -import jetclient -import yaml -from jetclient.services.dtos.pipeline import PipelineStatus - -from tests.functional_tests.python_test_utils.jet import common - -BASE_PATH = pathlib.Path(__file__).parent.resolve() - - -def resolve_cluster_config(cluster: str) -> str: - if cluster == "dgxh100_eos": - return "mcore/eos" - if cluster == "dgxa100_dracooci": - return "mcore/draco-oci" - if cluster == "dgxa100_dracooci-ord": - return "mcore/draco-oci-ord" - if cluster == "dgxh100_coreweave": - return "mcore/coreweave" - raise ValueError(f"Unknown cluster {cluster} provided.") - - -def register_pipeline_terminator(pipeline: jetclient.JETPipeline): - def sigterm_handler(_signo, _stack_frame): - print(f"Trying to terminate pipeline {pipeline.jet_id}") - pipeline.cancel() - print(f"Pipeline {pipeline.jet_id} terminated") - sys.exit(0) - - signal.signal(signal.SIGINT, sigterm_handler) - signal.signal(signal.SIGTERM, sigterm_handler) - - -def launch_and_wait_for_completion( - test_case: str, - container_image: str, - container_tag: str, - cluster: str, - account: str, - run_name: Optional[str], - wandb_experiment: Optional[str], -) -> jetclient.JETPipeline: - pipeline = jetclient.JETClient( - customer='mcore', gitlab_ci_token=os.getenv("RO_API_TOKEN"), env="prod" - ).workloads.submit( - workloads=common.load_workloads( - test_case=test_case, container_image=container_image, container_tag=container_tag - ), - 
config_id=resolve_cluster_config(cluster), - custom_config={ - "launchers": {cluster: {"account": account}}, - "executors": { - "jet-ci": { - "environments": { - cluster: { - "variables": { - "RUN_NAME": run_name or "", - "WANDB_API_KEY": os.getenv("WANDB_API_KEY") or "", - "WANDB_EXPERIMENT": wandb_experiment or "", - } - } - } - } - }, - }, - wait_for_validation=True, - ) - - register_pipeline_terminator(pipeline=pipeline) - - print( - f"Pipeline triggered; inspect it here: https://gitlab-master.nvidia.com/dl/jet/ci/-/pipelines/{pipeline.jet_id}", - flush=True, - ) - - pipeline.wait(max_wait_time=60 * 60 * 24 * 7) - print(f"Pipeline terminated; status: {pipeline.get_status()}") - return pipeline - - -def download_job_assets(job: jetclient.JETJob, iteration: int = 0) -> List[str]: - logs = job.get_logs() - if not logs: - return [""] - - assets_base_path = BASE_PATH / ".." / ".." / ".." / ".." / "results" / f"iteration={iteration}" - - for restart_idx, log in enumerate(logs): - assets = log.get_assets() - assets_path = assets_base_path / f"restart={restart_idx}" - assets_path.mkdir(parents=True, exist_ok=True) - for log_filename in assets.keys(): - with open(assets_path / log_filename, "w") as fh: - assets[log_filename].download(pathlib.Path(fh.name)) - - -def download_job_logs(job: jetclient.JETJob) -> List[str]: - logs = job.get_logs() - if not logs: - return [""] - - assets = logs[0].get_assets() - log_filename = [key for key in assets.keys() if key.endswith(".log")][0] - - with tempfile.NamedTemporaryFile() as tmp_file: - assets[log_filename].download(pathlib.Path(tmp_file.name)) - with open(pathlib.Path(tmp_file.name), "r") as fh: - return fh.readlines() - - -def parse_iterations_from_logs(logs: List[str]) -> Optional[Tuple[int, int]]: - for log_row in logs[::-1]: - match = re.search(r"iteration\s+(\d+)\s*/\s*(\d+)", log_row) - if match is not None: - return int(match.group(1)), int(match.group(2)) - - -@click.command() -@click.option("--model", required=True, type=str, help="Model") -@click.option("--test-case", required=True, type=str, help="Test case") -@click.option( - "--account", - required=False, - type=str, - help="Slurm account to use", - default="coreai_dlalgo_mcore", -) -@click.option("--cluster", required=True, type=str, help="Cluster to run on") -@click.option("--container-tag", required=True, type=str, help="Base image of Mcore image") -@click.option("--container-image", required=False, type=str, help="Base image of Mcore image") -@click.option( - "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" -) -@click.option( - "--wandb-experiment", - required=False, - type=str, - help="Wandb experiment (only relevant for release tests)", -) -def main( - model: str, - test_case: str, - account: str, - cluster: str, - container_tag: str, - container_image: Optional[str] = None, - run_name: Optional[str] = None, - wandb_experiment: Optional[str] = None, -): - - with open( - pathlib.Path( - BASE_PATH / ".." / ".." 
/ "test_cases" / model / test_case / "model_config.yaml" - ) - ) as stream: - try: - test_case_dict = yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - - test_type = test_case_dict['TEST_TYPE'] - - if test_type == "release" and (run_name is None or wandb_experiment is None): - print(f"Not all arguments provided ({run_name=}, {wandb_experiment=})") - sys.exit(1) - - n_attempts = 0 - n_iteration = 0 - while True and n_attempts < 3: - pipeline = launch_and_wait_for_completion( - test_case=test_case, - container_image=container_image, - container_tag=container_tag, - cluster=cluster, - account=account, - run_name=run_name, - wandb_experiment=wandb_experiment, - ) - - main_job = [job for job in pipeline.get_jobs() if job.name.startswith("basic")][0] - - logs = download_job_logs(job=main_job) - concat_logs = "\n".join(logs) - print(f"Logs:\n{concat_logs}") - - download_job_assets(job=main_job, iteration=n_iteration) - - if test_type != "release": - success = pipeline.get_status() == PipelineStatus.SUCCESS - sys.exit(int(not success)) # invert for exit 0 - - parsed_result = parse_iterations_from_logs(logs=logs) - if not parsed_result: - print("Weird log, no iterations found") - n_attempts += 1 - continue - - current_iteration, total_iterations = parsed_result - if current_iteration == total_iterations: - - success = pipeline.get_status() == PipelineStatus.SUCCESS - sys.exit(int(not success)) # invert for exit 0 - n_iteration += 1 - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/tests/functional_tests/shell_test_utils/_run_training.sh b/tests/functional_tests/shell_test_utils/_run_training.sh index 847f93613..b7757ce1c 100644 --- a/tests/functional_tests/shell_test_utils/_run_training.sh +++ b/tests/functional_tests/shell_test_utils/_run_training.sh @@ -37,12 +37,15 @@ for mandatory_var in "${MANDATORY_VARS[@]}"; do fi done +cp $TRAINING_PARAMS_PATH "$TRAINING_PARAMS_PATH.${SLURM_PROCID}" +TRAINING_PARAMS_PATH="$TRAINING_PARAMS_PATH.${SLURM_PROCID}" + # Envsubst model_params cat $TRAINING_PARAMS_PATH | envsubst "$(env | cut -d= -f1 | sed -e 's/^/$/')" >$TRAINING_PARAMS_PATH.tmp -mv $TRAINING_PARAMS_PATH.tmp $TRAINING_PARAMS_PATH +mv $TRAINING_PARAMS_PATH.tmp "$TRAINING_PARAMS_PATH" # Pull env vars to export -ENV_VARS=$(yq '... comments="" | .ENV_VARS | to_entries | .[] | [.key + "=" + .value] | join(" ")' $TRAINING_PARAMS_PATH) +ENV_VARS=$(yq '... comments="" | .ENV_VARS | to_entries | .[] | [.key + "=" + .value] | join(" ")' "$TRAINING_PARAMS_PATH") while IFS= read -r ARGUMENT; do KEY=$(echo $ARGUMENT | cut -f1 -d=) @@ -54,7 +57,7 @@ while IFS= read -r ARGUMENT; do done <<< "$ENV_VARS" # Run before script -SCRIPT=$(cat $TRAINING_PARAMS_PATH | yq '.BEFORE_SCRIPT') +SCRIPT=$(cat "$TRAINING_PARAMS_PATH" | yq '.BEFORE_SCRIPT') if [[ "$SCRIPT" != null ]]; then eval "$SCRIPT" fi; @@ -62,19 +65,19 @@ fi; # Exit earlier to leave time for properly saving checkpoint if [[ $(echo "$TRAINING_SCRIPT_PATH" | tr '[:upper:]' '[:lower:]') == *nemo* ]]; then PARAMS="" - TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .MODEL_ARGS | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + "=" + .value] | join("")' $TRAINING_PARAMS_PATH | tr '\n' ' ') + TRAINING_PARAMS_FROM_CONFIG=$(yq '... 
comments="" | .MODEL_ARGS | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + "=" + .value] | join("")' "$TRAINING_PARAMS_PATH" | tr '\n' ' ') else # If this is a second run (of checkpoint-resume), we might want to use a # different model configuration than during first time. So if key `MODEL_ARGS_2` # exists we use it, otherwise we use the same as for the first run. - if [[ $RUN_NUMBER -eq 2 && $(yq 'has("MODEL_ARGS_2")' $TRAINING_PARAMS_PATH) == true ]]; then + if [[ $RUN_NUMBER -eq 2 && $(yq 'has("MODEL_ARGS_2")' "$TRAINING_PARAMS_PATH") == true ]]; then export KEY="MODEL_ARGS_2" else export KEY="MODEL_ARGS" fi - TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .[env(KEY)] | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + " " + .value] | join("")' $TRAINING_PARAMS_PATH | tr '\n' ' ') + TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .[env(KEY)] | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + " " + .value] | join("")' "$TRAINING_PARAMS_PATH" | tr '\n' ' ') PARAMS="--exit-duration-in-mins $((($SLURM_JOB_END_TIME - $SLURM_JOB_START_TIME) / 60 - 15))" fi @@ -85,21 +88,6 @@ PARAMS="$PARAMS $TRAINING_PARAMS_FROM_CONFIG" export PYTHONPATH="$(pwd):${PYTHONPATH:-}" export WANDB_API_KEY="${WANDB_API_KEY:-}" -######## Distributed training settings. ######## -echo "------ARGUMENTS for SLURM ---" -MASTER_ADDR=${MASTER_ADDR:-localhost} -MASTER_PORT=${MASTER_PORT:-6000} -NUM_NODES=${NUM_NODES:-${SLURM_NNODES}} -GPUS_PER_NODE=${GPUS_PER_NODE:-8} -NODE_RANK=${SLURM_NODEID:-${SLURM_NODEID}} -DISTRIBUTED_ARGS=( - --nproc_per_node $GPUS_PER_NODE - --nnodes $NUM_NODES - --master_addr $MASTER_ADDR - --master_port $MASTER_PORT - --node_rank $SLURM_NODEID -) - # Start training -torchrun ${DISTRIBUTED_ARGS[@]} $TRAINING_SCRIPT_PATH $PARAMS +python $TRAINING_SCRIPT_PATH $PARAMS diff --git a/tests/functional_tests/shell_test_utils/notify.sh b/tests/functional_tests/shell_test_utils/notify.sh deleted file mode 100644 index 1bb2ea5c3..000000000 --- a/tests/functional_tests/shell_test_utils/notify.sh +++ /dev/null @@ -1,192 +0,0 @@ -set -euxo pipefail - -collect_jobs () { - PAGE=1 - PER_PAGE=100 - RESULTS="[]" - - while true; do - # Fetch the paginated results - RESPONSE=$(curl \ - -s \ - --globoff \ - --header "PRIVATE-TOKEN: $RO_API_TOKEN" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${DOWNSTREAM_PIPELINE_ID}/jobs?page=$PAGE&per_page=$PER_PAGE" - ) - # Combine the results - RESULTS=$(jq -s '.[0] + .[1]' <<< "$RESULTS $RESPONSE") - - # Check if there are more pages - if [[ $(jq 'length' <<< "$RESPONSE") -lt $PER_PAGE ]]; then - break - fi - - # Increment the page number - PAGE=$((PAGE + 1)) - done - - echo "$RESULTS" -} - -CI_PIPELINE_ID=${1:-16595865} -CI_PROJECT_ID=${CI_PROJECT_ID:-19378} - -# Fetch Elastic logs -set +x -PIPELINE_JSON=$(curl \ - --fail \ - --silent \ - --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/bridges?per_page=100" - ) || ret_code=$? 
-set -x -if [[ ${ret_code:-0} -ne 0 ]]; then - echo CI_PIPELINE_ID=$CI_PIPELINE_ID does not exist - exit 1 -fi - -# Fetch GitLab logs of JET downstream pipeline -DOWNSTREAM_PIPELINE_ID=$(jq '.[0].downstream_pipeline.id' <<< "$PIPELINE_JSON") - -PIPELINE_URL=https://${GITLAB_ENDPOINT}/ADLR/megatron-lm/-/pipelines/$CI_PIPELINE_ID -JOB_URL=https://${GITLAB_ENDPOINT}/ADLR/megatron-lm/-/jobs/ - -if [[ $DOWNSTREAM_PIPELINE_ID == null ]]; then - FAILED_JOBS=$(curl \ - --fail \ - --silent \ - --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/jobs?per_page=100" \ - | jq --arg JOB_URL "$JOB_URL" '[.[] | select(.status == "failed") | ("<" + $JOB_URL + (.id | tostring) + "|" + .name + ">")] | join("\n• Job: ")' | tr -d '"') - curl \ - -X POST \ - -H "Content-type: application/json" \ - --data ' - { - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<'$PIPELINE_URL'|Report of '$DATE' ('$CONTEXT')>:\n" - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "\n• Job: '"$FAILED_JOBS"'" - } - }, - ] - - }' \ - $WEBHOOK_URL - -else - set +x - JOBS=$(echo "$(collect_jobs)" | jq '[.[] | {id, name, status}]') - echo $JOBS - set -x - - FAILED_JOBS=$(echo "$JOBS" \ - | jq --arg GITLAB_ENDPOINT "$GITLAB_ENDPOINT" '[ - .[] - | select(.status != "success") - | { - name, - id, - "url": ("https://" + $GITLAB_ENDPOINT + "/dl/jet/ci/-/jobs/" + (.id | tostring)), - } - ]' - ) - set -x - - for row in $(echo "${FAILED_JOBS}" | jq -r '.[] | @base64'); do - _jq() { - echo ${row} | base64 --decode | jq -r ${1} - } - JOB_ID=$(_jq '.id') - FULL_LOG=$(curl \ - --location \ - --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/jobs/${JOB_ID}/trace") - - if [[ "$FULL_LOG" == *exception* ]]; then - LAST_EXCEPTION_POS=$(echo "$FULL_LOG" | grep -o -b 'exception' | tail -1 | cut -d: -f1) - SHORT_LOG=${FULL_LOG:$LAST_EXCEPTION_POS-500:499} - else - SHORT_LOG=${FULL_LOG: -1000} - fi - - FAILED_JOBS=$(echo "$FAILED_JOBS" \ - | jq \ - --argjson JOB_ID "$JOB_ID" \ - --arg SLURM_FAILURE "$SHORT_LOG" ' - .[] |= ((select(.id==$JOB_ID) += { - "slurm_failure_reason": $SLURM_FAILURE})) - ') - done - - NUM_FAILED=$(echo "$FAILED_JOBS" | jq 'length') - NUM_TOTAL=$(echo "$JOBS" | jq 'length') - - if [[ $NUM_FAILED -eq 0 ]]; then - BLOCKS='[ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ":doge3d: <'$PIPELINE_URL'|Report of '$DATE' ('$CONTEXT')>: All '$NUM_TOTAL' passed" - } - } - ]' - else - BLOCKS=$(echo "$FAILED_JOBS" \ - | jq --arg DATE "$DATE" --arg CONTEXT "$CONTEXT" --arg URL "$PIPELINE_URL" --arg NUM_FAILED "$NUM_FAILED" --arg NUM_TOTAL "$NUM_TOTAL" ' - [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": (":doctorge: <" + $URL + "|Report of " + $DATE + " (" + $CONTEXT + ")>: " + $NUM_FAILED + " of " + $NUM_TOTAL + " failed") - } - } - ] + [ - .[] - | { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ( - "• Job: <" +.url + "|" + .name + ">" - + "\n SLURM failure reason: \n```" + .slurm_failure_reason + "```" - - ) - } - } - ] + [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ("===============================================") - } - } - ]' - ) - fi - - for row in $(echo "${BLOCKS}" | jq -r '.[] | @base64'); do - _jq() { - echo ${row} | base64 --decode - } - - curl \ - -X POST \ - -H "Content-type: application/json" \ - --data '{"blocks": '["$(_jq)"]'}' \ - $WEBHOOK_URL - done - -fi 
\ No newline at end of file diff --git a/tests/functional_tests/shell_test_utils/notify_unit_tests.sh b/tests/functional_tests/shell_test_utils/notify_unit_tests.sh deleted file mode 100644 index 46be8b078..000000000 --- a/tests/functional_tests/shell_test_utils/notify_unit_tests.sh +++ /dev/null @@ -1,186 +0,0 @@ -set -euxo pipefail - -collect_jobs () { - PAGE=1 - PER_PAGE=100 - RESULTS="[]" - - while true; do - # Fetch the paginated results - RESPONSE=$(curl \ - -s \ - --globoff \ - --header "PRIVATE-TOKEN: $RO_API_TOKEN" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${DOWNSTREAM_PIPELINE_ID}/jobs?page=$PAGE&per_page=$PER_PAGE" - ) - # Combine the results - RESULTS=$(jq -s '.[0] + .[1]' <<< "$RESULTS $RESPONSE") - - # Check if there are more pages - if [[ $(jq 'length' <<< "$RESPONSE") -lt $PER_PAGE ]]; then - break - fi - - # Increment the page number - PAGE=$((PAGE + 1)) - done - - echo "$RESULTS" -} - -CI_PIPELINE_ID=${1:-16595865} -CI_PROJECT_ID=${CI_PROJECT_ID:-19378} -PIPELINE_URL=https://${GITLAB_ENDPOINT}/ADLR/megatron-lm/-/pipelines/$CI_PIPELINE_ID -JOB_URL=https://${GITLAB_ENDPOINT}/ADLR/megatron-lm/-/jobs/ -CONTEXT="unit-tests-extended" - -# Fetch Elastic logs -set +x -PIPELINE_JSON=$(curl \ - --fail \ - --silent \ - --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/jobs" - ) || ret_code=$? -set -x -if [[ ${ret_code:-0} -ne 0 ]]; then - echo CI_PIPELINE_ID=$CI_PIPELINE_ID does not exist - exit 1 -fi - -UNIT_TESTS_JOBS=$(echo -E $PIPELINE_JSON | jq '[.[] | select(.name | startswith("unit_tests"))]') - -if [[ $UNIT_TESTS_JOBS == null ]]; then - FAILED_JOBS=$(curl \ - --fail \ - --silent \ - --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/jobs?per_page=100" \ - | jq --arg JOB_URL "$JOB_URL" '[.[] | select(.status == "failed") | ("<" + $JOB_URL + (.id | tostring) + "|" + .name + ">")] | join("\n• Job: ")' | tr -d '"') - curl \ - -X POST \ - -H "Content-type: application/json" \ - --data ' - { - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<'$PIPELINE_URL'|Report of '$DATE' ('$CONTEXT')>:\n" - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "\n• Job: '"$FAILED_JOBS"'" - } - }, - ] - - }' \ - $WEBHOOK_URL - -else - FAILED_JOBS=$(echo -E "$UNIT_TESTS_JOBS" \ - | jq --arg GITLAB_ENDPOINT "$GITLAB_ENDPOINT" --arg JOB_URL "$JOB_URL" '[ - .[] - | select(.status != "success") - | { - name, - id, - "url": ($JOB_URL + (.id | tostring)), - } - ]' - ) - set -x - - for row in $(echo "${FAILED_JOBS}" | jq -r '.[] | @base64'); do - _jq() { - echo ${row} | base64 --decode | jq -r ${1} - } - JOB_ID=$(_jq '.id') - FULL_LOG=$(curl \ - --location \ - --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ - "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/jobs/${JOB_ID}/trace") - - if [[ "$FULL_LOG" == *exception* ]]; then - LAST_EXCEPTION_POS=$(echo "$FULL_LOG" | grep -o -b 'exception' | tail -1 | cut -d: -f1) - SHORT_LOG=${FULL_LOG:$LAST_EXCEPTION_POS-500:499} - else - SHORT_LOG=${FULL_LOG: -1000} - fi - - FAILED_JOBS=$(echo "$FAILED_JOBS" \ - | jq \ - --argjson JOB_ID "$JOB_ID" \ - --arg SLURM_FAILURE "$SHORT_LOG" ' - .[] |= ((select(.id==$JOB_ID) += { - "slurm_failure_reason": $SLURM_FAILURE})) - ') - done - - NUM_FAILED=$(echo "$FAILED_JOBS" | jq 'length') - NUM_TOTAL=$(echo "$UNIT_TESTS_JOBS" | jq 'length') - - if [[ $NUM_FAILED -eq 0 
]]; then - BLOCKS='[ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ":doge3d: <'$PIPELINE_URL'|Report of '$DATE' ('$CONTEXT')>: All '$NUM_TOTAL' passed" - } - } - ]' - else - BLOCKS=$(echo "$FAILED_JOBS" \ - | jq --arg DATE "$DATE" --arg CONTEXT "$CONTEXT" --arg URL "$PIPELINE_URL" --arg NUM_FAILED "$NUM_FAILED" --arg NUM_TOTAL "$NUM_TOTAL" ' - [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": (":doctorge: <" + $URL + "|Report of " + $DATE + " (" + $CONTEXT + ")>: " + $NUM_FAILED + " of " + $NUM_TOTAL + " failed") - } - } - ] + [ - .[] - | { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ( - "• Job: <" +.url + "|" + .name + ">" - + "\n SLURM failure reason: \n```" + .slurm_failure_reason + "```" - - ) - } - } - ] + [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": ("===============================================") - } - } - ]' - ) - fi - - for row in $(echo "${BLOCKS}" | jq -r '.[] | @base64'); do - _jq() { - echo ${row} | base64 --decode - } - - curl \ - -X POST \ - -H "Content-type: application/json" \ - --data '{"blocks": '["$(_jq)"]'}' \ - $WEBHOOK_URL - done - -fi \ No newline at end of file diff --git a/tests/functional_tests/shell_test_utils/run_ci_test.sh b/tests/functional_tests/shell_test_utils/run_ci_test.sh index c9c16b43c..e585ab7c3 100644 --- a/tests/functional_tests/shell_test_utils/run_ci_test.sh +++ b/tests/functional_tests/shell_test_utils/run_ci_test.sh @@ -17,7 +17,8 @@ echo "---------------------------------" # Check that mandatory vars are set MANDATORY_VARS=( "TRAINING_SCRIPT_PATH" - "TEST_CASE_PATH" + "TRAINING_PARAMS_PATH" + "GOLDEN_VALUES_PATH" "OUTPUT_PATH" "TENSORBOARD_PATH" "CHECKPOINT_PATH" @@ -31,9 +32,6 @@ for mandatory_var in "${MANDATORY_VARS[@]}"; do fi done -export TRAINING_PARAMS_PATH=$TEST_CASE_PATH/model_config.yaml -export GOLDEN_VALUES_PATH=$TEST_CASE_PATH/golden_values.json - SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) ROOT_DIR=$(realpath $SCRIPT_DIR/../../../) @@ -44,10 +42,8 @@ NVTE_ALLOW_NONDETERMINISTIC_ALGO=$(cat $TRAINING_PARAMS_PATH \ | yq '.ENV_VARS.NVTE_ALLOW_NONDETERMINISTIC_ALGO') SKIP_PYTEST=$(cat $TRAINING_PARAMS_PATH \ | yq '.ENV_VARS.SKIP_PYTEST') -N_REPEATS=$(cat $TRAINING_PARAMS_PATH \ - | yq '.ENV_VARS.N_REPEATS //1') -for i in $(seq 1 $N_REPEATS); +for i in $(seq 1 $N_REPEAT); do if [[ $i -gt 1 ]]; then rm -rf $CHECKPOINT_PATH/* @@ -59,17 +55,30 @@ do # Maybe checkpoint resume training if [[ "$TEST_TYPE" == "ckpt-resume" ]]; then - rm -rf $CHECKPOINT_PATH/iter_0000100; - echo 50 > $CHECKPOINT_PATH/latest_checkpointed_iteration.txt; + if [[ ${SLURM_PROCID} -eq 0 ]]; then + rm -rf $CHECKPOINT_PATH/iter_0000100; + echo 50 > $CHECKPOINT_PATH/latest_checkpointed_iteration.txt; + fi + export RUN_NUMBER=2 bash $ROOT_DIR/tests/functional_tests/shell_test_utils/_run_training.sh fi + if [[ ${SLURM_PROCID} -gt 0 ]]; then + continue + fi + # Save run results export PYTHONPATH=$ROOT_DIR + if [[ "$TEST_TYPE" == "release" ]]; then + EXTRACT_ARGS=("--is-convergence-test") + else + EXTRACT_ARGS=("--is-normal-test") + fi python3 $ROOT_DIR/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py \ --logs-dir $TENSORBOARD_PATH \ - --output-path ${OUTPUT_PATH}/$(basename $GOLDEN_VALUES_PATH) + --output-path ${OUTPUT_PATH}/$(basename $GOLDEN_VALUES_PATH) \ + "${EXTRACT_ARGS[@]}" # Maybe run tests if [[ ${SKIP_PYTEST:-0} != 1 ]]; then diff --git 
a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..0f6772f01 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,52 @@ +{ "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.49569, + 10.48173, + 10.48047, + 10.45353, + 10.44394, + 10.35611, + 10.13779, + 10.04017, + 9.86834, + 9.67307 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 2254.0, + 2585.0, + 2101.0, + 2157.0, + 2241.0, + 2475.0, + 2890.0, + 3199.0, + 3524.0, + 3090.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 13.65829, + 1.27589, + 1.2782, + 1.32374, + 1.26543, + 1.26423, + 1.26203, + 1.54723, + 1.27297, + 1.26491 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 704fd1ce5..d9268d02e 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..a1443c913 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,70 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.49566, + 10.48172, + 10.48046, + 10.45369, + 10.44391, + 10.35613, + 10.13791, + 10.04025, + 9.86848, + 9.67328 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 2183.0, + 2571.0, + 2097.0, + 2118.0, + 2414.0, + 2464.0, + 2988.0, + 3223.0, + 3481.0, + 3046.0 + ] + }, + "mem-allocated-bytes": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1767237120.0, + 1767237632.0, + 1767237632.0, + 1767237632.0, + 1767237632.0, + 1767237632.0, + 1767237632.0, + 1767237632.0, + 1767237632.0, + 1767237632.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 13.74859, + 1.16037, + 1.15664, + 1.28303, + 1.16087, + 1.1576, + 1.15188, + 1.1644, + 1.15171, + 1.38366 + ] + } +} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml index eaf288d30..207acb5aa 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index 7072374fa..a8fb42075 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml index f3afb10fd..10fbeb700 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..83fd26794 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.54308, 10.53881, 10.55633, 10.53805, 10.52589, 10.49568, 10.45958, 10.32846, 10.17264, 9.96952]}, "num-zeros": 
{"start_step": 0, "end_step": 34, "step_interval": 5, "values": [22584.0, 20590.0, 27442.0, 22852.0, 22567.0, 20740.0, 23315.0]}, "iteration_timing_avg": 0.7692817647058824} diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml index 1e8f60479..991dfae68 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 66ab6cabf..cfc4827a2 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..5e5b76276 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.43755, 10.43587, 10.44704, 10.44395, 10.44965, 10.44295, 10.32757, 10.23341, 10.09049, 9.93294]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [27979.0, 20991.0, 29735.0, 24779.0, 26808.0, 33075.0, 24387.0]}, "iteration_timing_avg": 0.7523635294117648} diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 94d2f2fec..c3c70f8b0 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 2f6d24e94..9ffa49327 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json new file mode 100644 index 000000000..bfc68cb54 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.49411, + 10.4825, + 10.49242, + 10.47802, + 10.46608, + 10.35193, + 10.17693, + 10.07728, + 9.88753, + 9.68034 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1931.0, + 2555.0, + 2017.0, + 2135.0, + 2440.0, + 2464.0, + 3070.0, + 3006.0, + 2932.0, + 2303.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.94975, + 0.67196, + 0.67378, + 0.66862, + 0.69618, + 0.66936, + 0.67757, + 0.67189, + 0.67519, + 0.67762 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml similarity index 97% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml index cb94c9c91..73ad47092 100644 --- a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 @@ -43,4 +42,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json new file mode 100644 index 000000000..915df9667 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.46796, + 10.45723, + 10.44911, + 10.44107, + 10.41739, + 10.34626, + 10.11387, + 10.0439, + 9.86702, + 9.679 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 
50, + "step_interval": 5, + "values": [ + 2404.0, + 2610.0, + 2173.0, + 2312.0, + 2371.0, + 2652.0, + 3089.0, + 3200.0, + 3497.0, + 3075.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 15.80389, + 0.94155, + 0.88518, + 1.22442, + 0.86955, + 0.85166, + 1.02329, + 1.07525, + 0.90283, + 0.88308 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml similarity index 97% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml index 3dd071d3d..29fa50cab 100644 --- a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 @@ -44,4 +43,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json new file mode 100644 index 000000000..65e3ca244 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.42085, + 10.42901, + 10.43576, + 10.40804, + 10.38463, + 10.32426, + 10.13148, + 10.04317, + 9.86257, + 9.65771 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 3252.0, + 2595.0, + 3240.0, + 3429.0, + 3463.0, + 3509.0, + 4065.0, + 4114.0, + 4651.0, + 4253.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.83012, + 2.26196, + 2.22779, + 2.22677, + 2.23847, + 2.24307, + 2.23859, + 2.23544, + 2.2414, + 2.25107 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json diff --git 
a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml similarity index 97% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml index 6d39266da..d8fb0dc61 100644 --- a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml @@ -5,7 +5,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 @@ -43,4 +42,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json new file mode 100644 index 000000000..428150fc3 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json @@ -0,0 +1,50 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.49101, + 10.49526, + 10.48682, + 10.48817, + 10.49415, + 10.4724, + 10.42265, + 10.29901, + 10.1572, + 9.97594 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 12.56945, + 0.58599, + 0.58451, + 0.68178, + 0.6056, + 0.609, + 0.59965, + 0.60618, + 0.60152, + 0.59945 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 34, + "step_interval": 5, + "values": [ + 17032.0, + 16918.0, + 19957.0, + 18761.0, + 25689.0, + 19897.0, + 22224.0 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml similarity index 97% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml index 989988f7c..2d35954bf 100644 --- a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml @@ -6,7 +6,6 @@ ENV_VARS: NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 NVTE_APPLY_QK_LAYER_SCALING: 1 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 @@ -46,4 +45,4 @@ MODEL_ARGS: --fp16: true --apply-query-key-layer-scaling: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git 
a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json new file mode 100644 index 000000000..9cd1672cf --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json @@ -0,0 +1,50 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.49734, + 10.49243, + 10.49325, + 10.50311, + 10.48985, + 10.4721, + 10.41217, + 10.2805, + 10.14052, + 9.94191 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 8.58282, + 2.06311, + 2.05789, + 2.24493, + 2.05273, + 2.05118, + 2.05666, + 2.04533, + 2.05152, + 2.04761 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 34, + "step_interval": 5, + "values": [ + 26081.0, + 18799.0, + 24479.0, + 23782.0, + 21056.0, + 19877.0, + 19774.0 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values.json rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml similarity index 96% rename from tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml rename to tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml index edcf75a77..abc650a5e 100644 --- a/tests/functional_tests/test_cases/bert/bert_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml @@ -6,7 +6,6 @@ ENV_VARS: NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 NVTE_APPLY_QK_LAYER_SCALING: 1 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 24 --hidden-size: 1024 @@ -43,7 +42,7 @@ MODEL_ARGS: --deterministic-mode: true --no-gradient-accumulation-fusion: true --data-cache-path: ${DATA_CACHE_PATH} - --fp16: true + --fp16: true --apply-query-key-layer-scaling: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.8.0.json b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.8.0.json deleted file mode 100644 index cd3708942..000000000 --- a/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.8.0.json +++ /dev/null @@ -1,6590 +0,0 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 16335, - "step_interval": 5, - "values": [ - 10.53793, - 10.53833, - 10.57328, - 10.53546, - 10.07398, - 9.7437, - 9.42134, - 9.37734, - 9.23363, - 9.19234, - 8.97735, - 8.9212, - 8.71322, - 8.6598, - 8.60404, - 8.35312, - 8.22921, - 8.17413, - 7.70251, - 7.94843, - 7.75401, - 7.6155, - 7.57677, - 7.57115, - 7.46261, - 7.3348, - 7.34965, - 7.21065, - 7.2967, - 7.51623, - 7.50848, - 7.13886, - 7.26099, - 7.22096, - 7.33946, - 7.29352, - 7.13829, - 7.33535, - 7.46038, - 7.35064, - 7.16396, - 7.3037, - 7.1074, - 7.22845, - 7.0236, - 7.38542, - 7.13949, - 7.35053, - 
7.19933, - 7.16134, - 7.49269, - 7.24922, - 7.12929, - 7.10281, - 7.04489, - 7.23503, - 7.05831, - 7.2197, - 7.43084, - 7.22903, - 7.13581, - 6.87717, - 6.99137, - 6.74988, - 7.0204, - 7.00762, - 7.15195, - 7.0732, - 7.04017, - 6.91983, - 7.26792, - 7.03561, - 6.89552, - 7.00603, - 7.08591, - 7.13913, - 6.68255, - 7.00998, - 7.14783, - 7.03557, - 6.80588, - 7.0735, - 7.04492, - 6.89815, - 6.7917, - 7.02153, - 6.91982, - 7.09829, - 7.02664, - 6.9825, - 6.87097, - 6.7737, - 7.15663, - 6.84695, - 6.63555, - 6.78703, - 7.23335, - 6.78468, - 6.839, - 7.1042, - 6.97448, - 7.06354, - 6.94179, - 6.87885, - 6.75294, - 6.72927, - 7.07929, - 6.83135, - 6.9368, - 6.89887, - 6.86077, - 6.86416, - 6.91727, - 6.83948, - 6.91308, - 6.95168, - 6.79076, - 6.6855, - 6.78904, - 6.69888, - 7.00146, - 6.86774, - 6.88572, - 6.80512, - 6.90702, - 6.72501, - 6.86568, - 7.0434, - 6.54832, - 6.81509, - 6.91147, - 6.86305, - 6.9005, - 6.81867, - 6.82176, - 6.64392, - 6.5638, - 6.77185, - 6.81198, - 6.79084, - 6.93628, - 6.82454, - 6.80167, - 6.76513, - 6.57557, - 6.43356, - 6.69509, - 6.80516, - 6.65939, - 6.92698, - 6.8058, - 6.72331, - 6.78141, - 6.75542, - 6.79796, - 6.6264, - 6.86748, - 6.36556, - 6.78603, - 7.00148, - 6.77036, - 6.91134, - 6.71107, - 6.77084, - 6.8175, - 6.45329, - 6.51056, - 7.04084, - 6.70346, - 6.71543, - 6.88176, - 6.88362, - 6.64275, - 6.36647, - 6.49632, - 6.56393, - 6.51217, - 6.75527, - 6.80634, - 6.46915, - 6.8323, - 6.54895, - 6.74257, - 6.49547, - 6.80514, - 6.62616, - 6.69978, - 6.58011, - 6.30268, - 6.76174, - 6.24135, - 6.63064, - 6.67607, - 6.82092, - 6.66534, - 6.57511, - 6.58103, - 6.76152, - 6.65552, - 6.45148, - 6.77848, - 6.61225, - 6.43268, - 6.7872, - 6.68052, - 6.97383, - 6.83668, - 6.11858, - 6.50668, - 6.36788, - 6.86786, - 6.70669, - 6.78096, - 6.33542, - 6.67341, - 6.75006, - 6.60192, - 6.57628, - 6.54004, - 6.71131, - 6.57678, - 6.74634, - 6.45335, - 6.72892, - 6.90587, - 6.5513, - 6.71344, - 6.74165, - 6.72742, - 6.74569, - 6.33972, - 6.52666, - 6.36364, - 6.65061, - 6.71181, - 6.86922, - 6.69166, - 6.8349, - 6.79604, - 6.38846, - 6.7216, - 6.75765, - 6.1974, - 6.45594, - 6.53824, - 6.93955, - 6.70867, - 6.55834, - 6.53449, - 6.8526, - 6.4796, - 6.48663, - 6.86959, - 6.27279, - 6.84281, - 6.39654, - 6.66493, - 6.56859, - 6.46318, - 6.75265, - 6.59639, - 6.65157, - 6.52565, - 6.23494, - 6.54594, - 6.43118, - 6.44598, - 6.36322, - 6.54569, - 6.46544, - 6.60581, - 6.58219, - 6.63418, - 6.30714, - 6.50061, - 6.44069, - 6.49446, - 6.67531, - 6.64179, - 6.40956, - 6.65959, - 6.66559, - 6.45583, - 6.45205, - 6.56506, - 6.5485, - 6.46778, - 6.51845, - 6.73219, - 6.5964, - 6.09757, - 6.49973, - 6.50196, - 6.49873, - 6.67664, - 6.47666, - 6.34272, - 6.25304, - 6.3851, - 6.60383, - 6.33063, - 6.32831, - 6.40469, - 6.61802, - 6.62854, - 6.73167, - 6.51272, - 6.54725, - 6.59096, - 6.52632, - 6.81511, - 6.5014, - 6.31227, - 6.33856, - 6.6418, - 6.39458, - 6.44231, - 6.38421, - 6.31583, - 6.58783, - 6.30739, - 6.21895, - 6.28344, - 6.55022, - 6.3775, - 6.75864, - 6.55435, - 6.94564, - 6.31112, - 6.71671, - 6.25305, - 6.29523, - 6.4124, - 6.56301, - 6.7562, - 6.49733, - 6.63249, - 6.29465, - 6.27924, - 6.68726, - 6.30938, - 6.38028, - 6.57888, - 6.42417, - 6.38214, - 6.12301, - 6.49907, - 6.25454, - 6.33313, - 6.35794, - 6.50602, - 6.02649, - 6.61622, - 6.34758, - 6.35316, - 6.37007, - 6.31706, - 6.23337, - 6.38233, - 6.402, - 6.5168, - 6.42076, - 6.35078, - 6.32276, - 6.43155, - 6.2052, - 6.3692, - 6.51592, - 6.29469, - 6.42076, - 6.60076, - 6.61081, - 6.40174, - 6.29924, - 
6.74568, - 6.39252, - 6.33087, - 6.24725, - 6.32582, - 6.71362, - 6.50464, - 6.29898, - 6.58622, - 6.20531, - 6.37231, - 6.47688, - 6.06606, - 6.4361, - 6.43802, - 5.93011, - 6.50386, - 6.34479, - 6.2994, - 6.57209, - 6.25778, - 6.45508, - 6.39037, - 6.45798, - 6.36904, - 6.3742, - 6.34459, - 6.40159, - 6.35231, - 6.21572, - 6.41328, - 6.65358, - 6.50605, - 6.30743, - 6.02136, - 6.42199, - 6.44523, - 6.53604, - 6.37327, - 6.27059, - 6.56258, - 6.34048, - 6.38827, - 5.99745, - 6.26555, - 6.45509, - 6.6419, - 6.17585, - 6.07765, - 6.32005, - 5.9988, - 6.3088, - 6.32593, - 6.28967, - 6.49087, - 6.57397, - 6.75413, - 6.16988, - 6.26637, - 6.50306, - 6.63417, - 6.55743, - 6.4403, - 6.57198, - 6.30406, - 6.2777, - 6.30065, - 6.2156, - 6.27963, - 5.94078, - 6.21481, - 6.64228, - 6.30421, - 6.55175, - 6.41225, - 6.18714, - 6.53382, - 5.99607, - 6.10913, - 6.2521, - 6.2201, - 6.31349, - 6.51799, - 6.45944, - 6.33556, - 6.56389, - 6.43665, - 6.36721, - 6.34374, - 6.15574, - 6.47752, - 6.38969, - 6.47163, - 6.53956, - 6.51249, - 6.39771, - 6.04294, - 6.58281, - 6.31275, - 6.42086, - 6.14868, - 6.21364, - 6.19408, - 6.41132, - 6.45343, - 6.19411, - 6.18659, - 6.56525, - 6.40467, - 6.28638, - 6.33442, - 6.6218, - 6.43731, - 6.36122, - 6.25071, - 6.12011, - 6.40226, - 5.99376, - 6.60549, - 6.16224, - 6.56538, - 6.38555, - 6.43746, - 6.43002, - 6.62869, - 6.15875, - 6.34685, - 6.3523, - 6.49109, - 6.37212, - 6.44384, - 6.10934, - 6.39318, - 6.42245, - 6.14934, - 6.46085, - 6.32821, - 6.60509, - 6.46596, - 6.39857, - 5.87817, - 6.24183, - 6.44909, - 6.33179, - 6.4368, - 6.24726, - 6.40252, - 6.131, - 6.50046, - 6.3391, - 6.34118, - 6.46806, - 6.31596, - 6.16235, - 6.54313, - 6.42882, - 6.37647, - 6.51876, - 6.16584, - 6.47311, - 6.21822, - 6.32196, - 6.07977, - 6.44668, - 6.39247, - 6.25631, - 6.47592, - 6.29171, - 6.38129, - 6.55715, - 6.28978, - 6.26295, - 6.4926, - 6.18279, - 6.58878, - 6.10062, - 6.17452, - 6.10584, - 6.18107, - 6.4517, - 6.46322, - 6.18413, - 6.04441, - 6.15884, - 6.2331, - 6.16856, - 6.18516, - 6.56784, - 6.25482, - 6.38822, - 6.03013, - 6.03972, - 6.41785, - 6.30254, - 6.36035, - 6.02451, - 6.50559, - 6.40899, - 6.18496, - 6.34395, - 6.52951, - 6.25829, - 6.51237, - 6.28479, - 6.14295, - 6.52767, - 6.07687, - 6.40724, - 6.39342, - 6.28972, - 6.2584, - 6.32533, - 6.43399, - 6.36631, - 6.16643, - 6.33093, - 6.45457, - 6.25883, - 6.34143, - 6.2437, - 6.23937, - 6.16769, - 6.07649, - 6.12008, - 6.40524, - 6.32947, - 6.39147, - 6.28194, - 6.12545, - 6.35343, - 6.33975, - 6.53219, - 6.41075, - 6.21738, - 6.37557, - 6.51013, - 6.1613, - 6.14545, - 6.33928, - 6.4156, - 6.34552, - 6.18562, - 6.31044, - 6.535, - 6.2967, - 6.34847, - 6.38755, - 6.09215, - 6.15779, - 6.09988, - 6.3951, - 6.11293, - 6.15412, - 6.34488, - 6.02805, - 6.37669, - 6.08256, - 6.29337, - 6.11569, - 6.3343, - 6.23769, - 6.33333, - 6.19854, - 6.13166, - 6.53816, - 6.14203, - 6.22576, - 6.31578, - 6.18142, - 6.24817, - 6.54147, - 6.26769, - 6.50317, - 6.35394, - 6.00299, - 6.1815, - 6.22899, - 6.25878, - 6.44192, - 6.44892, - 6.39553, - 5.98413, - 6.43795, - 6.37013, - 6.06328, - 6.58424, - 6.35392, - 6.30076, - 6.4262, - 6.08959, - 6.37101, - 6.25673, - 5.98083, - 6.42341, - 6.22051, - 6.31869, - 5.99465, - 6.20636, - 6.29428, - 6.28203, - 6.15005, - 6.03871, - 6.18434, - 6.53488, - 6.36443, - 6.07942, - 6.30651, - 6.06713, - 6.26565, - 6.40616, - 6.741, - 6.24939, - 6.13291, - 6.09875, - 6.31759, - 5.93891, - 6.2543, - 6.00153, - 6.54021, - 6.40471, - 6.22258, - 6.2507, - 6.12092, - 6.1711, - 6.03053, - 6.46355, - 
6.29811, - 6.27215, - 6.08401, - 6.22164, - 6.39539, - 6.47017, - 6.11386, - 6.45237, - 6.04349, - 6.30801, - 6.3468, - 6.18748, - 6.42659, - 5.99932, - 6.12072, - 6.22595, - 6.33846, - 6.56846, - 6.08395, - 6.37881, - 6.59243, - 6.15607, - 6.2082, - 6.21438, - 6.27514, - 5.84324, - 6.40712, - 6.19796, - 6.33034, - 6.18061, - 6.41243, - 6.21666, - 6.15695, - 5.96279, - 6.30155, - 6.15897, - 6.21676, - 6.0512, - 6.08294, - 6.0621, - 6.09995, - 6.13439, - 6.40333, - 6.33143, - 5.96941, - 6.13624, - 6.43448, - 6.23377, - 6.40988, - 6.22927, - 5.99602, - 6.41574, - 6.17216, - 6.32381, - 6.12876, - 5.96916, - 5.99431, - 6.17928, - 6.01173, - 6.20852, - 6.3407, - 6.39336, - 6.09081, - 6.35499, - 6.24335, - 6.31461, - 6.15029, - 6.30659, - 6.26253, - 6.39301, - 6.2042, - 6.37907, - 5.97963, - 6.38598, - 6.27523, - 6.03397, - 6.552, - 6.27548, - 6.28337, - 6.21724, - 6.20224, - 6.07868, - 6.073, - 6.30956, - 6.21111, - 6.12205, - 6.45981, - 6.1036, - 6.15625, - 6.18828, - 6.40387, - 6.34025, - 6.2894, - 6.39874, - 6.18994, - 6.12809, - 6.30166, - 6.20345, - 6.35857, - 6.12282, - 6.3579, - 6.42851, - 6.2104, - 6.13, - 6.32673, - 5.99126, - 6.53213, - 6.39713, - 6.22232, - 6.36209, - 6.37234, - 6.06583, - 5.96905, - 6.07293, - 5.89625, - 6.16057, - 6.04981, - 6.10996, - 6.48529, - 6.08862, - 6.29631, - 6.25923, - 6.16974, - 6.27645, - 6.34773, - 6.14065, - 6.39893, - 6.20423, - 6.44389, - 6.14672, - 6.09501, - 6.23888, - 6.14447, - 6.30253, - 6.38443, - 6.40943, - 6.34193, - 6.26095, - 6.06244, - 6.42097, - 6.1041, - 6.38684, - 6.37667, - 6.12186, - 5.99692, - 6.19204, - 6.1919, - 6.50044, - 6.3115, - 6.05882, - 5.86439, - 6.45141, - 5.88432, - 6.23995, - 6.11292, - 6.20951, - 5.90822, - 6.19528, - 5.81616, - 6.2398, - 6.34606, - 6.36593, - 6.09603, - 6.33785, - 6.42073, - 5.92349, - 6.37215, - 6.39677, - 6.36358, - 6.22775, - 5.98277, - 6.35036, - 6.21034, - 5.97164, - 6.09301, - 6.12039, - 6.46194, - 6.2046, - 5.96427, - 6.29253, - 6.10433, - 6.08377, - 6.3307, - 6.4867, - 6.31023, - 6.09359, - 6.22142, - 6.05327, - 6.15394, - 6.23608, - 6.03966, - 5.8949, - 6.2167, - 6.26209, - 5.93462, - 6.07415, - 6.09805, - 6.29827, - 6.3569, - 6.21374, - 6.25305, - 6.44093, - 6.31724, - 5.94012, - 6.06901, - 6.44223, - 6.15413, - 6.30072, - 6.16676, - 6.16942, - 5.98695, - 6.23098, - 6.05042, - 6.28081, - 6.09711, - 6.37741, - 6.06699, - 6.05882, - 6.17689, - 6.22381, - 6.32849, - 6.24238, - 6.31961, - 5.93739, - 6.2644, - 5.98268, - 6.16066, - 5.98254, - 6.23034, - 6.13085, - 6.00423, - 5.90725, - 6.16344, - 6.04893, - 6.19732, - 6.05768, - 6.04611, - 6.21645, - 6.14967, - 6.24572, - 6.01439, - 6.30176, - 5.80022, - 6.47263, - 6.18387, - 6.25577, - 6.24843, - 5.91143, - 5.96473, - 6.14371, - 6.11824, - 5.84433, - 6.0589, - 6.22986, - 6.33661, - 5.88936, - 6.4773, - 6.1532, - 6.24312, - 5.5371, - 5.94914, - 6.09041, - 6.13193, - 5.7848, - 6.08348, - 6.14052, - 6.0647, - 6.26865, - 6.25012, - 6.25113, - 6.30421, - 6.3171, - 6.45796, - 6.27366, - 6.14312, - 6.49744, - 6.16217, - 6.23036, - 5.86772, - 6.02907, - 6.19862, - 6.26842, - 6.35715, - 6.10501, - 5.91702, - 6.03526, - 6.15697, - 6.03631, - 6.07692, - 6.24646, - 6.14011, - 6.05932, - 6.15876, - 6.05441, - 5.99278, - 6.12618, - 6.39054, - 6.14162, - 6.10958, - 6.45082, - 6.30386, - 6.0778, - 5.93397, - 5.90111, - 6.06705, - 6.14443, - 6.31779, - 5.74064, - 6.10349, - 5.97327, - 6.09052, - 6.25249, - 6.07548, - 6.07552, - 5.98058, - 5.99296, - 6.05499, - 5.86394, - 5.86196, - 5.83776, - 5.83957, - 6.2593, - 5.83799, - 6.1191, - 6.08244, - 6.22337, - 
6.09661, - 6.0732, - 5.98194, - 6.35632, - 5.77603, - 5.84978, - 6.18573, - 5.89755, - 6.14481, - 6.15262, - 5.94744, - 5.90468, - 6.14408, - 6.02246, - 6.12202, - 5.92749, - 6.19453, - 6.06292, - 6.05398, - 5.78895, - 6.07653, - 5.87674, - 6.10413, - 6.20621, - 6.02689, - 6.15198, - 6.22689, - 5.85123, - 6.07978, - 5.97042, - 5.81312, - 6.10418, - 6.21739, - 6.1917, - 6.24606, - 5.95878, - 5.82133, - 5.92305, - 5.85724, - 6.05554, - 6.18299, - 6.15499, - 5.83163, - 6.46447, - 6.15277, - 6.04714, - 6.07566, - 6.14775, - 6.07494, - 5.95285, - 5.96777, - 5.99285, - 6.25656, - 5.90819, - 5.84823, - 5.9248, - 6.12159, - 6.05189, - 6.25358, - 5.98047, - 5.91779, - 6.07089, - 6.10884, - 6.05018, - 5.91499, - 5.84059, - 6.00829, - 6.01661, - 6.08329, - 5.8952, - 6.01278, - 5.67961, - 5.83088, - 6.13372, - 6.0899, - 6.15196, - 6.18286, - 6.14409, - 5.7606, - 6.08712, - 6.10897, - 5.99769, - 5.93637, - 5.87955, - 5.95937, - 6.29087, - 5.87092, - 5.78197, - 6.14667, - 6.05809, - 6.16481, - 5.94991, - 5.75291, - 5.8592, - 6.19805, - 5.9858, - 6.1639, - 6.09678, - 6.02787, - 5.81271, - 6.09139, - 6.32533, - 5.96413, - 6.16299, - 6.00276, - 6.19657, - 6.02726, - 6.05171, - 5.84633, - 5.77209, - 5.96961, - 5.9849, - 6.02932, - 6.0537, - 6.08561, - 5.89283, - 6.19435, - 6.06464, - 6.2568, - 5.80293, - 6.02946, - 5.7978, - 6.10829, - 5.84662, - 5.77951, - 5.7912, - 6.04755, - 5.90745, - 5.93444, - 6.17925, - 5.82008, - 5.96972, - 5.71202, - 6.00809, - 5.80207, - 5.97974, - 5.88935, - 6.33257, - 6.14508, - 5.86721, - 5.86794, - 6.01291, - 5.74821, - 5.91841, - 5.82207, - 5.83811, - 5.54737, - 5.80353, - 5.72796, - 6.0506, - 6.03371, - 5.80528, - 5.93526, - 6.11032, - 6.03443, - 5.9479, - 5.84056, - 5.86626, - 5.88418, - 6.0262, - 5.86155, - 6.06552, - 5.88192, - 5.8404, - 5.92057, - 5.83942, - 6.01708, - 5.96875, - 5.79609, - 5.88157, - 5.78996, - 6.01264, - 6.04324, - 5.8411, - 5.83899, - 5.94632, - 6.03382, - 5.8096, - 5.6814, - 5.61011, - 5.82258, - 6.0532, - 6.26449, - 5.90097, - 6.03606, - 5.59388, - 5.84266, - 5.97485, - 5.95277, - 6.24308, - 5.91125, - 6.12072, - 5.96379, - 5.86492, - 5.99428, - 5.83884, - 5.82211, - 5.70013, - 6.0971, - 6.03164, - 5.78511, - 5.90645, - 5.66368, - 5.73694, - 6.13804, - 6.1053, - 5.96152, - 6.11842, - 5.99783, - 6.00233, - 5.63439, - 5.85923, - 5.93705, - 5.58148, - 5.94662, - 5.76007, - 5.84042, - 5.74787, - 5.88519, - 5.97658, - 5.7215, - 5.87309, - 6.00525, - 5.93322, - 5.81608, - 5.74541, - 5.8454, - 5.93668, - 5.85126, - 5.7304, - 5.84281, - 6.01029, - 5.98761, - 5.73332, - 5.84772, - 5.72475, - 5.54015, - 5.99439, - 6.09163, - 5.84615, - 5.70075, - 5.81065, - 6.0266, - 5.76754, - 5.72074, - 6.09481, - 5.72303, - 5.56257, - 5.85745, - 5.69924, - 5.82868, - 5.78828, - 5.67483, - 5.496, - 5.73639, - 5.72971, - 5.76467, - 5.66526, - 5.65788, - 5.92271, - 5.62234, - 5.31858, - 5.64535, - 5.99382, - 5.651, - 5.76309, - 5.79016, - 5.95155, - 5.68025, - 5.53956, - 5.92439, - 5.78876, - 5.79481, - 5.81312, - 5.69195, - 5.7748, - 5.70214, - 5.90134, - 5.75172, - 5.8835, - 5.57238, - 5.60218, - 5.45807, - 5.53449, - 5.58066, - 5.6957, - 5.64536, - 5.68633, - 5.81438, - 5.40124, - 5.83671, - 5.96217, - 6.00974, - 5.58393, - 5.53247, - 5.78327, - 5.88263, - 5.84458, - 5.78983, - 5.58777, - 5.74236, - 5.75036, - 5.52226, - 5.49968, - 5.67871, - 6.00464, - 5.641, - 5.65137, - 5.55635, - 5.61197, - 5.44461, - 5.63676, - 5.85305, - 5.6634, - 5.70227, - 5.63678, - 5.87241, - 5.9005, - 6.00072, - 5.71109, - 5.85047, - 5.8183, - 5.5811, - 5.28681, - 5.53006, - 6.04771, - 5.50425, 
- 5.67854, - 5.51973, - 5.84652, - 5.86275, - 5.91333, - 5.60112, - 5.80213, - 5.60584, - 5.40794, - 5.63212, - 5.47845, - 5.80563, - 5.64168, - 5.89571, - 5.89592, - 5.88066, - 5.62191, - 5.64817, - 5.49271, - 5.80496, - 5.63366, - 5.49444, - 5.81441, - 5.86738, - 5.77686, - 5.81384, - 5.73914, - 5.77844, - 5.41317, - 5.57368, - 5.85532, - 5.57311, - 5.72023, - 5.66576, - 5.31334, - 5.78508, - 5.93047, - 5.85842, - 5.94373, - 5.67211, - 5.54567, - 5.49603, - 5.57147, - 5.33313, - 5.55491, - 5.33363, - 5.72239, - 5.662, - 5.45219, - 5.5106, - 5.53594, - 5.82025, - 5.77807, - 5.2408, - 5.59296, - 5.62683, - 5.69741, - 5.73427, - 5.49788, - 5.66272, - 5.57567, - 5.74357, - 5.52734, - 5.50491, - 5.57587, - 5.96142, - 5.49539, - 5.71266, - 5.70483, - 5.23033, - 5.44142, - 5.59221, - 5.61425, - 5.36935, - 5.57102, - 5.73355, - 5.58329, - 5.76048, - 5.78104, - 5.51218, - 5.54391, - 5.89282, - 5.71522, - 5.56901, - 5.45096, - 5.36384, - 5.78966, - 5.79038, - 5.52832, - 5.47669, - 5.65642, - 5.59188, - 5.56174, - 5.52253, - 5.50719, - 5.29606, - 5.75425, - 5.68504, - 5.46854, - 5.67471, - 5.72898, - 5.90051, - 5.5793, - 5.6441, - 5.7178, - 5.8198, - 5.57355, - 5.61022, - 5.66798, - 5.19177, - 5.91541, - 5.40464, - 5.39557, - 5.50319, - 5.66164, - 5.7401, - 5.55738, - 5.72171, - 5.61542, - 5.6533, - 5.50204, - 5.5001, - 5.6838, - 5.74351, - 5.23517, - 5.27947, - 5.7736, - 5.74565, - 5.61515, - 5.51495, - 5.34017, - 5.55685, - 5.78903, - 5.57942, - 5.85997, - 5.24422, - 5.33002, - 5.52458, - 5.6809, - 5.7238, - 5.45601, - 5.57291, - 5.51181, - 5.56948, - 5.32142, - 5.35315, - 5.47335, - 5.58987, - 5.56781, - 5.33109, - 5.47933, - 5.60359, - 5.33716, - 5.70209, - 5.57574, - 5.15947, - 5.40233, - 5.14065, - 5.39899, - 5.68815, - 5.05608, - 5.26242, - 5.46771, - 5.10152, - 5.704, - 5.29233, - 5.33947, - 5.25637, - 5.67878, - 5.55052, - 5.51558, - 5.46657, - 5.1927, - 5.63042, - 5.54801, - 5.61803, - 5.59148, - 5.59111, - 5.53997, - 5.71475, - 5.751, - 5.50991, - 5.54956, - 5.26494, - 5.25531, - 5.62038, - 5.40946, - 5.45863, - 5.08687, - 5.5366, - 5.60898, - 5.30272, - 5.6928, - 5.55462, - 5.6038, - 5.35577, - 5.4286, - 5.77712, - 5.12033, - 5.44462, - 5.41782, - 5.32479, - 5.21973, - 5.45154, - 5.20559, - 5.6674, - 5.21263, - 5.42332, - 5.54029, - 5.68911, - 5.21107, - 5.5421, - 5.28456, - 5.22619, - 5.07375, - 5.77718, - 5.52267, - 5.27374, - 5.39799, - 5.42136, - 5.29616, - 5.37187, - 5.18627, - 5.41708, - 5.56821, - 5.51711, - 5.26606, - 5.44275, - 5.27222, - 5.48044, - 5.42999, - 5.36919, - 5.82357, - 5.48711, - 5.23278, - 5.33405, - 5.24011, - 5.39905, - 5.4392, - 5.36185, - 5.42562, - 5.43673, - 5.2401, - 5.44366, - 5.55005, - 5.18979, - 5.56064, - 5.27104, - 5.37792, - 5.72462, - 5.31993, - 5.43134, - 5.26772, - 5.47394, - 5.37205, - 5.27303, - 5.29492, - 5.32969, - 5.514, - 5.41325, - 5.24781, - 5.50394, - 5.43094, - 5.21885, - 5.697, - 5.49622, - 5.3313, - 5.37993, - 5.31966, - 5.38266, - 5.40369, - 5.27459, - 5.26548, - 5.47746, - 5.32108, - 5.4704, - 5.3552, - 5.68324, - 5.56886, - 5.59513, - 5.26185, - 5.19901, - 5.47215, - 5.46836, - 4.99488, - 5.4407, - 5.34759, - 5.79016, - 5.42391, - 5.31161, - 5.51834, - 5.37018, - 5.33223, - 5.62554, - 5.1873, - 5.26472, - 5.22393, - 5.01926, - 5.41349, - 5.23932, - 5.41591, - 5.23388, - 5.46969, - 5.59588, - 5.63601, - 5.51309, - 5.25855, - 5.47349, - 5.54422, - 5.54735, - 5.30105, - 5.1544, - 5.38647, - 5.18654, - 5.45893, - 5.42539, - 5.46495, - 5.30878, - 5.16631, - 5.61421, - 5.32415, - 5.5367, - 5.46586, - 5.4395, - 5.40487, - 5.10759, - 
5.43359, - 5.5656, - 5.35044, - 5.2805, - 5.52335, - 5.3629, - 5.62948, - 5.25984, - 5.40786, - 5.22698, - 5.44817, - 5.20858, - 5.3904, - 5.67465, - 5.50158, - 5.25219, - 5.40554, - 5.42222, - 5.12741, - 5.58132, - 5.23858, - 5.472, - 5.53455, - 5.09749, - 5.32636, - 5.66949, - 5.47415, - 5.83646, - 5.15267, - 5.65019, - 5.39714, - 5.2346, - 5.39145, - 5.21172, - 5.38191, - 5.29957, - 5.4159, - 5.23551, - 5.46337, - 5.10637, - 5.49482, - 5.51147, - 5.22539, - 5.48015, - 5.36735, - 5.41412, - 5.31927, - 5.6195, - 5.4469, - 5.04296, - 5.01706, - 5.42501, - 5.57975, - 5.18865, - 5.30631, - 5.23734, - 5.14166, - 5.29754, - 4.74249, - 5.33519, - 5.17675, - 4.96699, - 5.02152, - 5.48829, - 5.37785, - 5.52028, - 5.2346, - 5.21928, - 5.42326, - 5.21575, - 5.34642, - 5.50497, - 5.34291, - 5.44243, - 5.26401, - 5.48028, - 5.29042, - 4.97953, - 5.21126, - 5.40469, - 5.093, - 5.33717, - 5.18471, - 5.20772, - 5.23414, - 5.00452, - 4.85325, - 5.4221, - 5.34867, - 5.44642, - 5.41004, - 5.01, - 5.10068, - 5.3912, - 5.30883, - 5.02749, - 5.25628, - 4.84244, - 5.53958, - 5.06558, - 5.18397, - 5.16718, - 5.43679, - 5.41454, - 5.2013, - 5.17036, - 5.61725, - 5.21891, - 5.18433, - 5.27505, - 5.08694, - 5.04475, - 5.00165, - 4.89636, - 5.10688, - 4.87777, - 5.12496, - 5.12076, - 5.28615, - 5.37844, - 5.31216, - 5.16521, - 5.26539, - 5.04044, - 5.22532, - 5.06384, - 4.87431, - 5.27989, - 5.39772, - 5.26121, - 5.10267, - 5.04472, - 5.30136, - 5.12835, - 5.32223, - 5.30201, - 5.47047, - 5.08983, - 5.09329, - 5.22051, - 5.18219, - 5.26414, - 4.85314, - 4.80557, - 5.11929, - 4.97588, - 5.10509, - 5.12232, - 5.1768, - 5.21992, - 5.18914, - 5.40696, - 4.9601, - 5.13121, - 5.039, - 5.08148, - 5.00974, - 4.95523, - 5.22023, - 5.18992, - 5.23818, - 5.43358, - 5.25654, - 5.1727, - 5.38586, - 5.33956, - 5.15538, - 5.31171, - 5.03377, - 5.15866, - 5.1277, - 5.05149, - 5.22973, - 5.31626, - 4.79504, - 5.08908, - 5.21996, - 4.99717, - 5.11511, - 5.09157, - 5.18415, - 5.35206, - 4.483, - 5.11497, - 5.18612, - 5.09318, - 5.3488, - 5.19722, - 4.92825, - 4.76935, - 4.97035, - 4.93379, - 5.11701, - 5.18488, - 4.99943, - 5.11904, - 4.78261, - 5.29948, - 5.12962, - 5.26287, - 5.32794, - 5.23089, - 5.07579, - 5.21165, - 5.15483, - 4.94098, - 5.14296, - 4.70642, - 5.02005, - 4.9152, - 5.27068, - 5.31659, - 5.29478, - 5.17467, - 5.48285, - 5.17564, - 4.97944, - 5.11965, - 4.77649, - 5.43721, - 5.06011, - 5.12371, - 4.96652, - 5.11622, - 5.20294, - 5.20476, - 4.83474, - 4.99933, - 5.23165, - 4.80956, - 5.16499, - 5.40001, - 5.15955, - 5.10155, - 5.4379, - 4.92316, - 5.29426, - 4.83243, - 4.96744, - 5.04034, - 4.96892, - 5.42396, - 5.02501, - 4.91994, - 5.06529, - 5.23294, - 4.98085, - 5.0054, - 5.12737, - 4.99702, - 4.85744, - 4.64251, - 4.97963, - 5.30969, - 5.13006, - 4.84322, - 5.23145, - 5.0589, - 5.02944, - 5.1554, - 5.14248, - 5.29471, - 5.11387, - 5.01216, - 4.90647, - 4.93221, - 5.35247, - 5.39206, - 4.90045, - 5.27059, - 5.22647, - 5.11795, - 5.06723, - 4.96303, - 5.24919, - 5.29575, - 5.04291, - 5.20157, - 5.44766, - 5.09375, - 5.00037, - 5.18376, - 5.07238, - 5.05871, - 5.04124, - 4.98874, - 4.80654, - 5.15762, - 5.35158, - 5.13558, - 5.04201, - 5.21272, - 4.84443, - 5.09973, - 5.26597, - 5.26834, - 5.10139, - 5.36117, - 5.11024, - 5.31294, - 4.97496, - 4.7405, - 5.25625, - 4.9144, - 5.21628, - 5.06403, - 4.79898, - 4.89406, - 5.19256, - 5.24569, - 4.88062, - 5.01205, - 4.90107, - 5.14932, - 4.86965, - 4.99126, - 4.91607, - 4.86337, - 5.09162, - 4.9213, - 4.99198, - 4.81591, - 5.04119, - 5.08007, - 4.91372, - 4.88984, - 
5.15553, - 5.44333, - 5.21246, - 5.00124, - 5.15027, - 4.82246, - 4.97428, - 4.94423, - 4.567, - 5.30908, - 4.99444, - 4.69225, - 4.80792, - 4.76228, - 4.91197, - 5.27037, - 4.83068, - 4.66668, - 4.93349, - 4.96998, - 4.88633, - 5.12723, - 4.93398, - 4.73109, - 5.27862, - 5.08144, - 4.8117, - 5.03094, - 4.85073, - 5.19184, - 5.38803, - 5.12819, - 4.97051, - 5.22417, - 5.01635, - 5.0717, - 5.19179, - 5.09407, - 5.09324, - 5.07832, - 5.26847, - 5.28364, - 5.1167, - 5.0541, - 4.58195, - 4.98147, - 4.96462, - 5.09185, - 5.15236, - 5.06825, - 5.01385, - 4.97451, - 5.09335, - 5.04342, - 5.08338, - 4.90682, - 5.17985, - 5.16023, - 5.08981, - 4.98628, - 4.89905, - 4.72349, - 4.79049, - 5.01912, - 4.71261, - 4.73899, - 5.31541, - 5.17609, - 4.88201, - 5.12856, - 4.91881, - 5.10478, - 4.78821, - 4.91988, - 4.55291, - 5.28126, - 5.38192, - 4.90148, - 4.91535, - 4.86343, - 4.51877, - 4.82147, - 5.19334, - 4.99626, - 5.1268, - 4.90126, - 4.97496, - 4.6243, - 5.06909, - 4.78466, - 4.94887, - 4.41497, - 5.12551, - 4.89441, - 5.01441, - 4.9732, - 4.80138, - 4.87926, - 4.86248, - 4.78461, - 4.4913, - 4.93864, - 5.09337, - 5.02533, - 4.96463, - 4.91174, - 4.90578, - 5.02837, - 5.0042, - 5.18834, - 5.16745, - 4.94125, - 4.78142, - 5.08765, - 5.162, - 4.99523, - 4.72421, - 5.06853, - 5.15604, - 4.70324, - 5.14308, - 5.26969, - 5.01419, - 4.89412, - 4.66994, - 4.56827, - 4.82008, - 4.88612, - 4.99335, - 5.00443, - 5.00444, - 4.76957, - 5.23505, - 4.73968, - 5.14181, - 4.91469, - 5.23114, - 5.33121, - 4.81551, - 4.90884, - 4.9496, - 5.10944, - 4.47681, - 4.67398, - 4.8943, - 4.84807, - 5.11156, - 4.88003, - 5.00481, - 4.9316, - 5.34696, - 4.76706, - 4.66782, - 4.91814, - 5.01827, - 4.93052, - 4.7207, - 4.63041, - 4.76303, - 4.84309, - 4.69046, - 5.03413, - 5.03258, - 4.59029, - 5.05744, - 4.90873, - 5.21043, - 4.81666, - 5.0944, - 5.14665, - 4.78434, - 5.15583, - 4.9822, - 4.85239, - 5.05721, - 5.0517, - 4.78335, - 4.85769, - 4.99127, - 5.0996, - 4.9464, - 4.80083, - 4.62979, - 4.96829, - 4.8878, - 4.96983, - 4.61779, - 5.05413, - 4.79733, - 5.06758, - 4.85831, - 5.00424, - 4.79188, - 4.69064, - 5.03358, - 5.19736, - 4.92724, - 4.83414, - 4.78382, - 4.77864, - 5.132, - 5.23577, - 5.05201, - 4.72849, - 4.82143, - 4.63096, - 4.87687, - 4.48367, - 4.97165, - 4.85723, - 5.18116, - 4.99292, - 4.97902, - 5.17941, - 4.77471, - 4.71585, - 5.35185, - 4.68413, - 4.98282, - 4.67711, - 5.03022, - 4.93753, - 4.71009, - 4.88578, - 5.17075, - 5.02417, - 4.75791, - 4.95128, - 5.35481, - 4.56358, - 4.80616, - 4.70277, - 4.97661, - 4.83534, - 4.75097, - 4.87225, - 4.97889, - 4.5431, - 4.59369, - 5.12614, - 4.63494, - 4.97415, - 4.79503, - 5.15621, - 4.67314, - 4.70713, - 4.90119, - 4.92401, - 4.64504, - 5.11849, - 4.97763, - 5.1621, - 4.65454, - 4.6877, - 5.1589, - 5.01839, - 4.81071, - 5.24575, - 4.9913, - 4.80177, - 5.18696, - 4.87271, - 4.97809, - 4.88067, - 4.9305, - 4.81187, - 4.4605, - 4.92943, - 5.23168, - 4.94083, - 4.69259, - 4.76095, - 4.74441, - 4.81102, - 4.94293, - 4.90204, - 4.53579, - 4.91026, - 4.63342, - 4.90098, - 5.04656, - 4.89438, - 4.89704, - 4.9667, - 4.94035, - 4.64381, - 4.76133, - 4.49628, - 4.60273, - 4.87816, - 4.86968, - 5.03411, - 4.71504, - 4.18378, - 5.06436, - 4.47125, - 4.80177, - 5.02795, - 4.95047, - 4.74993, - 4.84984, - 4.99234, - 4.57989, - 4.80215, - 4.72603, - 4.96978, - 4.96059, - 4.83065, - 4.78615, - 4.85814, - 4.69989, - 4.56412, - 4.70496, - 4.85209, - 4.80944, - 4.791, - 4.8028, - 4.65022, - 4.90279, - 4.8498, - 4.68366, - 4.82477, - 4.96829, - 5.114, - 5.11631, - 4.94083, - 
4.67494, - 5.05614, - 4.61798, - 4.68506, - 4.58312, - 4.89027, - 4.71545, - 4.92529, - 4.77487, - 4.3764, - 4.97832, - 4.81992, - 4.81131, - 4.91933, - 4.72543, - 4.5749, - 4.85909, - 4.98992, - 4.62782, - 5.00526, - 4.77509, - 4.54296, - 4.93964, - 4.65526, - 4.74844, - 4.98197, - 4.93855, - 4.73361, - 4.40623, - 4.84044, - 4.68303, - 4.5449, - 4.74978, - 4.73286, - 4.63082, - 5.10716, - 5.11458, - 5.04425, - 5.11559, - 4.88711, - 4.78152, - 4.92955, - 4.79275, - 4.92607, - 4.43538, - 4.72603, - 4.67828, - 4.76623, - 4.8814, - 4.96701, - 5.2285, - 4.83771, - 4.63808, - 4.58013, - 4.96567, - 5.07546, - 5.02061, - 4.51382, - 4.67226, - 4.6261, - 5.19041, - 4.9004, - 4.81254, - 4.92005, - 4.63456, - 4.82491, - 4.8335, - 4.78664, - 4.41905, - 4.87111, - 4.8236, - 4.36369, - 4.50181, - 4.99971, - 4.54458, - 4.40778, - 4.37317, - 4.84384, - 4.89916, - 4.83623, - 4.96574, - 4.72721, - 4.93398, - 4.90094, - 4.87484, - 4.69947, - 4.46603, - 4.83921, - 5.13761, - 4.68306, - 4.49873, - 4.85083, - 4.93194, - 4.80737, - 4.9269, - 4.81604, - 4.56751, - 4.76934, - 4.97913, - 5.07645, - 4.61252, - 4.62552, - 4.79322, - 4.92026, - 4.65237, - 4.71413, - 4.6462, - 5.07187, - 4.36671, - 4.67012, - 5.09229, - 4.79901, - 4.6969, - 4.92218, - 4.69102, - 4.97988, - 4.75608, - 4.93425, - 4.3048, - 4.85624, - 4.65828, - 4.76871, - 5.08266, - 4.55283, - 4.58891, - 4.65472, - 4.81356, - 4.8506, - 4.57807, - 4.39672, - 5.14019, - 4.34043, - 4.68014, - 4.94118, - 4.444, - 4.90963, - 4.67061, - 5.12985, - 4.61707, - 4.58806, - 4.68679, - 4.96487, - 4.76082, - 4.39427, - 4.63108, - 4.55283, - 4.75749, - 4.49963, - 4.40536, - 4.98277, - 4.79013, - 4.6621, - 4.61666, - 4.83047, - 4.80454, - 4.66187, - 4.68888, - 4.86322, - 4.91509, - 4.53975, - 4.67541, - 4.73188, - 4.88715, - 4.57492, - 4.7416, - 4.51026, - 4.87815, - 4.64985, - 4.6465, - 4.78482, - 4.7504, - 4.57867, - 4.53992, - 4.8434, - 4.77999, - 4.48138, - 4.63586, - 4.55482, - 4.57308, - 4.57164, - 4.64359, - 4.75031, - 4.89821, - 4.65596, - 4.62546, - 4.68994, - 4.91806, - 4.49626, - 4.86053, - 4.71938, - 4.37908, - 4.65407, - 4.73407, - 4.57251, - 4.4987, - 4.76839, - 4.8754, - 4.79227, - 4.53006, - 4.54724, - 4.47674, - 4.42248, - 4.80017, - 4.73179, - 4.79641, - 4.79088, - 4.6273, - 4.66027, - 4.80137, - 4.48846, - 4.84206, - 4.40344, - 5.0109, - 4.62057, - 4.71667, - 4.9149, - 4.68968, - 4.25696, - 4.49662, - 4.80345, - 4.66772, - 4.86094, - 5.02861, - 4.55318, - 4.43461, - 4.78399, - 4.78803, - 4.75466, - 4.82244, - 4.53552, - 4.6763, - 4.88463, - 4.64964, - 4.73164, - 4.81068, - 5.19057, - 4.50818, - 4.5406, - 4.94924, - 4.57704, - 4.58163, - 4.80786, - 4.98468, - 4.58419, - 4.66698, - 4.65373, - 4.92446, - 4.74359, - 4.50878, - 4.89068, - 4.63939, - 4.61131, - 4.98252, - 4.59273, - 4.79158, - 4.53856, - 4.93761, - 4.61306, - 4.42088, - 4.63097, - 4.6103, - 4.59015, - 4.58752, - 4.62203, - 4.87797, - 4.72938, - 4.43258, - 4.60739, - 4.68735, - 4.42201, - 4.42015, - 4.74505, - 4.64322, - 4.91427, - 4.53722, - 4.70557, - 4.62932, - 4.66876, - 4.82749, - 4.71134, - 4.80566, - 4.52442, - 4.6009, - 4.64384, - 4.79434, - 4.74472, - 4.45022, - 4.77569, - 4.68638, - 4.4187, - 4.85921, - 4.87999, - 4.79189, - 4.37663, - 4.64966, - 4.29849, - 4.76478, - 4.68621, - 4.55806, - 4.53001, - 4.47709, - 4.78342, - 4.58067, - 4.50417, - 4.34648, - 4.52445, - 4.80306, - 4.51902, - 4.75548, - 4.64674, - 4.39946, - 4.71706, - 4.63076, - 4.62203, - 4.71245, - 4.82305, - 4.52816, - 4.71965, - 4.75728, - 4.50563, - 5.02663, - 4.79956, - 4.65917, - 4.5779, - 4.47024, - 4.83687, - 
4.45878, - 4.60851, - 4.62461, - 4.89863, - 4.91485, - 4.72872, - 4.54498, - 4.9651, - 4.3266, - 4.64575, - 4.74564, - 4.81184, - 4.65392, - 4.59487, - 4.75213, - 4.66301, - 4.46364, - 4.5547, - 4.58862, - 4.44177, - 4.70497, - 4.51295, - 4.49054, - 4.69194, - 4.37789, - 4.66219, - 4.79966, - 4.55419, - 4.33516, - 4.20753, - 4.88029, - 5.06925, - 4.44313, - 4.32421, - 4.58562, - 4.62403, - 4.68836, - 4.33875, - 4.59315, - 4.87061, - 4.71288, - 4.39329, - 4.38261, - 4.44289, - 4.46501, - 4.58984, - 4.4295, - 4.76357, - 4.65818, - 4.29182, - 4.71164, - 4.65288, - 4.4973, - 4.78969, - 4.37633, - 4.35127, - 4.307, - 4.52359, - 4.82105, - 4.53729, - 4.76207, - 4.42362, - 4.40303, - 4.4377, - 4.86301, - 4.90302, - 4.692, - 4.57753, - 4.70418, - 4.50144, - 4.85641, - 4.55561, - 4.31637, - 4.35236, - 4.30115, - 4.79165, - 4.90526, - 4.86331, - 4.66247, - 4.54139, - 4.68041, - 4.58016, - 4.27833, - 4.5759, - 4.67343, - 4.27369, - 4.67216, - 4.65717, - 4.67139, - 4.54835, - 4.39216, - 4.50057, - 4.56748, - 4.60155, - 4.80153, - 4.11793, - 4.47047, - 4.18955, - 4.33829, - 4.66226, - 4.44477, - 4.62824, - 4.30975, - 4.42812, - 4.71616, - 4.73539, - 4.30571, - 4.09786, - 4.67863, - 4.48796, - 4.55961, - 4.67433, - 4.72275, - 4.19958, - 4.47261, - 4.58471, - 4.30993, - 4.96653, - 4.40258, - 4.44839, - 4.32347, - 4.51009, - 4.26612, - 4.43606, - 4.70357, - 4.66502, - 4.42429, - 4.2093, - 4.79596, - 4.15997, - 4.91028, - 4.17702, - 4.20549, - 4.44555, - 4.32572, - 4.61908, - 4.15513, - 4.79776, - 4.50623, - 4.38259, - 4.42717, - 4.57026, - 4.36837, - 4.86207, - 4.64917, - 4.61132, - 4.50166, - 4.58746, - 4.66519, - 4.30949, - 4.40413, - 4.76713, - 4.52146, - 4.78904, - 4.4571, - 4.50096, - 4.56644, - 4.73034, - 4.78384, - 4.61916, - 4.73353, - 4.57054, - 4.39329, - 4.7341, - 4.35901, - 4.70845, - 4.65756, - 4.66067, - 4.51914, - 4.64305, - 4.52182, - 4.66556, - 4.4135, - 4.41948, - 4.24224, - 4.2263, - 4.4588, - 4.47769, - 4.31695, - 4.73466, - 4.44606, - 4.73487, - 3.9312, - 4.85601, - 4.63095, - 4.26169, - 4.42984, - 4.48301, - 4.42146, - 4.55999, - 4.47162, - 4.74291, - 4.6523, - 4.68257, - 4.29395, - 4.49655, - 4.85343, - 4.4064, - 4.56434, - 4.47784, - 4.91544, - 4.67268, - 4.42724, - 4.98248, - 4.25848, - 4.66936, - 4.76909, - 4.25358, - 4.49284, - 4.65497, - 4.44305, - 4.17465, - 4.72947, - 4.03942, - 4.68037, - 4.45605, - 4.77292, - 4.48504, - 4.63545, - 4.55736, - 4.14487, - 4.44325, - 4.71957, - 4.37663, - 4.56119, - 4.35405, - 4.46848, - 4.27411, - 4.23502, - 4.25284, - 4.37734, - 4.60687, - 4.14061, - 4.51885, - 4.26807, - 4.6728, - 4.66543, - 4.68522, - 4.052, - 4.23172, - 4.37141, - 4.23223, - 4.70984, - 4.28569, - 4.53202, - 4.69518, - 4.51001, - 4.622, - 4.61422, - 4.27405, - 4.70186, - 4.53139, - 4.61653, - 4.52805, - 4.45494, - 4.64947, - 4.36956, - 4.60318, - 4.57024, - 4.54094, - 4.48008, - 4.63427, - 4.72048, - 4.38163, - 4.48795, - 4.58948, - 4.43165, - 4.42964, - 4.36689, - 4.29122, - 4.46294, - 4.25289, - 4.2381, - 4.5669, - 4.65292, - 4.72824, - 4.5424, - 4.5074, - 4.41069, - 4.34589, - 4.66087, - 4.3667, - 4.12599, - 4.46192, - 4.6647, - 4.39198, - 4.30146, - 4.44691, - 4.0823, - 4.37265, - 4.44928, - 4.55266, - 4.32833, - 4.56199, - 4.5511, - 4.61409, - 4.52698, - 4.58919, - 4.40964, - 4.62931, - 4.65034, - 4.72942, - 4.58582, - 4.75097, - 4.45131, - 4.62278, - 4.30087, - 4.20944, - 4.72759, - 4.64991, - 4.276, - 4.61855, - 4.34225, - 4.31856, - 4.43884, - 4.20519, - 4.62112, - 4.41565, - 4.29785, - 4.24867, - 4.48361, - 4.78776, - 4.68757, - 4.53799, - 4.21952, - 4.28089, - 
4.51176, - 4.25543, - 4.61468, - 4.38846, - 4.21651, - 4.40214, - 4.89177, - 4.34657, - 4.47874, - 4.22253, - 4.37631, - 4.24356, - 4.01877, - 4.47286, - 4.38093, - 4.22209, - 4.62499, - 4.38607, - 4.66667, - 4.71728, - 4.40116, - 4.45076, - 4.50306, - 4.60412, - 4.72615, - 4.47617, - 4.56085, - 4.81438, - 4.23634, - 4.3366, - 4.46868, - 4.78242, - 4.53482, - 4.23392, - 4.61119, - 4.4743, - 4.13638, - 4.10941, - 4.80199, - 4.33583, - 4.40042, - 4.74981, - 4.40471, - 4.5992, - 4.44396, - 4.29101, - 4.59187, - 4.36723, - 4.45177, - 4.55756, - 4.36824, - 4.54848, - 4.31046, - 4.69068, - 4.60546, - 4.29302, - 3.78524, - 4.64622, - 4.52625, - 4.36206, - 4.0618, - 4.61758, - 4.43272, - 4.02894, - 4.47178, - 4.32032, - 4.63518, - 4.32917, - 4.5668, - 4.35877, - 4.72676, - 5.00534, - 4.58696, - 4.2586, - 4.60091, - 4.34239, - 4.36907, - 4.86409, - 4.29057, - 4.38333, - 4.30863, - 4.39333, - 4.59365, - 4.40166, - 4.07245, - 4.60984, - 4.61895, - 4.00926, - 4.6481, - 4.53555, - 4.2329, - 4.45218, - 4.32422, - 4.56335, - 4.18252, - 4.00789, - 4.36448, - 4.56634, - 4.55995, - 4.24424, - 4.49537, - 4.4365, - 4.32871, - 4.51815, - 4.58975, - 4.35395, - 4.44043, - 4.39594, - 4.31501, - 4.24702, - 4.59454, - 4.32586, - 4.79668, - 4.24409, - 4.53054, - 4.44084, - 4.55064, - 3.97967, - 4.37847, - 4.36902, - 4.62033, - 4.41077, - 4.54702, - 4.66114, - 4.58558, - 4.73869, - 4.6505, - 4.28815, - 4.62306, - 4.61922, - 4.62194, - 4.47024, - 4.38572, - 4.23153, - 4.4582, - 4.39949, - 4.51669, - 4.54652, - 4.44432, - 4.07713, - 4.89498, - 4.40956, - 4.5585, - 4.45401, - 4.64648, - 4.34599, - 4.38254, - 4.2725, - 4.71591, - 3.87683, - 4.37337, - 4.47734, - 4.45168, - 4.08619, - 4.23965, - 4.39212, - 4.5313, - 4.33085, - 4.23232, - 4.45552, - 4.48156, - 4.36242, - 4.43116, - 4.19682, - 4.29684, - 4.38084, - 4.62292, - 4.45856, - 4.44504, - 4.36544, - 4.63477, - 4.2519, - 4.2906, - 4.01187, - 4.71216, - 4.30352, - 4.29585, - 4.25058, - 4.46083, - 4.66354, - 4.71122, - 4.60744, - 4.12529, - 3.94824, - 4.48864, - 4.2015, - 4.2891, - 4.62722, - 4.5061, - 4.37218, - 4.45055, - 4.00527, - 4.45265, - 4.43356, - 4.2977, - 4.55992, - 4.6705, - 4.18849, - 4.54513, - 4.4587, - 3.99098, - 4.21912, - 4.2775, - 4.42525, - 4.31546, - 4.25047, - 4.28106, - 4.68477, - 4.20129, - 4.5783, - 4.4996, - 4.62058, - 4.35665, - 4.56785, - 4.28635, - 4.20255, - 4.7094, - 4.28498, - 4.29269, - 4.71604, - 4.29835, - 4.19412, - 4.70592, - 4.73931, - 4.3699, - 4.25445, - 4.23463, - 4.89396, - 4.72456, - 4.47222, - 4.47906, - 4.4803, - 4.22133, - 4.74637, - 4.07069, - 4.33534, - 4.72215, - 4.5711, - 4.30587, - 4.15091, - 4.16803, - 4.27706, - 4.29576, - 4.53465, - 4.48614, - 4.37501, - 4.04455, - 4.30444, - 4.2725, - 4.21472, - 4.40963, - 4.35502, - 4.31452, - 4.29067, - 4.65515, - 4.05838, - 4.53869, - 4.05647, - 4.42281, - 4.47959, - 4.24617, - 4.33588, - 4.05389, - 4.31867, - 4.49374, - 4.11889, - 4.35429, - 4.28919, - 4.52904, - 4.37941, - 4.4773, - 4.26081, - 3.991, - 4.45552, - 4.17192, - 4.36896, - 4.18408, - 3.96995, - 4.23564, - 4.43569, - 4.4537, - 4.05621, - 4.1512, - 4.43451 - ] - }, - "mem-allocated-bytes": { - "start_step": 0, - "end_step": 16335, - "step_interval": 5, - "values": [ - 151624192.0, - 151624704.0, - 152017920.0, - 231819776.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 
233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 
232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 231295488.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 234965504.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 234965504.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 
232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 232344064.0, - 231295488.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232868352.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 234965504.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 234965504.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 
233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 234965504.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232868352.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 234965504.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232868352.0, - 232344064.0, - 233392640.0, - 
232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 231295488.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 234965504.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 234965504.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 
233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 234965504.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 234965504.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232868352.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 
233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232868352.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 
232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232868352.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232868352.0, - 233916928.0, - 232344064.0, - 232868352.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232868352.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 
233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 
232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 234965504.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 234965504.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232868352.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232868352.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 234965504.0, - 233392640.0, - 233916928.0, - 233392640.0, - 234965504.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 234965504.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232868352.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 231295488.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232868352.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 231295488.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 
232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 234965504.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 231295488.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 234965504.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 
233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 234965504.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 234965504.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 
233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 234965504.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232868352.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 231295488.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 234965504.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 
233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233916928.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232868352.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233916928.0, - 232344064.0, - 233392640.0, - 232344064.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233916928.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 233392640.0, - 232344064.0, - 233392640.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 163, - "step_interval": 5, - "values": [ - 0.95312, - 0.38289, - 0.45849, - 0.52211, - 0.39902, - 0.40484, - 0.46371, - 0.42504, - 0.61644, - 0.40232, - 0.37125, - 0.43733, - 0.65037, - 0.41577, - 0.42127, - 0.40125, - 0.42634, - 0.40008, - 0.42375, - 0.52799, - 0.41603, - 0.41023, - 0.52821, - 0.50114, - 0.58024, - 0.63016, - 0.45667, - 0.40373, - 0.41419, - 0.44541, - 0.43878, - 0.43471, - 0.50943 - ] - } -} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json new file mode 100644 index 000000000..2353210e1 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json @@ -0,0 +1,8063 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 20000, + "step_interval": 5, + "values": [ + 10.51817, + 10.50697, + 10.54245, + 10.50667, + 9.92479, + 9.60301, + 9.27159, + 9.15922, + 9.1102, + 8.9799, + 8.75283, + 8.49649, + 8.52147, + 8.46628, + 8.33981, + 8.126, + 
8.11512, + 7.80749, + 7.79653, + 7.8064, + 7.45337, + 7.42126, + 7.37001, + 7.35008, + 7.16051, + 7.14867, + 6.98236, + 7.31865, + 7.08964, + 6.84725, + 6.91697, + 6.82774, + 6.81873, + 6.90941, + 6.94075, + 6.89522, + 6.98502, + 6.59654, + 6.63277, + 6.94323, + 6.6785, + 6.80563, + 6.78144, + 6.95029, + 6.97322, + 6.71342, + 6.75433, + 6.77541, + 6.84547, + 6.80697, + 6.70396, + 6.65091, + 6.7526, + 6.61228, + 6.83516, + 6.80936, + 6.79944, + 6.85291, + 6.91914, + 6.53032, + 6.56537, + 6.62259, + 7.02059, + 6.47323, + 6.35438, + 6.50088, + 6.56089, + 6.59465, + 6.78021, + 6.69531, + 6.56238, + 6.56812, + 6.68091, + 6.59664, + 6.41566, + 6.5857, + 6.54195, + 6.58479, + 6.73615, + 6.4443, + 6.54865, + 6.55916, + 6.59845, + 6.43595, + 6.45401, + 6.18586, + 6.49294, + 6.68185, + 6.60608, + 6.559, + 6.19033, + 6.4009, + 6.40274, + 6.57056, + 6.53271, + 6.49194, + 6.36749, + 6.64527, + 6.49944, + 6.45025, + 6.51408, + 6.25955, + 6.63222, + 6.18585, + 6.30021, + 6.26754, + 6.42376, + 6.38336, + 6.3996, + 6.20304, + 6.6971, + 6.28159, + 6.19231, + 6.44574, + 6.78283, + 6.57514, + 6.3222, + 6.45288, + 6.43441, + 6.05597, + 6.55394, + 6.51277, + 6.42845, + 6.43754, + 6.41117, + 6.52694, + 6.04904, + 6.43141, + 6.31829, + 6.38719, + 6.48179, + 6.38679, + 6.15156, + 6.43417, + 6.37958, + 6.19399, + 6.3122, + 6.34221, + 6.27933, + 6.4711, + 6.1234, + 6.49485, + 6.71635, + 6.10516, + 6.17404, + 6.37549, + 6.01451, + 6.41138, + 6.31646, + 6.4248, + 6.21942, + 6.47332, + 6.33059, + 6.31427, + 6.18997, + 6.37343, + 6.50451, + 6.01189, + 6.18301, + 5.92232, + 6.4218, + 6.19402, + 6.44301, + 6.45792, + 6.29853, + 6.23516, + 6.09728, + 6.30322, + 6.54659, + 6.38562, + 6.38736, + 6.18747, + 6.31506, + 6.2397, + 6.39278, + 6.34112, + 6.27398, + 6.31134, + 5.96738, + 6.33133, + 6.10347, + 6.35765, + 6.37403, + 6.27959, + 6.36945, + 6.07987, + 6.23722, + 6.23969, + 6.20518, + 6.33283, + 5.91523, + 6.06771, + 5.8396, + 6.30586, + 6.43435, + 6.33055, + 6.23108, + 6.31522, + 6.14368, + 6.35712, + 6.0813, + 6.38602, + 6.19308, + 6.39707, + 6.26784, + 5.95543, + 6.39075, + 6.24059, + 6.15195, + 6.59246, + 6.23993, + 5.98167, + 6.08794, + 6.22457, + 6.24932, + 6.19731, + 6.41025, + 6.16779, + 6.14702, + 6.3142, + 6.1905, + 6.48519, + 6.22603, + 6.1012, + 6.07963, + 6.07777, + 6.09788, + 6.21642, + 6.06703, + 6.0736, + 6.34331, + 6.13042, + 5.97578, + 6.08952, + 6.01427, + 6.19113, + 6.36768, + 5.90277, + 6.26481, + 6.17568, + 6.30063, + 6.36281, + 6.04123, + 6.22493, + 5.89205, + 6.2712, + 6.22852, + 6.20738, + 6.42681, + 6.24806, + 6.34901, + 6.42603, + 6.21449, + 6.05921, + 6.16218, + 6.10802, + 6.17101, + 6.00663, + 6.3087, + 6.21621, + 6.23808, + 6.35984, + 6.10643, + 6.21751, + 6.32045, + 6.17364, + 6.32778, + 6.11195, + 6.24344, + 6.41059, + 6.17918, + 6.20837, + 6.11848, + 5.81564, + 6.31861, + 6.08424, + 6.29686, + 6.16169, + 6.14986, + 6.3447, + 6.05647, + 6.28571, + 6.42451, + 6.12725, + 5.88995, + 5.97151, + 6.13232, + 6.36328, + 6.32436, + 5.83657, + 6.19237, + 6.13804, + 6.17165, + 6.05564, + 6.05336, + 6.3311, + 6.20131, + 6.25644, + 6.26059, + 6.15301, + 6.09441, + 5.96695, + 6.23876, + 6.40664, + 6.16058, + 6.07392, + 6.34433, + 6.14116, + 6.25574, + 5.85199, + 6.21815, + 6.39583, + 5.99999, + 6.14387, + 6.15051, + 6.25526, + 5.85115, + 6.07627, + 6.00124, + 5.96682, + 5.99723, + 6.23724, + 6.24784, + 6.05465, + 5.94052, + 6.0319, + 6.15907, + 6.35365, + 6.23999, + 6.02366, + 6.17868, + 6.27531, + 6.10036, + 5.99662, + 6.19096, + 5.98736, + 6.06427, + 5.85432, + 6.03222, + 6.06351, + 6.27157, + 
6.08552, + 6.09093, + 5.99386, + 6.25373, + 6.0298, + 6.18881, + 5.93073, + 5.90092, + 6.22774, + 6.02014, + 6.18113, + 5.87635, + 5.76267, + 6.19385, + 6.0271, + 5.80885, + 6.11822, + 6.41123, + 6.15246, + 6.12562, + 6.11515, + 6.11178, + 6.14833, + 6.13696, + 6.0483, + 5.90552, + 5.821, + 6.26382, + 6.03231, + 6.146, + 6.11886, + 6.10893, + 6.16299, + 6.09743, + 6.12602, + 6.03215, + 6.02295, + 6.25967, + 6.1337, + 6.30705, + 6.45111, + 6.05164, + 5.92855, + 6.07976, + 6.18155, + 6.15608, + 6.1541, + 5.93571, + 6.14067, + 5.7221, + 6.23682, + 5.95431, + 5.82749, + 5.807, + 5.95881, + 6.39691, + 5.91315, + 5.96697, + 6.18937, + 6.20403, + 6.25608, + 5.85749, + 6.0781, + 5.90695, + 6.18268, + 6.02446, + 6.15587, + 6.27412, + 5.99697, + 6.08953, + 6.23896, + 6.22791, + 6.08966, + 6.05174, + 6.03454, + 6.02379, + 6.02549, + 6.12694, + 6.15147, + 6.13949, + 5.96208, + 6.039, + 5.93912, + 5.74178, + 6.00726, + 6.05676, + 6.07005, + 5.78401, + 6.18148, + 5.99094, + 6.05439, + 6.0011, + 5.94535, + 5.65689, + 5.90724, + 6.01688, + 5.86744, + 5.84958, + 5.83715, + 5.61111, + 5.93448, + 6.15726, + 6.02414, + 5.76973, + 6.29326, + 6.11649, + 5.83082, + 6.14223, + 6.00111, + 5.98988, + 6.43447, + 5.73371, + 5.91641, + 6.36336, + 6.16274, + 6.28, + 6.09012, + 5.8942, + 6.12913, + 6.01726, + 5.95304, + 5.94608, + 6.09611, + 6.04629, + 6.02524, + 6.10135, + 6.25692, + 5.93219, + 6.05535, + 6.08078, + 6.25733, + 6.10818, + 6.03638, + 6.22702, + 5.81009, + 6.10102, + 5.98953, + 5.84714, + 6.18397, + 6.06079, + 6.2054, + 6.05417, + 5.92869, + 5.84022, + 6.15406, + 5.96206, + 6.06074, + 6.07171, + 5.90473, + 6.0514, + 5.96242, + 6.06422, + 6.14824, + 6.09494, + 5.77827, + 6.3064, + 6.00993, + 6.2371, + 6.02496, + 5.84215, + 6.02974, + 6.14715, + 5.93831, + 6.37739, + 6.13046, + 5.94359, + 6.18319, + 5.93852, + 5.95794, + 5.85023, + 6.19997, + 5.99258, + 6.10812, + 5.94916, + 6.18755, + 5.96491, + 5.8899, + 6.17812, + 5.96364, + 6.10578, + 6.11038, + 5.97466, + 6.00693, + 5.98535, + 6.18803, + 5.96577, + 6.0219, + 6.0942, + 6.10419, + 6.13657, + 6.06244, + 5.87461, + 6.19408, + 6.12413, + 5.77577, + 6.08653, + 5.96586, + 6.06471, + 6.07338, + 5.84106, + 5.98622, + 5.97016, + 6.02866, + 6.01132, + 5.88509, + 6.00115, + 6.14698, + 6.02431, + 6.03975, + 6.0098, + 6.01558, + 6.1797, + 6.20138, + 5.95864, + 5.96013, + 6.04125, + 5.87593, + 5.80975, + 6.17579, + 6.17304, + 5.78979, + 6.25387, + 5.93408, + 5.93671, + 6.30197, + 6.12889, + 5.90932, + 6.11098, + 6.04489, + 6.05513, + 5.9135, + 6.06193, + 6.10079, + 6.10188, + 5.85069, + 5.8413, + 5.89402, + 6.26349, + 6.04118, + 6.08565, + 6.065, + 6.13269, + 6.11291, + 5.86254, + 6.10467, + 6.05387, + 5.94895, + 6.1818, + 6.05343, + 6.02384, + 5.9609, + 6.21701, + 6.09864, + 5.79897, + 6.20999, + 6.12097, + 5.83995, + 5.78299, + 6.20008, + 6.16731, + 6.10642, + 6.32568, + 6.13099, + 5.8644, + 6.14147, + 5.7461, + 5.63084, + 5.82654, + 6.26232, + 6.0985, + 5.92978, + 6.10104, + 6.12813, + 6.23907, + 5.88807, + 6.34628, + 6.06435, + 6.05448, + 6.07128, + 5.93676, + 6.03108, + 5.89012, + 6.1816, + 6.09598, + 6.12548, + 5.88057, + 5.87118, + 5.81435, + 6.09769, + 6.01679, + 5.93883, + 6.0273, + 6.0164, + 5.89597, + 6.17274, + 5.73088, + 6.28675, + 5.98412, + 6.21755, + 5.74064, + 6.06264, + 6.2111, + 6.18387, + 5.83547, + 5.99602, + 5.98562, + 5.92462, + 5.90849, + 6.06777, + 5.9088, + 6.0204, + 5.6665, + 5.80911, + 5.96813, + 6.23178, + 5.82357, + 6.05969, + 5.84712, + 6.04017, + 5.96287, + 5.90165, + 5.79747, + 5.91486, + 5.91607, + 6.02435, + 5.98636, + 5.86205, + 
6.17819, + 5.63541, + 5.73696, + 6.11451, + 5.97651, + 6.07753, + 6.06145, + 6.08863, + 6.29546, + 6.02292, + 6.03794, + 5.85776, + 5.79737, + 6.06528, + 5.74563, + 6.05699, + 6.12658, + 5.92117, + 6.13579, + 5.54065, + 5.76269, + 5.87993, + 5.91242, + 6.03735, + 5.92272, + 6.09372, + 5.8169, + 5.86553, + 5.86954, + 5.76153, + 6.09647, + 5.73825, + 6.23511, + 6.06764, + 5.71329, + 6.21079, + 5.9418, + 6.12618, + 5.80646, + 6.14399, + 6.17109, + 5.9638, + 6.07147, + 5.87998, + 5.98958, + 6.10486, + 5.94009, + 5.98863, + 6.06121, + 6.25642, + 6.01759, + 5.86526, + 5.74566, + 6.16195, + 6.10693, + 6.05532, + 6.02885, + 5.78566, + 5.87564, + 5.83874, + 5.62324, + 5.81889, + 6.08758, + 5.88765, + 5.81942, + 6.04841, + 5.99598, + 5.95132, + 6.08819, + 6.26621, + 6.02789, + 5.84812, + 5.90048, + 5.7218, + 5.95754, + 6.01512, + 5.79566, + 5.89034, + 5.86056, + 5.9712, + 5.89064, + 5.73494, + 5.98824, + 6.00045, + 6.00537, + 5.99502, + 6.06507, + 5.84488, + 6.03438, + 5.71394, + 5.86569, + 5.91636, + 5.81769, + 5.67685, + 6.03505, + 5.49676, + 6.02789, + 5.90114, + 5.69273, + 6.04561, + 5.8742, + 6.11631, + 5.70595, + 6.10092, + 6.03107, + 6.12552, + 6.08357, + 5.87592, + 5.95572, + 6.14525, + 5.91104, + 6.02733, + 6.1637, + 6.03623, + 6.00631, + 5.81493, + 5.77306, + 5.90989, + 5.86642, + 5.92262, + 5.83316, + 6.01167, + 5.9438, + 6.0537, + 5.95341, + 6.09256, + 5.74826, + 5.76917, + 6.02621, + 6.03644, + 6.0784, + 5.95486, + 5.87948, + 6.03272, + 5.94087, + 6.08934, + 6.09997, + 5.9177, + 5.77976, + 5.89886, + 5.7164, + 6.01999, + 5.98272, + 5.78219, + 5.80691, + 5.85284, + 5.84277, + 5.95625, + 5.81189, + 6.05099, + 6.06015, + 5.75557, + 5.97108, + 5.81367, + 6.09467, + 5.96639, + 5.76024, + 5.9028, + 5.77803, + 6.05656, + 5.85214, + 6.00212, + 6.04935, + 5.72926, + 5.8153, + 5.91811, + 5.9014, + 5.56556, + 5.83749, + 5.76485, + 5.87879, + 5.93373, + 6.06735, + 6.03101, + 6.09616, + 6.04688, + 5.92916, + 5.86993, + 5.7176, + 5.86549, + 5.95245, + 5.69993, + 5.93455, + 5.69702, + 5.88953, + 5.94726, + 5.88734, + 5.93859, + 5.82601, + 5.9819, + 5.98518, + 5.84135, + 5.82831, + 6.04323, + 5.98497, + 6.02173, + 5.84704, + 5.83521, + 6.01448, + 5.87788, + 6.06302, + 6.01489, + 5.86304, + 6.17774, + 5.78696, + 5.86811, + 5.91998, + 5.71957, + 6.04416, + 6.02449, + 5.8539, + 5.88979, + 5.93267, + 5.87023, + 5.9243, + 5.92837, + 5.68343, + 5.85726, + 5.87625, + 5.99757, + 5.86586, + 6.01434, + 6.05585, + 5.79117, + 5.69103, + 5.76513, + 6.1054, + 5.90205, + 5.71626, + 5.72425, + 5.96747, + 5.78541, + 5.7318, + 5.9825, + 6.06086, + 5.85327, + 6.05739, + 5.90233, + 5.9151, + 5.70958, + 6.20464, + 5.88365, + 5.74122, + 5.77504, + 5.91744, + 6.03886, + 6.01076, + 5.96969, + 5.92302, + 6.06975, + 5.91473, + 5.95218, + 5.83588, + 5.58634, + 5.84976, + 6.1213, + 6.15442, + 5.85942, + 5.94779, + 5.99031, + 6.00633, + 5.95967, + 5.89928, + 6.01925, + 5.88478, + 5.94224, + 5.91401, + 5.82956, + 5.82824, + 5.83868, + 5.83117, + 5.87794, + 6.0331, + 5.89646, + 6.05464, + 5.86751, + 5.77017, + 5.81422, + 5.77389, + 5.86271, + 5.84156, + 6.12881, + 5.7815, + 6.00807, + 6.09046, + 5.9379, + 5.88377, + 5.94251, + 5.91166, + 5.92921, + 5.89292, + 5.96918, + 5.55188, + 5.76032, + 5.67902, + 5.84015, + 5.73224, + 5.94588, + 5.43833, + 5.84906, + 5.84235, + 5.77496, + 6.00021, + 5.77369, + 5.69096, + 6.11037, + 5.8926, + 5.69087, + 5.73564, + 5.9196, + 6.02277, + 6.0821, + 5.73689, + 6.06767, + 5.68134, + 5.88726, + 5.76632, + 5.94122, + 5.85097, + 6.06624, + 5.78789, + 6.12634, + 5.7086, + 5.74157, + 6.00467, + 6.06798, + 
6.25098, + 5.84732, + 5.81206, + 5.87449, + 5.93454, + 5.5304, + 6.02019, + 6.01734, + 5.86044, + 5.99006, + 6.12051, + 5.89547, + 6.08783, + 5.98881, + 5.50672, + 5.65035, + 6.05277, + 5.79633, + 5.7667, + 5.80437, + 5.93654, + 6.02751, + 5.76962, + 5.88305, + 5.69771, + 5.90861, + 6.096, + 6.10885, + 6.02175, + 5.87293, + 5.85626, + 5.74448, + 5.88746, + 5.76223, + 5.97301, + 5.95833, + 6.07221, + 5.56389, + 5.74472, + 5.82477, + 5.9365, + 5.73817, + 5.49313, + 5.78058, + 5.9239, + 5.96589, + 6.12467, + 5.89207, + 5.79991, + 5.70344, + 5.95456, + 6.17915, + 6.17869, + 5.74695, + 5.91135, + 6.03182, + 5.90523, + 5.99983, + 5.67873, + 5.68088, + 6.01449, + 5.85001, + 6.18222, + 5.80411, + 5.80382, + 5.84815, + 5.96831, + 5.90235, + 6.03294, + 6.05113, + 6.14595, + 5.80833, + 5.96028, + 5.65118, + 5.85271, + 5.8623, + 6.07333, + 5.6907, + 5.91971, + 6.02173, + 5.96661, + 6.09506, + 5.72175, + 5.96678, + 5.88797, + 5.92198, + 5.49269, + 5.88569, + 5.96455, + 6.01671, + 5.70527, + 5.75155, + 5.78047, + 5.84001, + 5.86736, + 5.84501, + 5.83254, + 5.93259, + 6.02108, + 5.94471, + 6.12619, + 6.04959, + 5.78407, + 5.66789, + 6.11476, + 5.87561, + 5.91178, + 5.73906, + 5.93146, + 5.98557, + 6.09548, + 5.74059, + 5.98117, + 5.91247, + 5.93101, + 5.84936, + 5.69119, + 5.86238, + 5.89403, + 5.67395, + 5.88732, + 5.84461, + 5.67952, + 5.81781, + 5.80892, + 5.73643, + 5.94271, + 5.99453, + 5.71643, + 5.78788, + 5.97038, + 6.035, + 5.83654, + 5.91245, + 5.82831, + 5.43351, + 6.11724, + 5.63003, + 5.76819, + 5.73018, + 5.82327, + 5.93817, + 5.7622, + 6.00721, + 5.84835, + 5.82843, + 6.06111, + 6.00835, + 5.71861, + 5.86418, + 5.87246, + 5.8283, + 5.84512, + 5.7291, + 5.85626, + 6.00548, + 5.68508, + 5.72271, + 5.95573, + 5.91411, + 5.77567, + 5.97971, + 6.01619, + 5.94789, + 6.04235, + 5.92623, + 5.82736, + 6.03855, + 5.80717, + 5.82134, + 5.86947, + 5.94254, + 6.10217, + 5.87591, + 5.65855, + 5.91821, + 6.13018, + 5.63911, + 5.79941, + 5.77977, + 5.74167, + 5.79741, + 5.80638, + 5.86412, + 5.74558, + 5.8795, + 5.84981, + 5.94432, + 5.55934, + 5.92196, + 5.76573, + 6.16785, + 5.87734, + 5.60914, + 5.82916, + 5.85576, + 5.93431, + 6.04834, + 6.01633, + 5.94011, + 5.93521, + 5.79534, + 5.79225, + 5.68445, + 5.64982, + 5.79235, + 5.98056, + 6.054, + 5.91754, + 6.05105, + 5.73838, + 5.719, + 5.77888, + 5.72269, + 5.9901, + 5.91495, + 5.871, + 6.04414, + 6.01798, + 5.87393, + 6.15308, + 5.89919, + 6.2463, + 5.85094, + 5.99511, + 5.71773, + 5.97943, + 5.92089, + 5.92193, + 6.20199, + 5.87681, + 6.05154, + 5.99758, + 5.89011, + 5.57193, + 6.02664, + 5.99426, + 5.73991, + 5.92144, + 5.58033, + 5.80556, + 5.9772, + 5.80375, + 5.63945, + 5.75142, + 5.55072, + 5.53673, + 5.84958, + 5.61298, + 5.90347, + 5.75528, + 5.93477, + 5.62974, + 5.76581, + 5.81259, + 5.86702, + 6.07998, + 5.80322, + 5.91904, + 5.69643, + 5.91703, + 5.92627, + 5.6317, + 5.94898, + 5.30188, + 5.97203, + 5.75757, + 5.97019, + 5.97553, + 5.75687, + 5.93316, + 5.76571, + 5.73225, + 6.0253, + 5.80417, + 5.707, + 5.93621, + 5.69593, + 5.76353, + 6.03185, + 5.97027, + 5.82503, + 6.04874, + 5.74024, + 5.67189, + 5.91949, + 5.64414, + 5.86914, + 5.83681, + 5.91871, + 5.73788, + 5.85618, + 5.82104, + 5.99048, + 5.85878, + 5.94137, + 5.83757, + 5.91765, + 5.81586, + 5.92403, + 5.87708, + 5.77047, + 5.86524, + 6.15844, + 5.9869, + 5.97434, + 5.92558, + 5.7892, + 5.84703, + 5.88695, + 5.68735, + 5.86599, + 5.75874, + 5.81679, + 5.79944, + 5.73223, + 5.81132, + 5.79908, + 5.8077, + 5.95727, + 5.83627, + 5.91199, + 5.6967, + 6.04695, + 5.94184, + 5.73485, + 
5.72855, + 5.81908, + 5.73976, + 5.92564, + 5.77489, + 5.95665, + 5.52984, + 5.70867, + 5.73005, + 5.98513, + 6.05166, + 5.94071, + 5.97337, + 5.86712, + 5.61517, + 5.77487, + 6.05967, + 6.02391, + 5.73958, + 5.7498, + 5.85126, + 6.03855, + 5.92835, + 5.88963, + 5.772, + 5.85759, + 5.60436, + 5.92853, + 5.78997, + 5.59679, + 5.9911, + 5.71415, + 5.93715, + 6.13991, + 5.5862, + 5.8774, + 6.11598, + 5.80606, + 5.62792, + 5.78293, + 5.90434, + 5.94513, + 5.69461, + 5.94406, + 5.8935, + 5.73361, + 5.79636, + 6.03205, + 5.90509, + 5.58558, + 6.01558, + 5.88857, + 5.77436, + 5.94823, + 5.85871, + 6.0355, + 5.75707, + 5.79768, + 5.67636, + 5.7253, + 5.88153, + 5.92901, + 5.39763, + 5.92955, + 5.68024, + 5.92206, + 5.83913, + 5.80502, + 5.76125, + 6.06211, + 5.86988, + 5.93483, + 5.8253, + 5.81727, + 5.95184, + 5.95516, + 5.85508, + 6.00283, + 5.82047, + 5.81943, + 5.86427, + 5.87532, + 5.8348, + 5.8545, + 5.93766, + 5.378, + 5.73824, + 5.74601, + 5.85273, + 5.82394, + 5.57251, + 5.82922, + 5.69758, + 5.99377, + 5.8443, + 5.91771, + 5.78867, + 5.65071, + 5.8881, + 5.75031, + 5.94389, + 5.89038, + 5.81134, + 5.96824, + 5.61951, + 5.75301, + 5.63601, + 5.72601, + 5.82447, + 6.01421, + 5.79561, + 5.80435, + 5.88217, + 5.88077, + 5.88073, + 5.61679, + 5.54178, + 5.87395, + 5.84007, + 5.82206, + 5.97586, + 5.72593, + 5.89843, + 5.9867, + 5.49935, + 5.68226, + 5.90707, + 5.82196, + 5.80617, + 6.01033, + 5.78375, + 5.69943, + 5.62976, + 5.81089, + 5.73651, + 5.97377, + 6.04683, + 5.70847, + 5.62338, + 5.93473, + 5.68378, + 5.87929, + 6.07437, + 5.58913, + 5.5587, + 5.95788, + 5.80927, + 5.81975, + 5.84129, + 5.93355, + 5.83822, + 5.56277, + 5.80884, + 5.71109, + 6.06421, + 5.53857, + 5.90978, + 5.97326, + 5.77918, + 5.81896, + 5.81587, + 5.50322, + 5.79004, + 5.68049, + 5.50592, + 5.59198, + 5.93173, + 5.59016, + 5.67392, + 5.79619, + 5.87002, + 6.03378, + 6.0934, + 5.5528, + 5.80135, + 5.63105, + 5.938, + 5.82999, + 6.01797, + 5.69501, + 5.61144, + 5.89177, + 6.08708, + 5.82596, + 5.49735, + 5.74006, + 5.99862, + 5.74806, + 6.1095, + 5.66165, + 5.71547, + 5.6484, + 5.78283, + 5.5931, + 5.9062, + 5.67977, + 5.31654, + 5.57789, + 5.78487, + 6.00066, + 5.73366, + 5.61612, + 5.97542, + 5.61031, + 5.81081, + 5.80517, + 6.00054, + 5.92824, + 5.56937, + 5.86793, + 5.64913, + 5.77547, + 5.62121, + 5.79237, + 5.76751, + 5.48263, + 6.12654, + 5.81921, + 5.55478, + 5.67251, + 5.85506, + 5.91582, + 5.85987, + 5.7451, + 5.6288, + 5.9358, + 5.77117, + 5.87969, + 5.68693, + 5.54155, + 5.46948, + 5.92449, + 5.69578, + 5.61774, + 5.91407, + 5.99281, + 5.7242, + 6.02733, + 5.83353, + 5.8941, + 5.90845, + 5.58274, + 5.90239, + 5.73442, + 5.76793, + 5.5455, + 5.80091, + 5.57495, + 5.93329, + 5.32212, + 5.69693, + 6.00364, + 5.84634, + 5.49144, + 5.70317, + 5.96304, + 5.75659, + 5.90796, + 5.46461, + 5.82196, + 5.70382, + 5.89507, + 5.85437, + 5.75404, + 5.7554, + 5.87031, + 5.59845, + 5.84484, + 5.4662, + 5.95048, + 5.6778, + 5.76869, + 5.6736, + 5.72082, + 5.72414, + 5.81206, + 5.56189, + 5.96838, + 5.90296, + 5.55599, + 5.86036, + 5.81815, + 5.87567, + 5.8659, + 5.83868, + 5.8297, + 5.96301, + 5.6167, + 5.71097, + 5.86768, + 5.60405, + 5.73223, + 5.84023, + 5.7564, + 5.8207, + 5.81478, + 5.46125, + 5.76515, + 5.87999, + 5.90936, + 5.83261, + 5.89529, + 5.76316, + 5.7638, + 5.47661, + 5.8634, + 5.61013, + 5.72378, + 5.75599, + 5.81251, + 6.0351, + 5.84867, + 5.87368, + 5.82237, + 5.70847, + 5.71423, + 5.95109, + 5.82724, + 5.78444, + 5.75695, + 5.69541, + 5.98377, + 5.54576, + 5.86877, + 5.81308, + 5.52578, + 5.47295, + 
5.29252, + 5.73054, + 5.70435, + 5.89061, + 5.71961, + 6.18811, + 5.64285, + 5.75957, + 5.93835, + 5.52125, + 5.42426, + 5.75271, + 5.73761, + 5.98976, + 5.58229, + 5.7084, + 5.60565, + 5.64709, + 5.85746, + 5.99712, + 5.62785, + 5.70429, + 5.62972, + 5.649, + 5.68113, + 5.75792, + 5.70403, + 5.69472, + 5.66492, + 5.57693, + 5.65648, + 5.56991, + 5.88348, + 5.67161, + 5.73256, + 5.92812, + 5.56846, + 5.46481, + 5.80872, + 5.83126, + 5.7754, + 5.89272, + 5.54325, + 5.57892, + 5.71277, + 5.87338, + 5.70907, + 5.67721, + 5.51086, + 5.85753, + 5.76377, + 5.75087, + 5.90718, + 5.63706, + 5.8155, + 5.83352, + 5.8482, + 5.67357, + 5.63407, + 5.59035, + 5.71877, + 5.47683, + 5.74627, + 5.42606, + 5.73645, + 5.55478, + 5.95138, + 5.48409, + 5.54159, + 5.99212, + 5.52026, + 5.26822, + 5.64829, + 5.9037, + 5.55651, + 5.77397, + 5.64556, + 5.82035, + 5.73169, + 5.44745, + 5.65008, + 5.83118, + 5.82984, + 5.72634, + 5.64323, + 5.65479, + 5.74833, + 5.60132, + 5.47233, + 5.74113, + 5.63439, + 5.60235, + 5.44416, + 5.48049, + 5.58994, + 5.66653, + 5.66043, + 5.79726, + 5.70997, + 5.78961, + 5.62937, + 5.56678, + 5.80482, + 5.71759, + 5.78356, + 5.743, + 5.84223, + 5.42644, + 5.63196, + 5.80348, + 5.49088, + 5.826, + 5.52771, + 5.48095, + 5.35392, + 5.50077, + 5.3596, + 5.33064, + 5.86532, + 5.84238, + 5.57801, + 5.69746, + 5.74569, + 5.46517, + 5.50377, + 5.65439, + 5.63352, + 5.37607, + 5.5011, + 5.71651, + 5.90336, + 5.66397, + 5.73206, + 5.6508, + 5.52432, + 5.30448, + 5.81099, + 5.76475, + 5.56978, + 5.86827, + 5.51776, + 5.73968, + 5.59452, + 5.66373, + 5.55969, + 5.76577, + 5.91615, + 5.56708, + 5.74735, + 5.60566, + 5.35345, + 5.7854, + 5.76588, + 5.80156, + 5.74362, + 5.65695, + 5.73585, + 5.69036, + 5.57686, + 5.77655, + 5.62383, + 5.81772, + 5.75568, + 5.43952, + 5.6666, + 5.43186, + 5.65536, + 5.47906, + 5.63328, + 5.40467, + 5.66207, + 5.49452, + 5.43046, + 5.37363, + 5.54146, + 5.81395, + 5.52932, + 5.51237, + 5.3286, + 5.78025, + 5.81219, + 5.67441, + 5.64227, + 5.62336, + 5.60404, + 5.58174, + 5.59439, + 5.65366, + 5.39794, + 5.68567, + 5.40278, + 5.58909, + 5.71938, + 5.6502, + 5.617, + 5.77397, + 5.47779, + 5.56019, + 5.38541, + 5.32017, + 5.57065, + 5.85876, + 5.69156, + 5.61595, + 5.66446, + 5.82477, + 5.76422, + 5.74248, + 5.53179, + 5.42022, + 5.49126, + 5.5432, + 5.55075, + 5.6735, + 5.74431, + 5.73108, + 5.53347, + 5.47832, + 5.78369, + 5.63811, + 5.66957, + 5.58212, + 5.61234, + 5.56783, + 5.73898, + 5.17077, + 5.29027, + 5.28486, + 5.42042, + 5.65544, + 5.52742, + 5.69398, + 5.25064, + 5.29141, + 5.60403, + 5.51356, + 5.69282, + 5.60921, + 5.75197, + 5.39797, + 5.54715, + 5.59264, + 5.50544, + 5.74403, + 5.58659, + 5.73969, + 5.42799, + 5.71356, + 5.53956, + 5.2957, + 5.48232, + 5.49809, + 5.67207, + 5.50522, + 5.45096, + 5.39666, + 5.45412, + 5.62721, + 5.55272, + 5.73106, + 5.61996, + 5.36752, + 5.47768, + 5.84356, + 5.50586, + 5.50929, + 5.75589, + 5.81358, + 5.24376, + 5.3289, + 5.35628, + 5.39986, + 5.61486, + 5.6138, + 5.18214, + 5.51438, + 5.60589, + 5.44436, + 5.64708, + 5.50689, + 5.39556, + 5.76281, + 5.41118, + 5.57928, + 5.57219, + 5.49241, + 5.18128, + 5.47572, + 5.4267, + 5.60438, + 5.53136, + 5.57904, + 5.48748, + 5.59556, + 5.62021, + 5.33214, + 5.56346, + 5.31297, + 5.33727, + 5.14609, + 5.47305, + 5.69699, + 5.60172, + 5.52302, + 5.90634, + 5.52441, + 5.44089, + 5.40369, + 5.61849, + 5.30077, + 5.42964, + 5.69667, + 5.48485, + 5.5569, + 5.46049, + 5.452, + 5.45372, + 5.46275, + 5.07789, + 5.34791, + 5.48665, + 5.53812, + 5.26858, + 5.59704, + 5.53699, + 5.53245, 
+ 5.29146, + 5.52025, + 5.42498, + 5.56623, + 5.33484, + 5.38538, + 5.43149, + 5.48089, + 5.45807, + 5.23074, + 5.44418, + 5.49082, + 5.56671, + 5.45221, + 5.83609, + 5.52985, + 5.26792, + 5.27749, + 5.58115, + 5.39591, + 5.63925, + 5.55577, + 5.65961, + 5.18139, + 5.6515, + 5.4231, + 5.33857, + 5.25229, + 5.27869, + 5.27201, + 5.45623, + 5.62906, + 5.29797, + 5.40776, + 5.35209, + 5.31923, + 5.66727, + 5.43877, + 5.33801, + 5.58614, + 5.46001, + 5.22625, + 5.46325, + 5.33833, + 5.40649, + 5.54292, + 5.6152, + 5.68297, + 5.39826, + 5.51364, + 5.49285, + 5.32128, + 5.52947, + 5.42864, + 5.54477, + 5.43745, + 5.29185, + 5.67558, + 5.54092, + 5.51634, + 5.42958, + 5.34685, + 5.34374, + 5.32932, + 5.47149, + 5.4214, + 5.55439, + 5.30149, + 5.43681, + 5.27134, + 5.43216, + 5.48044, + 5.53087, + 5.5032, + 5.55384, + 5.3391, + 5.49206, + 5.41623, + 5.52624, + 5.59869, + 5.22, + 5.3715, + 5.62166, + 5.45451, + 5.28584, + 5.50569, + 5.51017, + 5.4466, + 5.13754, + 5.44868, + 5.18499, + 5.46024, + 5.23826, + 5.42544, + 5.25092, + 5.55384, + 5.30178, + 5.28058, + 5.37146, + 5.59456, + 5.18002, + 5.27799, + 5.15724, + 5.31095, + 5.37193, + 5.54516, + 5.49711, + 5.24965, + 5.21013, + 5.57767, + 5.2507, + 5.4933, + 5.32102, + 5.10858, + 5.53542, + 5.36511, + 4.71173, + 5.51204, + 5.22079, + 5.33625, + 5.44288, + 5.18746, + 5.28881, + 5.27271, + 5.48616, + 5.37204, + 5.5184, + 5.06015, + 5.41652, + 5.35428, + 5.1541, + 5.34309, + 5.37151, + 5.46503, + 4.85724, + 5.26728, + 5.55824, + 5.2262, + 5.53201, + 5.45214, + 5.22074, + 5.42692, + 5.68887, + 5.35381, + 5.55141, + 5.3241, + 5.41281, + 5.11551, + 5.40312, + 5.21171, + 5.25316, + 5.3392, + 5.05048, + 5.35847, + 5.42669, + 5.56858, + 5.1747, + 5.46602, + 5.75666, + 5.32427, + 5.30176, + 5.63527, + 4.97713, + 5.26137, + 5.32693, + 5.2639, + 5.08794, + 5.18969, + 5.31055, + 5.20447, + 5.01636, + 5.15223, + 5.32107, + 5.77956, + 5.32862, + 5.38851, + 5.28772, + 5.30779, + 5.10187, + 5.23964, + 5.46528, + 5.14392, + 5.46838, + 5.45809, + 5.28989, + 5.51445, + 5.52868, + 5.02213, + 5.36721, + 5.40146, + 5.11598, + 5.40436, + 5.34648, + 5.21502, + 5.5097, + 5.34349, + 5.41626, + 5.42903, + 5.28654, + 5.19858, + 5.25407, + 5.22389, + 5.1878, + 5.52696, + 5.31761, + 5.32592, + 5.34449, + 5.30384, + 5.29588, + 5.06043, + 5.36704, + 5.38289, + 5.3147, + 5.12446, + 5.30151, + 5.23061, + 5.40578, + 5.32178, + 5.5677, + 5.2172, + 5.36517, + 5.04721, + 5.48196, + 5.11675, + 5.30977, + 5.35277, + 5.31389, + 5.03331, + 4.91443, + 5.16695, + 5.15749, + 5.25002, + 5.39032, + 5.41513, + 5.46878, + 5.10841, + 5.23591, + 5.13587, + 5.10942, + 5.34008, + 5.19869, + 5.43464, + 5.21271, + 5.24229, + 5.33876, + 5.10147, + 4.9879, + 5.15545, + 5.17442, + 5.36629, + 5.1683, + 5.31321, + 5.12776, + 5.20052, + 5.4809, + 5.41782, + 5.50602, + 5.32078, + 5.3394, + 5.33153, + 5.50257, + 5.38825, + 5.1136, + 5.27785, + 5.27292, + 5.19409, + 5.26564, + 5.33936, + 5.02114, + 5.26253, + 5.09193, + 5.23216, + 5.06008, + 4.86054, + 5.11267, + 5.59441, + 5.14097, + 5.23948, + 5.33491, + 5.43153, + 4.98945, + 5.17786, + 5.31712, + 5.34861, + 5.18015, + 5.31518, + 5.30742, + 5.39912, + 5.08969, + 5.17411, + 5.29569, + 5.24149, + 5.26019, + 5.32662, + 5.31137, + 5.4418, + 5.31443, + 5.66082, + 4.93711, + 4.87331, + 5.38169, + 4.92414, + 5.26322, + 5.24007, + 5.39664, + 5.10697, + 5.08402, + 5.11854, + 5.09357, + 5.09955, + 5.35863, + 5.27392, + 4.97619, + 5.308, + 5.17195, + 5.38842, + 5.35411, + 5.12821, + 5.11117, + 5.3141, + 5.05127, + 5.35491, + 5.28986, + 5.09619, + 5.28657, + 4.93423, + 
5.07337, + 5.20424, + 5.19875, + 5.39102, + 5.53801, + 5.5996, + 5.30026, + 5.06866, + 5.21347, + 5.2345, + 5.34677, + 5.45026, + 5.23945, + 5.17821, + 5.2652, + 5.42398, + 5.11507, + 4.84804, + 5.06659, + 5.35822, + 5.35681, + 5.1749, + 4.89166, + 5.35909, + 5.16128, + 5.31103, + 5.40746, + 5.01967, + 5.07468, + 5.35477, + 4.92901, + 5.18326, + 5.30188, + 5.25777, + 5.06153, + 5.34074, + 5.01921, + 5.22785, + 5.33062, + 5.28423, + 5.35566, + 5.12203, + 4.87548, + 5.30273, + 5.26406, + 5.19015, + 5.25912, + 5.40361, + 5.04088, + 5.06439, + 5.21639, + 4.81718, + 5.26005, + 5.14982, + 5.10204, + 4.87488, + 5.26706, + 5.34184, + 5.03559, + 5.16921, + 5.09201, + 5.34235, + 5.04492, + 5.51481, + 5.21303, + 5.25327, + 5.29198, + 5.15068, + 5.19809, + 5.01813, + 5.21644, + 5.32524, + 5.32909, + 5.19627, + 5.13819, + 5.04436, + 5.27149, + 5.39707, + 5.32266, + 5.05586, + 5.28163, + 5.12252, + 5.09511, + 5.12202, + 5.25741, + 5.06226, + 5.10673, + 5.30161, + 5.64094, + 4.75382, + 4.94014, + 4.86893, + 5.11161, + 5.2992, + 5.05462, + 5.21631, + 5.25319, + 5.12557, + 5.09663, + 5.11625, + 5.25184, + 5.25183, + 5.12146, + 5.32237, + 5.27572, + 5.18663, + 5.44772, + 4.98199, + 5.13069, + 4.8904, + 5.26643, + 5.28753, + 5.16967, + 5.02555, + 5.06744, + 5.13618, + 5.60073, + 5.25329, + 5.23131, + 5.17239, + 5.2802, + 5.0492, + 5.2336, + 5.21103, + 5.0782, + 5.07578, + 5.27828, + 5.20161, + 5.17359, + 5.34911, + 5.56614, + 5.02903, + 5.27066, + 5.26847, + 5.12645, + 5.05682, + 5.31035, + 5.1279, + 5.35036, + 5.28608, + 4.98388, + 4.91951, + 4.97147, + 5.17543, + 5.42239, + 5.33696, + 5.32573, + 5.28952, + 4.99793, + 5.03698, + 5.05609, + 5.18092, + 5.25405, + 5.05309, + 4.98282, + 5.14047, + 4.95812, + 5.19651, + 5.36928, + 5.26988, + 5.11472, + 5.07285, + 5.19385, + 4.95, + 4.88092, + 5.08328, + 5.10312, + 5.03417, + 5.00403, + 5.36209, + 5.23387, + 5.15096, + 5.2094, + 5.09823, + 5.14726, + 5.34523, + 5.19852, + 5.32363, + 5.06802, + 5.06118, + 5.34192, + 5.39855, + 5.06357, + 5.08979, + 5.16987, + 5.08755, + 5.3038, + 4.78285, + 5.28166, + 5.44891, + 5.37895, + 5.18097, + 4.8459, + 4.96273, + 5.22204, + 5.29273, + 5.01692, + 5.10067, + 4.99983, + 5.18615, + 4.91466, + 5.07543, + 5.35625, + 5.23361, + 4.91442, + 5.27039, + 5.22696, + 5.03862, + 5.33039, + 5.19666, + 5.14329, + 5.15978, + 5.06526, + 5.07196, + 4.92824, + 5.21493, + 4.87279, + 5.11686, + 4.72383, + 4.76061, + 5.17244, + 5.19503, + 4.82076, + 5.07406, + 5.22216, + 5.22409, + 5.12517, + 5.14265, + 5.10973, + 4.92948, + 4.71399, + 5.05252, + 4.95447, + 5.04924, + 4.81134, + 5.02118, + 5.18932, + 5.31945, + 5.18727, + 5.02452, + 5.00977, + 5.20673, + 5.07912, + 4.84976, + 5.13559, + 4.9962, + 5.10494, + 5.01237, + 5.06375, + 5.17279, + 4.8862, + 5.21022, + 4.88218, + 5.1434, + 4.94841, + 5.06916, + 4.96878, + 5.11254, + 5.09921, + 4.94326, + 5.49375, + 5.10647, + 4.69007, + 5.31173, + 5.00468, + 5.2713, + 5.1166, + 5.01493, + 4.8162, + 5.24698, + 5.00906, + 5.19491, + 5.36891, + 5.31876, + 5.13686, + 5.06037, + 5.13931, + 5.10946, + 5.14347, + 5.18842, + 4.85183, + 5.12737, + 4.88633, + 5.05568, + 4.68849, + 4.81501, + 4.92576, + 4.84922, + 5.15192, + 4.82015, + 5.16202, + 5.22041, + 5.37737, + 5.07956, + 5.35763, + 5.00798, + 5.2017, + 4.9788, + 5.08903, + 5.1426, + 4.90204, + 5.15237, + 4.95937, + 4.93282, + 4.92471, + 5.26827, + 5.07379, + 5.06729, + 4.92603, + 5.11726, + 4.92719, + 5.12496, + 5.34107, + 4.99549, + 5.17694, + 4.82681, + 5.01582, + 4.84362, + 4.9221, + 5.04538, + 5.23487, + 5.05967, + 4.82045, + 5.01152, + 4.71046, + 
5.18505, + 4.77454, + 5.06829, + 4.85174, + 4.98717, + 5.03624, + 5.16996, + 5.0774, + 5.21395, + 4.91876, + 4.93876, + 5.04977, + 4.9806, + 5.29482, + 4.96882, + 4.96496, + 4.66948, + 5.25628, + 4.98788, + 4.94659, + 5.03207, + 5.11041, + 5.14139, + 5.09407, + 5.05772, + 4.97315, + 5.13327, + 5.2315, + 5.07239, + 4.85819, + 5.01047, + 5.13299, + 5.21575, + 4.89224, + 4.9342, + 5.1189, + 4.84132, + 4.80748, + 5.21088, + 4.96589, + 4.97416, + 5.16597, + 5.25251, + 5.03592, + 4.83475, + 5.02735, + 4.93159, + 5.05248, + 5.17543, + 4.80193, + 5.1131, + 4.90378, + 4.85971, + 5.0546, + 5.04334, + 5.27759, + 4.92365, + 4.89075, + 5.16811, + 5.01965, + 5.06456, + 5.14603, + 5.16879, + 5.09529, + 5.10454, + 5.05635, + 4.53411, + 5.07558, + 4.82818, + 4.88269, + 4.7988, + 4.68321, + 4.74254, + 4.9743, + 4.62914, + 5.12113, + 4.73134, + 4.93406, + 4.90908, + 4.99734, + 5.01593, + 5.1358, + 5.01363, + 4.77115, + 5.01894, + 5.06754, + 4.73138, + 4.80455, + 5.09105, + 5.10281, + 4.95376, + 4.8858, + 5.02813, + 4.99256, + 4.96902, + 5.093, + 5.02664, + 5.29191, + 4.78074, + 4.87302, + 5.10413, + 4.66668, + 4.82994, + 4.92253, + 4.83069, + 5.08006, + 5.0081, + 4.87278, + 5.15447, + 5.10193, + 4.79101, + 4.97045, + 4.54486, + 5.10066, + 4.98344, + 5.0343, + 4.87791, + 5.21634, + 4.73051, + 5.03258, + 4.93226, + 5.17863, + 5.13533, + 4.82572, + 4.91473, + 4.76871, + 5.21024, + 4.89084, + 5.08113, + 4.84413, + 4.44255, + 4.9425, + 5.08367, + 4.7724, + 5.05834, + 4.74969, + 5.1975, + 4.87664, + 5.29003, + 4.5149, + 5.07023, + 4.96571, + 4.87528, + 4.77754, + 4.96962, + 4.91404, + 4.97801, + 4.92095, + 5.09617, + 5.15809, + 4.96239, + 5.00682, + 4.96028, + 5.09169, + 4.91383, + 4.88825, + 4.86715, + 4.83316, + 4.8298, + 4.82378, + 5.14118, + 4.78437, + 4.9359, + 5.27034, + 4.921, + 4.91902, + 4.98046, + 4.83012, + 4.94606, + 4.81653, + 5.1004, + 5.41017, + 5.14683, + 4.95879, + 4.87306, + 4.65655, + 4.78916, + 4.72125, + 4.54738, + 4.91692, + 5.18034, + 4.70348, + 4.90975, + 4.95122, + 5.06394, + 5.02376, + 5.05532, + 5.04508, + 4.59928, + 4.9365, + 5.16124, + 4.71402, + 5.05203, + 5.02425, + 5.06861, + 4.90856, + 4.8473, + 5.15348, + 4.82198, + 4.81148, + 4.87736, + 4.47952, + 4.99979, + 5.05571, + 5.06448, + 4.91699, + 4.94095, + 4.84269, + 5.12532, + 5.17372, + 5.08943, + 4.78796, + 4.73726, + 5.08513, + 4.76847, + 4.83308, + 4.69508, + 4.97773, + 5.24142, + 4.70306, + 4.76075, + 5.00465, + 4.93198, + 4.90839, + 4.96146, + 4.88986, + 5.06478, + 4.71712, + 4.8866, + 4.7257, + 5.14443, + 5.01238, + 4.94674, + 5.08232, + 5.06557, + 4.93642, + 4.93931, + 5.00897, + 5.02607, + 5.1895, + 4.62555, + 4.67647, + 4.78412, + 4.9345, + 5.00181, + 4.38944, + 4.78613, + 4.67168, + 4.94825, + 4.88356, + 4.73723, + 4.8337, + 4.84584, + 5.0559, + 4.76538, + 5.0068, + 4.84726, + 4.88129, + 5.17266, + 4.97863, + 4.83507, + 4.81127, + 4.91613, + 5.10594, + 4.85955, + 4.70434, + 5.156, + 4.58406, + 4.82188, + 4.90649, + 4.90668, + 4.77126, + 4.65307, + 4.79509, + 4.90096, + 4.84404, + 4.72258, + 4.96985, + 4.77938, + 4.74915, + 4.98339, + 4.84078, + 5.0713, + 4.95893, + 4.90614, + 4.82556, + 4.91752, + 4.66343, + 4.96711, + 4.68912, + 5.19357, + 4.92203, + 5.00221, + 4.69711, + 4.99184, + 4.9466, + 4.80699, + 5.0241, + 4.9194, + 4.6358, + 4.75728, + 4.63757, + 4.52199, + 4.778, + 4.85672, + 4.63766, + 4.65555, + 4.72331, + 5.00417, + 4.80136, + 4.5361, + 4.67642, + 4.61238, + 4.67066, + 4.82711, + 4.81724, + 5.03966, + 4.83222, + 5.04273, + 4.81673, + 4.75459, + 4.82335, + 4.79586, + 4.65742, + 4.74808, + 4.73714, + 4.77027, + 
4.75121, + 4.93997, + 4.8925, + 4.39002, + 4.92446, + 4.96318, + 5.00597, + 4.83865, + 4.6797, + 4.84466, + 4.94055, + 4.88453, + 4.75694, + 4.91654, + 4.74394, + 4.81844, + 4.65404, + 4.94135, + 5.08495, + 4.86586, + 4.54448, + 4.94368, + 4.74296, + 4.9177, + 4.7828, + 4.89469, + 4.5575, + 4.85725, + 4.75316, + 4.4663, + 4.82665, + 4.93471, + 4.79203, + 4.69683, + 4.89445, + 4.54644, + 5.13239, + 4.78354, + 5.11798, + 4.71728, + 4.70348, + 4.82905, + 4.99073, + 4.99948, + 5.06421, + 4.74041, + 4.94062, + 4.7151, + 4.7583, + 4.88676, + 4.93765, + 4.54342, + 5.02781, + 4.88414, + 4.68454, + 4.72184, + 4.80538, + 4.74273, + 4.82498, + 5.03501, + 4.95931, + 4.98155, + 4.65003, + 4.94067, + 5.0547, + 5.03427, + 5.02286, + 4.81962, + 4.46941, + 4.555, + 4.71148, + 4.78092, + 5.02172, + 4.6691, + 4.97242, + 5.03252, + 4.7693, + 4.72714, + 4.74454, + 4.52712, + 4.87817, + 4.97618, + 4.82325, + 4.89448, + 4.7722, + 4.7574, + 4.94012, + 4.80216, + 4.70374, + 4.63951, + 4.71194, + 4.53908, + 4.69429, + 4.861, + 4.57406, + 4.83336, + 4.66998, + 4.69417, + 4.86433, + 4.86116, + 4.74981, + 4.59613, + 4.52309, + 4.81233, + 4.65262, + 4.82424, + 4.96584, + 5.13492, + 4.96271, + 4.74474, + 4.86967, + 4.89519, + 4.74874, + 4.93905, + 4.87187, + 4.79374, + 4.65773, + 4.46698, + 4.94658, + 5.01018, + 4.90586, + 4.79818, + 4.98402, + 4.71705, + 4.76742, + 4.79861, + 4.89004, + 4.97913, + 4.97592, + 4.62694, + 4.91304, + 4.98108, + 4.6234, + 4.7483, + 4.7996, + 4.81552, + 4.66072, + 4.86883, + 4.91147, + 4.73557, + 4.67527, + 4.96173, + 4.44699, + 4.95205, + 4.87557, + 4.89906, + 4.8322, + 4.92491, + 4.74044, + 4.64675, + 4.98908, + 4.77825, + 4.84855, + 4.53119, + 4.64729, + 4.80561, + 4.78764, + 5.17715, + 4.88161, + 4.96489, + 4.63451, + 4.96533, + 4.95231, + 4.48666, + 4.7945, + 4.65895, + 4.89201, + 4.68694, + 4.83585, + 4.76494, + 4.92638, + 4.75004, + 4.8721, + 4.62253, + 4.93577, + 4.49888, + 4.61243, + 4.92968, + 5.06833, + 4.84828, + 4.52167, + 4.83418, + 4.91635, + 4.43402, + 4.77372, + 4.75635, + 4.707, + 4.92021, + 4.50904, + 4.37403, + 4.76815, + 4.89243, + 4.95943, + 4.89886, + 4.78121, + 4.70513, + 4.72536, + 4.92538, + 4.59533, + 5.023, + 4.99462, + 4.78206, + 4.95085, + 4.68048, + 4.76939, + 4.87899, + 5.01258, + 4.76375, + 4.94918, + 4.81489, + 4.71644, + 4.47068, + 4.7182, + 5.00182, + 4.62038, + 4.93849, + 4.64511, + 4.89392, + 4.77172, + 4.65113, + 4.51912, + 4.76061, + 4.74293, + 4.74822, + 4.61258, + 4.95684, + 4.52337, + 4.94982, + 4.82506, + 4.65957, + 4.5881, + 4.76422, + 4.6201, + 4.70994, + 4.68428, + 4.61941, + 4.83295, + 4.36561, + 4.71132, + 4.8693, + 4.87761, + 4.76732, + 5.03105, + 4.72661, + 4.81114, + 4.71259, + 4.79226, + 4.47782, + 4.81517, + 4.86782, + 4.79763, + 4.79323, + 4.41935, + 4.50036, + 4.66148, + 4.61712, + 4.61785, + 4.57584, + 4.83758, + 4.73585, + 4.67555, + 4.77691, + 4.3531, + 4.78898, + 4.5717, + 4.72766, + 4.91778, + 4.86587, + 4.68556, + 4.62733, + 4.75051, + 4.69219, + 4.8262, + 4.76579, + 4.72255, + 5.0305, + 4.62665, + 4.87705, + 5.01315, + 4.95132, + 5.02254, + 4.79979, + 4.8721, + 4.63789, + 4.90881, + 4.5045, + 4.57007, + 4.58481, + 4.72475, + 4.58987, + 4.85788, + 4.7184, + 4.53701, + 4.6616, + 4.74751, + 4.55185, + 4.96845, + 4.80527, + 4.48706, + 4.64222, + 4.33111, + 4.34967, + 4.60991, + 4.82004, + 4.80822, + 4.75912, + 4.58271, + 4.76306, + 4.71321, + 4.65191, + 4.87146, + 4.75706, + 4.74148, + 4.68519, + 5.22143, + 4.82863, + 4.68958, + 4.53666, + 4.41878, + 4.8403, + 4.56877, + 4.61385, + 4.71419, + 4.68691, + 4.72142, + 4.40812, + 4.53968, 
+ 4.83983, + 4.46803, + 4.88892, + 4.87992, + 4.64638, + 4.55693, + 4.91001, + 4.94812, + 4.62278, + 4.46418, + 5.13242, + 4.5809, + 4.8932, + 4.44557, + 4.93227, + 4.54996, + 4.90009, + 4.74107, + 4.88603, + 4.79131, + 4.84945, + 4.84955, + 4.69556, + 4.69301, + 4.59143, + 5.0594, + 4.70418, + 4.49565, + 4.95933, + 4.80063, + 4.69193, + 4.80112, + 4.99278, + 4.60273, + 4.60156, + 4.43148, + 4.66987, + 4.45753, + 4.72563, + 4.63314, + 4.35455, + 4.79335, + 4.78181, + 4.33556, + 4.69456, + 4.39282, + 4.88724, + 4.79315, + 4.80039, + 4.98918, + 4.88499, + 4.74577, + 4.28626, + 4.47457, + 4.75531, + 4.87661, + 4.81327, + 4.93896, + 4.63541, + 4.68472, + 4.80384, + 4.79265, + 4.39345, + 4.78201, + 4.59908, + 4.53096, + 4.56259, + 4.68667, + 4.73226, + 4.49424, + 4.51258, + 4.71925, + 4.29151, + 4.64394, + 4.6886, + 4.48675, + 4.60874, + 4.7459, + 4.59167, + 4.90537, + 4.86302, + 4.56329, + 4.5443, + 4.90112, + 4.74544, + 4.61742, + 4.64106, + 4.72808, + 4.61122, + 4.55426, + 4.52968, + 4.74333, + 4.70813, + 4.58609, + 4.77309, + 4.78556, + 4.74205, + 4.805, + 4.76053, + 4.72292, + 4.82051, + 4.61096, + 4.68862, + 4.98225, + 4.82846, + 4.88524, + 4.4182, + 4.6069, + 4.92732, + 4.52734, + 4.72748, + 4.19319, + 4.77101, + 4.87247, + 4.64524, + 4.53306, + 4.41046, + 4.71623, + 4.56602, + 4.68073, + 4.75376, + 4.62444, + 4.8382, + 4.54385, + 4.67121, + 4.69427, + 4.62846, + 4.68533, + 4.60622, + 4.78252, + 4.76775, + 4.87897, + 4.73587, + 4.83745, + 4.70528, + 4.89501, + 4.71472, + 4.61637, + 4.737, + 4.87617, + 4.90083, + 4.7506, + 4.5588, + 4.75967, + 4.85087, + 4.73015, + 4.81145, + 4.76526, + 4.63366, + 4.48227, + 4.69849, + 4.81696, + 4.88352, + 4.47812, + 4.82544, + 4.47752, + 4.56241, + 4.93227, + 4.604, + 4.9483, + 4.74325, + 4.53395, + 4.38275, + 4.59088, + 4.81957, + 4.86267, + 4.69082, + 4.6183, + 4.48508, + 4.47777, + 4.92044, + 4.41567, + 4.66611, + 4.50956, + 4.70706, + 4.46791, + 4.2489, + 4.79212, + 4.63609, + 4.66782, + 4.57674, + 4.52574, + 4.52076, + 4.68811, + 4.4077, + 4.59505, + 4.78101, + 4.82134, + 4.5967, + 4.5699, + 4.70792, + 4.45263, + 4.75155, + 4.59565, + 4.56182, + 4.541, + 4.848, + 4.98041, + 4.46207, + 4.52584, + 4.542, + 4.62486, + 4.84567, + 4.61011, + 4.54748, + 4.79613, + 4.52581, + 4.7345, + 4.4271, + 4.56367, + 4.69218, + 4.53595, + 4.6854, + 4.72463, + 4.48842, + 4.35671, + 4.61183, + 4.74, + 4.54254, + 4.84418, + 4.61797, + 4.38779, + 4.81359, + 4.56183, + 4.65887, + 4.46191, + 4.91723, + 4.39569, + 4.26122, + 4.56759, + 4.47002, + 4.43217, + 4.60467, + 4.65903, + 4.93846, + 4.72059, + 4.49106, + 4.55911, + 4.79906, + 4.57175, + 4.48215, + 5.01651, + 4.72988, + 4.45189, + 4.47739, + 4.56989, + 4.53543, + 4.79091, + 4.57685, + 4.78508, + 4.63958, + 4.30987, + 4.69767, + 4.50267, + 4.83635, + 4.65866, + 4.43906, + 4.40794, + 4.93722, + 4.42928, + 4.6151, + 4.76406, + 4.67267, + 4.35968, + 4.62109, + 4.70921, + 4.68381, + 4.82514, + 4.43462, + 4.78986, + 4.89696, + 4.63493, + 4.71161, + 4.63502, + 4.49747, + 4.38738, + 4.60161, + 4.63366, + 4.36558, + 4.94521, + 4.45435, + 4.42434, + 4.42549, + 4.66513, + 4.3614, + 4.87194, + 4.80276, + 4.57408, + 4.65278, + 4.478, + 4.67068, + 4.84789, + 4.7331, + 4.73461, + 4.45543, + 4.4324, + 4.56908, + 5.0239, + 4.40491, + 4.72816, + 4.74429, + 4.76328, + 4.47376, + 4.54905, + 4.52905, + 4.70333, + 4.66749, + 4.71595, + 4.84529, + 4.76991, + 4.66143, + 4.6457, + 4.66828, + 4.49731, + 4.47723, + 4.64761, + 4.76292, + 4.59988, + 4.4697, + 4.48628, + 4.72915, + 5.03539, + 4.6724, + 4.56098, + 4.55105, + 4.51542, + 4.35568, + 
4.36428, + 4.62232, + 4.82502, + 4.59015, + 4.50845, + 4.71907, + 4.56084, + 4.42371, + 4.53453, + 4.5273, + 4.5586, + 4.79538, + 4.6946, + 4.72487, + 4.64867, + 4.44516, + 4.4869, + 4.5549, + 4.56073, + 4.64884, + 4.593, + 4.44246, + 4.44805, + 4.48248, + 4.66544, + 4.60929, + 4.50112, + 4.89481, + 4.73763, + 4.60314, + 4.57416, + 4.515, + 4.8013, + 4.44046, + 4.91568, + 4.36267, + 4.79157, + 4.46044, + 4.64113, + 4.74023, + 4.6115, + 4.44135, + 4.71949, + 4.42112, + 4.43986, + 4.54536, + 4.74759, + 4.5645, + 4.55679, + 4.74879, + 4.65864, + 4.59111, + 4.73591, + 4.69282, + 4.43475, + 4.66154, + 4.72677, + 4.67251, + 4.58189, + 4.65369, + 4.58673, + 4.40185, + 4.74522, + 4.49567, + 4.71353, + 4.56231, + 4.80139, + 4.58642, + 4.56526, + 4.54183, + 4.82074, + 4.54095, + 4.61208, + 4.43126, + 4.50204, + 4.48587, + 4.58407, + 4.75226, + 4.74894, + 4.47329, + 4.8106, + 4.41234, + 4.70224, + 4.57454, + 4.34152, + 4.50839, + 4.81964, + 4.52417, + 4.75229, + 4.64581, + 4.60497, + 4.56196, + 4.72701, + 4.61652, + 4.57347, + 4.52607, + 4.58864, + 4.43967, + 4.67806, + 4.6198, + 4.38904, + 4.53537, + 4.74797, + 4.67546, + 4.63032, + 4.60263, + 4.47735, + 4.85353, + 4.68097, + 4.55998, + 4.59091, + 4.28012, + 4.53379, + 4.63203, + 4.42094, + 4.72058, + 4.57502, + 4.53373, + 4.88208, + 4.47912, + 4.5987, + 4.76404, + 4.65396, + 4.52262, + 4.60806, + 4.53406, + 4.54706, + 4.27153, + 4.68066, + 4.6388, + 4.62344, + 4.34446, + 4.68423, + 4.28831, + 4.71138, + 4.56775, + 4.63956, + 4.49829, + 4.59388, + 4.53957, + 4.56707, + 4.48297, + 4.44764, + 4.6296, + 4.79919, + 4.46619, + 4.49137, + 4.3554, + 4.55926, + 4.59021, + 4.44268, + 4.60352, + 4.27378, + 4.56353, + 4.85971, + 4.80342, + 4.54588, + 4.56813, + 4.45779, + 4.4597, + 4.41689, + 4.63198, + 4.57405, + 4.45318, + 4.39915, + 4.63769, + 4.58178, + 4.79781, + 4.54699, + 4.5028, + 4.3809, + 4.25286, + 4.52546, + 4.58908, + 4.4455, + 4.68798, + 4.62052, + 4.8059, + 4.61084, + 4.72655, + 4.349, + 4.5331, + 4.2214, + 4.46107, + 4.79963, + 4.57864, + 4.75136, + 4.48273, + 4.4063, + 4.58783, + 4.59082, + 4.73156, + 4.54108, + 4.67216, + 4.40101, + 4.27656, + 4.65825, + 4.39989, + 4.68994, + 4.87981, + 4.6742, + 4.53359, + 4.71608, + 4.55351, + 4.64623, + 4.54775, + 4.37172, + 4.34842, + 4.47342, + 4.45296, + 4.54425, + 4.39586, + 4.54531, + 4.57998, + 4.61329, + 4.68849, + 4.49336, + 4.43721, + 4.46949, + 4.46216, + 4.57963, + 4.65987, + 4.3264, + 4.83465, + 4.2933, + 4.57975, + 4.62796, + 4.4096, + 4.63794, + 4.53411, + 4.61003, + 4.63975, + 4.64614, + 4.64884, + 4.57341, + 4.80396, + 4.37951, + 4.69415, + 4.58082, + 4.44623, + 4.55358, + 4.66278, + 4.53898, + 4.5471, + 4.84726, + 4.76963, + 4.93944, + 4.62704, + 4.57939, + 4.53964, + 4.44884, + 4.65882, + 4.75029, + 4.24253, + 4.42151, + 4.42955, + 4.67957, + 4.38614, + 4.61184, + 4.7456, + 4.37707, + 4.61539, + 4.69776, + 4.62103, + 4.34537, + 4.63357, + 4.78883, + 4.57809, + 4.28562, + 4.57732, + 4.4425, + 4.70698, + 4.49877, + 4.87636, + 4.40855, + 4.69371, + 4.61033, + 4.55689, + 4.65983, + 4.55797, + 4.27554, + 4.36855, + 4.59587, + 4.65479, + 4.47291, + 4.83287, + 4.51652, + 4.81102, + 4.34443, + 4.4466, + 4.50246, + 4.51167, + 4.65952, + 4.40659, + 4.59014, + 4.58451, + 4.26414, + 4.50805, + 4.62851, + 4.69117, + 4.61571, + 4.67024, + 4.90178, + 4.63149, + 4.61894, + 4.5956, + 4.55105, + 4.81719, + 4.44747, + 4.65896, + 4.81707, + 4.48081, + 4.58143, + 4.2798, + 4.25732, + 4.67628, + 4.32044, + 4.86509, + 4.56112, + 4.3144, + 4.51759, + 4.51046, + 4.66738, + 4.44102, + 4.29765, + 4.51393, + 
4.70011, + 4.66309, + 4.40031, + 4.65412, + 4.59278, + 4.59517, + 4.20692, + 4.56527, + 4.59982, + 4.41203, + 4.39541, + 4.75475, + 4.64187, + 4.55217, + 4.52682, + 4.35298, + 4.48622, + 4.27236, + 4.64916, + 4.82192, + 4.66877, + 4.31221, + 4.81902, + 4.43135, + 4.44814, + 4.57743, + 4.52274, + 4.4689, + 4.62529, + 4.52039, + 4.70982, + 4.83861, + 4.48021, + 4.46196, + 4.35326, + 4.20743, + 4.50147, + 4.48667, + 4.43376, + 4.61605, + 4.42338, + 4.49059, + 4.67029, + 4.41904, + 4.6984, + 4.30837, + 4.31457, + 4.48496, + 4.43267, + 4.71633, + 4.37138, + 4.24058, + 4.52674, + 4.54254, + 4.39031, + 4.27544, + 4.71477, + 4.57593, + 4.58545, + 4.3442, + 4.37436, + 4.62931, + 4.36112, + 4.66586, + 4.8601, + 4.50136, + 4.26173, + 4.30726, + 4.69426 + ] + }, + "mem-allocated-bytes": { + "start_step": 0, + "end_step": 20000, + "step_interval": 5, + "values": [ + 146450944.0, + 146451456.0, + 146451456.0, + 225728000.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225203712.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225203712.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 
224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 
224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224679424.0, + 224679424.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225203712.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 
224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 225203712.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 
224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 
224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225203712.0, + 224679424.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225203712.0, + 
224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 225334784.0, + 225334784.0, + 224286208.0, + 225203712.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225203712.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224679424.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225203712.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 
225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 225334784.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 225334784.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 
224286208.0, + 224286208.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224679424.0, + 224810496.0, + 224286208.0, + 225334784.0, + 224679424.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 225334784.0, + 224810496.0, + 224810496.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224286208.0, + 224810496.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 200, + "step_interval": 5, + "values": [ + 0.91292, + 0.3432, + 0.34293, + 0.33763, + 0.34388, + 0.3393, + 0.35151, + 0.34797, + 0.34896, + 0.34251, + 0.34037, + 0.34118, + 0.34167, + 0.34039, + 0.34949, + 0.3385, + 0.34197, + 0.34513, + 0.33495, + 0.34333, + 0.33903, + 0.34152, + 0.33892, + 0.33816, + 0.33393, + 0.33258, + 0.33664, + 0.34074, + 0.33756, + 0.33902, + 0.33969, + 0.3437, + 0.33646, + 0.33934, + 0.33157, + 0.33564, + 0.33895, + 0.3388, + 0.33348, + 0.33456 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml index 5c92fbf7d..b9de9dc01 100644 --- a/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml @@ -3,52 +3,46 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: '1' NVTE_FLASH_ATTN: '0' NVTE_FUSED_ATTN: '0' - TEST_TYPE: 'release' - MODEL_ARGS: # Bert model args - --num-layers: 24 - --hidden-size: 1024 - --num-attention-heads: 16 - --seq-length: 512 - --max-position-embeddings: 512 - + --num-layers: 24 + --hidden-size: 1024 + --num-attention-heads: 16 + --seq-length: 512 + --max-position-embeddings: 512 # Training args - --micro-batch-size: 4 - --global-batch-size: 32 - --train-iters: 20000 - --weight-decay: 1e-2 - --clip-grad: 1.0 + --micro-batch-size: 4 + --global-batch-size: 32 + --train-iters: 20000 + --weight-decay: 1e-2 + --clip-grad: 1.0 --fp16: true --lr: 0.0001 - --lr-decay-style: linear - --min-lr: 1.0e-5 - --lr-warmup-fraction: .01 + --lr-decay-style: linear + --min-lr: 1.0e-5 + --lr-warmup-fraction: .01 --bert-no-binary-head: true - # Model parallel - --tensor-model-parallel-size: 8 - --pipeline-model-parallel-size: 8 - + --tensor-model-parallel-size: 8 + --pipeline-model-parallel-size: 8 # Data args --data-path: ${DATA_BLEND} - --vocab-file: ${DATA_PATH}/vocab.txt + --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 --data-cache-path: ${DATA_CACHE_PATH} - # EVAL_AND_LOGGING_ARGS --log-interval: 100 --save-interval: 2000 - --eval-interval: 1000 + --eval-interval: 1000 --save: ${CHECKPOINT_PATH} --load: ${CHECKPOINT_PATH} --eval-iters: 10 - --tensorboard-dir: ${TENSORBOARD_PATH} + --tensorboard-dir: ${TENSORBOARD_PATH} --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true --log-num-zeros-in-grad: true --log-params-norm: true --log-validation-ppl-to-tensorboard: true --wandb-project: megatron-core-release-runs - --wandb-exp-name: ${WANDB_EXPERIMENT} \ No newline at end of file + --wandb-exp-name: ${WANDB_EXPERIMENT} diff --git a/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py b/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py new file mode 100644 index 000000000..ac5482bcc --- /dev/null +++ b/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py @@ -0,0 +1,630 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +import os +import shutil +import subprocess +import sys +import time +import types +import typing as T +from collections import namedtuple + +import numpy as np +import torch + +from megatron.core import parallel_state +from megatron.core.datasets.gpt_dataset import _get_ltor_masks_and_position_ids +from megatron.core.enums import ModelType +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.pipeline_parallel import get_forward_backward_func +from megatron.core.tensor_parallel.mappings import gather_from_tensor_model_parallel_region +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.utils import get_attr_wrapped_model +from megatron.training import get_args, get_tokenizer +from megatron.training.arguments import parse_args, validate_args +from megatron.training.checkpointing import load_checkpoint as _load_checkpoint +from megatron.training.checkpointing import save_checkpoint as _save_checkpoint +from megatron.training.global_vars import set_global_variables, unset_global_variables +from megatron.training.training import get_model +from pretrain_gpt import model_provider +from tests.unit_tests.test_utilities import Utils + +CHECKPOINTS_DIR = "/tmp/ckpt-converter-tests" +FORWARD_ITERS = 1 # *3 +SKIP_CONVERSION = False + + +class TempSharedDir: + """Context that makes & removes a directory to hold the checkpoints.""" + + def __enter__(self): + """Make checkpoint directory.""" + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + shutil.rmtree(CHECKPOINTS_DIR, ignore_errors=True) + os.mkdir(CHECKPOINTS_DIR) + torch.distributed.barrier() + + def __exit__(self, exc_type, exc_value, exc_tb): + """Remove checkpoint directory.""" + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + shutil.rmtree(CHECKPOINTS_DIR, ignore_errors=True) + torch.distributed.barrier() + + +_ModelParallelState = namedtuple("_ModelParallelState", "tp pp ep") + + +class ModelParallelState(_ModelParallelState): + """Parallel state struct, that contains TP, PP, and EP.""" + + def __new__(cls, tp=1, pp=1, ep=1): + return super(ModelParallelState, cls).__new__(cls, tp, pp, ep) + + +class ModelMeta: + """Basic information about a model. + + Args: + format (str): 'mcore', 'megatron', 'meta', or 'hf'. + mp (ModelParallelState): Defines TP, PP, EP. + transformer_impl (str): 'transformer_engine' or 'local'. + """ + + def __init__(self, format: str, mp: ModelParallelState, transformer_impl: str = None): + + if isinstance(mp, tuple): + mp = ModelParallelState(*mp) + if transformer_impl is None: + transformer_impl = "transformer_engine" if format == "mcore" else "local" + + assert format in ("mcore", "megatron", "meta", "hf") + assert isinstance(mp, ModelParallelState) + assert transformer_impl in ("transformer_engine", "local") + + self.format = format + self.mp = mp + self.transformer_impl = transformer_impl + + +class Pipeline: + """A pipeline manages a single conversion and validation. + + The pipeline consists of the following steps: + - Initialize model & inference pass. + - Save model. + - Convert model. + - Load model & inference pass. + - Validate before/after output tensors. + + Args: + src (ModelMeta): Model meta for loading. + dst (ModelMeta): Model meta for storing. 
+ """ + + def __init__(self, src: ModelMeta, dst: ModelMeta): + """Source & destination metas.""" + assert isinstance(src, ModelMeta) + assert isinstance(dst, ModelMeta) + self.src = src + self.dst = dst + + def get_model_argv(self): + """Get argv list for customizing initialization.""" + raise NotImplementedError(self.__class__.__name__ + ".get_model_argv()") + + def get_converter_model_type(self): + """Get converter type: 'GPT' or 'Bert'.""" + raise NotImplementedError(self.__class__.__name__ + ".get_converter_model_type()") + + def get_meta(self, key): + """Get meta from key, which must be either 'src' or 'dst'.""" + assert key in ("src", "dst") + return getattr(self, f"{key}") + + def init_args_and_model(self, key): + """Initialize Megatron and build model.""" + + meta = self.get_meta(key) + + # Destroy & initialize new parallel state. + unset_global_variables() + Utils.destroy_model_parallel() + Utils.initialize_model_parallel( + tensor_model_parallel_size=meta.mp.tp, + pipeline_model_parallel_size=meta.mp.pp, + expert_model_parallel_size=meta.mp.ep, + ) + + # Environment vars. + os.environ["CUDA_DEVICE_MAX_CONNECTIONS"] = "1" + os.environ["NVTE_ALLOW_NONDETERMINISTIC_ALGO"] = "0" + + # Command line args. + sys.argv = [ + "[script]", + *self.get_model_argv(), + "--tensor-model-parallel-size", + str(meta.mp.tp), + "--pipeline-model-parallel-size", + str(meta.mp.pp), + "--expert-model-parallel-size", + str(meta.mp.ep), + "--save-interval", + "2", + "--save", + os.path.join(CHECKPOINTS_DIR, "src"), + "--load", + os.path.join(CHECKPOINTS_DIR, "dst" if not SKIP_CONVERSION else "src"), + "--ckpt-format", + "torch", + "--use-checkpoint-args", + "--no-save-optim", + "--no-save-rng", + "--no-load-optim", + "--no-load-rng", + "--bf16", + "--use-cpu-initialization", + "--no-one-logger", + "--transformer-impl", + meta.transformer_impl, + ] + + # Fail on missing checkpoint. + if key == "dst": + sys.argv.append("--exit-on-missing-checkpoint") + + # Use legacy. + if meta.format == "megatron": + sys.argv.append("--use-legacy-models") + + # Parse args. + args = parse_args() + validate_args(args) + + # Set global args, build tokenizer. + unset_global_variables() + set_global_variables(args) + + # Random seed. + torch.manual_seed(123) + model_parallel_cuda_manual_seed(123) + + # Model. + models = get_model( + model_provider_func=model_provider, model_type=ModelType.encoder_or_decoder + ) + [m.eval() for m in models] + + return args, models + + @classmethod + def is_model_parallel_rank_0(cls): + return ( + parallel_state.get_tensor_model_parallel_rank() == 0 + and parallel_state.get_pipeline_model_parallel_rank() == 0 + ) + + @classmethod + def get_input_ids(cls): + """Randomly initialize input token IDs.""" + if cls.is_model_parallel_rank_0(): + # Generate different data on each DP rank. 
+ args = get_args() + + orig_numpy_seed = np.random.get_state()[1][0] + temp_numpy_seed = orig_numpy_seed + torch.distributed.get_rank() + + np.random.seed(temp_numpy_seed) + numpy_input_ids = np.random.randint( + low=0, high=args.vocab_size, size=(args.seq_length,), dtype=np.int64 + ) + np.random.seed(orig_numpy_seed) + + torch_input_ids = torch.from_numpy(numpy_input_ids).to("cuda") + + return torch_input_ids + else: + return None + + @classmethod + def _broadcast(cls, item): + """Broadcast data from TP rank 0 to other ranks.""" + if item is not None: + torch.distributed.broadcast( + item, + parallel_state.get_tensor_model_parallel_src_rank(), + group=parallel_state.get_tensor_model_parallel_group(), + ) + + @classmethod + def get_batch(cls, input_ids): + """Get batch of data, from input token IDs.""" + + args = get_args() + + # TP rank 0, PP rank 0. + # (Note: mimics megatron/training/utils.py:get_batch_on_this_tp_rank().) + if cls.is_model_parallel_rank_0(): + + tokenizer = get_tokenizer() + + attention_mask, loss_mask, position_ids = _get_ltor_masks_and_position_ids( + data=input_ids, + eod_token=tokenizer.eod, + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + create_attention_mask=args.create_attention_mask_in_dataloader, + ) + input_ids = input_ids.unsqueeze(0) + position_ids = position_ids.unsqueeze(0) + attention_mask = attention_mask.unsqueeze(0) + + # Other TP ranks on PP rank 0. + elif parallel_state.is_pipeline_first_stage(): + input_ids = torch.empty( + (args.micro_batch_size, args.seq_length), + dtype=torch.int64, + device=torch.cuda.current_device(), + ) + position_ids = torch.empty( + (args.micro_batch_size, args.seq_length), + dtype=torch.int64, + device=torch.cuda.current_device(), + ) + if args.create_attention_mask_in_dataloader: + attention_mask = torch.empty( + (args.micro_batch_size, 1, args.seq_length, args.seq_length), + dtype=torch.bool, + device=torch.cuda.current_device(), + ) + else: + attention_mask = None + + # Other PP ranks. + # (Note: mimics pretrain_gpt.py:get_batch().) + else: + input_ids = None + position_ids = None + attention_mask = None + + # Broadcast. + if parallel_state.is_pipeline_first_stage(): + cls._broadcast(input_ids) + cls._broadcast(attention_mask) + cls._broadcast(position_ids) + + return input_ids, position_ids, attention_mask + + @classmethod + def forward_step(cls, orig_input_ids: T.Iterator, model: torch.nn.Module): + """Forward step. + + Args: + orig_input_ids (T.Iterator): Input token IDs. + model (GPTModel): The GPT Model. + """ + + # Unpack input ids. + orig_input_ids = list(orig_input_ids)[0] + + # Get batch. + input_ids, position_ids, attention_mask = cls.get_batch(orig_input_ids) + + # Forward pass test data (multi iters for JIT warm-up). + for _ in range(FORWARD_ITERS): + output_tensor = model(input_ids, position_ids, attention_mask) + + # Aggregate data, for validation. + data = { + "orig_input_ids": orig_input_ids, + "input_ids": input_ids, + "position_ids": position_ids, + "attention_mask": attention_mask, + "output_tensor": output_tensor, + } + + return output_tensor, lambda _, non_loss_data: data + + @classmethod + def forward_model(cls, models, orig_input_ids): + """Forward pass data, and gather parallel output tensors.""" + + args = get_args() + + # Forward pass. 
+ forward_backward_func = get_forward_backward_func() + data = forward_backward_func( + forward_step_func=cls.forward_step, + data_iterator=iter([orig_input_ids]), + model=models, + num_microbatches=1, + seq_length=args.seq_length, + micro_batch_size=args.micro_batch_size, + forward_only=True, + collect_non_loss_data=True, + ) + if parallel_state.is_pipeline_last_stage(): + output_tensor = data[0]["output_tensor"] + else: + output_tensor = None + + # All-gather across the partitions. + if parallel_state.is_pipeline_last_stage(): + output_tensor_gathered = gather_from_tensor_model_parallel_region(output_tensor) + else: + output_tensor_gathered = None + + return output_tensor_gathered + + def rand_init_model_params(self, key, models): + """Randomly initialize model params.""" + + meta = self.get_meta(key) + + with torch.no_grad(): + + # Randomly initialize all params. + for m in models: + for p in m.parameters(): + p.normal_(0, 0.1) + + # Synchronize embeddings. + if meta.mp.pp != 1 and parallel_state.is_rank_in_embedding_group(): + if parallel_state.is_pipeline_first_stage(): + emb = models[0].module.module.shared_embedding_or_output_weight() + elif parallel_state.is_pipeline_last_stage(): + emb = models[-1].module.module.shared_embedding_or_output_weight() + else: + raise Exception("should be either first/last pipeline rank.") + torch.distributed.all_reduce(emb, group=parallel_state.get_embedding_group()) + + def save_checkpoint(self): + """Initialize params, forward pass data, and save checkpoint.""" + + args, models = self.init_args_and_model("src") + + # Init params. + self.rand_init_model_params("src", models) + + # Test data. + orig_input_ids = self.get_input_ids() + output_tensor = self.forward_model(models, orig_input_ids) + + # Save checkpoint. + _save_checkpoint( + iteration=2, + model=models, + optimizer=None, + opt_param_scheduler=None, + num_floating_point_operations_so_far=None, + ) + + return output_tensor, orig_input_ids + + def load_checkpoint(self, orig_input_ids): + """Load checkpoint, and forward pass data.""" + + args, models = self.init_args_and_model("dst") + + # Load checkpoint. + args.iteration, args.num_floating_point_operations_so_far = _load_checkpoint( + models, optimizer=None, opt_param_scheduler=None + ) + + # Test data. + output_tensor_real = self.forward_model(models, orig_input_ids) + + # Random output tensor. + # Note: need two random initializations to differ from `save_checkpoint()` above. + self.rand_init_model_params("dst", models) + self.rand_init_model_params("dst", models) + output_tensor_fake = self.forward_model(models, orig_input_ids) + + return output_tensor_real, output_tensor_fake + + def convert_checkpoint(self): + """Convert checkpoint""" + + args = get_args() + + torch.distributed.barrier() + + # Convert. 
+ if torch.distributed.get_rank() == 0: + + cmd = [ + "python", + "tools/checkpoint/convert.py", + "--model-type", + self.get_converter_model_type(), + "--loader", + self.src.format, + "--load-dir", + args.save, + "--loader-transformer-impl", + self.src.transformer_impl, + "--saver", + self.dst.format, + "--save-dir", + args.load, + "--saver-transformer-impl", + self.dst.transformer_impl, + "--target-tensor-parallel-size", + str(self.dst.mp.tp), + "--target-pipeline-parallel-size", + str(self.dst.mp.pp), + "--megatron-path", + os.getcwd(), + ] + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("convert checkpoint cmd: %s" % " ".join(cmd)) + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + + result = subprocess.run(cmd) + + assert result.returncode == 0, "checkpoint conversion failed." + + torch.distributed.barrier() + + def run(self): + """Run pipeline. + + Running a pipeline consists of: + + - Save checkpoint (includes initializing params & forward passing data). + - Convert checkpoint. + - Load checkpoint (includes forward passing data). + - Validate before/after output tensors. + """ + + Utils.initialize_model_parallel( + tensor_model_parallel_size=self.src.mp.tp, + pipeline_model_parallel_size=self.src.mp.pp, + expert_model_parallel_size=self.src.mp.ep, + ) + with TempSharedDir(): + + # Save checkpoint. + src_output_tensor, input_ids = self.save_checkpoint() + + # Convert checkpoint. + if not SKIP_CONVERSION: + self.convert_checkpoint() + + # Load checkpoint. + dst_output_tensor_real, dst_output_tensor_fake = self.load_checkpoint(input_ids) + + # Validate output tensor. + torch.distributed.barrier() + rank = torch.distributed.get_rank() + world_size = torch.distributed.get_world_size() + if rank == world_size - 1: + args = get_args() + get_mse = lambda dst_output_tensor: torch.nn.MSELoss()( + src_output_tensor[:, :, : args.vocab_size], + dst_output_tensor[:, :, : args.vocab_size], + ).item() + mse_real = get_mse(dst_output_tensor_real) + mse_fake = get_mse(dst_output_tensor_fake) + assert mse_real < 0.01 * mse_fake, "mse_real (%e) >= 0.01 mse_fake (%e)." % ( + mse_real, + mse_fake, + ) + torch.distributed.barrier() + + # Teardown. + unset_global_variables() + Utils.destroy_model_parallel() + + # Broadcast MSE's. + mses = torch.zeros((2,), dtype=torch.float, device="cuda") + if rank == world_size - 1: + mses[0] = mse_real + mses[1] = mse_fake + torch.distributed.broadcast(mses, world_size - 1) + + return mses.tolist() + + +class GPTPipeline(Pipeline): + """GPT-specific pipeline customizations. + + Args: + src (Union[ModelMeta, Tuple]): Model meta for loading. + dst (Union[ModelMeta, Tuple]): Model meta for storing. + num_moe_experts (Optional[int]): Number of MoE experts. + """ + + def __init__(self, src: ModelMeta, dst: ModelMeta, num_moe_experts: T.Optional[int] = None): + super().__init__(ModelMeta(*src), ModelMeta(*dst)) + assert isinstance(num_moe_experts, (int, types.NoneType)) + self.num_moe_experts = num_moe_experts + + def get_model_argv(self): + """GPT model args.""" + args = [ + "--num-layers", + "8", + "--hidden-size", + "16", + "--num-attention-heads", + "8", + "--seq-length", + "16", + "--max-position-embeddings", + "16", + "--micro-batch-size", + "1", # single sample generated. + "--tokenizer-type", + "NullTokenizer", + "--vocab-size", + "127", # ... NullTokenizer adds +1 EOD token. 
+ "--make-vocab-size-divisible-by", + "1", + ] + if self.num_moe_experts is not None and self.num_moe_experts > 1: + args.extend(["--num-experts", str(self.num_moe_experts or 1), "--sequence-parallel"]) + return args + + def get_converter_model_type(self): + return "GPT" + + +def get_gpt_pipelines(): + """Get GPT (non-MoE) pipelines.""" + return [ + GPTPipeline(("mcore", (8, 1)), ("mcore", (1, 8))), + GPTPipeline(("mcore", (4, 2)), ("mcore", (2, 4))), + GPTPipeline(("mcore", (2, 4)), ("mcore", (4, 2))), + GPTPipeline(("mcore", (1, 8)), ("mcore", (8, 1))), + GPTPipeline(("mcore", (4, 2)), ("mcore", (2, 4), "local")), + GPTPipeline(("megatron", (4, 2)), ("mcore", (2, 4))), + GPTPipeline(("mcore", (4, 2), "local"), ("mcore", (2, 4), "local")), + GPTPipeline(("mcore", (4, 2), "local"), ("mcore", (2, 4))), + # [todo] GPTPipeline(("megatron", (4, 2)), ("megatron", (2, 4))), + # [todo] GPTPipeline(("megatron", (4, 2), "te"), ("megatron", (2, 4), "te")), + # [todo] GPTPipeline("meta", "mcore", None, (8, 1)), + # [todo] GPTPipeline("hf", "mcore", None, (8, 1)), + ] + + +def get_moe_pipelines(): + """Get MoE pipelines.""" + return [ + GPTPipeline(("mcore", (2, 1, 2)), ("mcore", (1, 4, 1)), num_moe_experts=8), + GPTPipeline(("mcore", (1, 4, 1)), ("mcore", (2, 1, 2)), num_moe_experts=4), + ] + + +def test_all_pipelines(): + """Run all pipelines.""" + + # Collect pipelines. + pipelines = [ + *get_gpt_pipelines(), + # [todo] *get_moe_pipelines(), # todo: MoE support in loader_mcore.py. + # [todo] *get_bert_pipelines(), + # [todo] *get_t5_pipelines(), + ] + + # Run pipelines. + results = [] + for pipeline in pipelines: + t = time.time() + mses = pipeline.run() + elapsed_time = time.time() - t + results.append((elapsed_time, *mses)) + + # Print results. + if int(os.environ["RANK"]) == 0: + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("checkpoint converter results:") + [print(" t %.1f sec ... mse %.1e, %.1e." 
% (t, r, f)) for t, r, f in results] + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + + +if __name__ == "__main__": + test_all_pipelines() diff --git a/tests/functional_tests/test_cases/common/ckpt_converter/model_config.yaml b/tests/functional_tests/test_cases/common/ckpt_converter/model_config.yaml new file mode 100644 index 000000000..2ac5db114 --- /dev/null +++ b/tests/functional_tests/test_cases/common/ckpt_converter/model_config.yaml @@ -0,0 +1,7 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G/model_config.yaml index 89c71f629..51dbdfd67 100644 --- a/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,6 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 SKIP_PYTEST: 1 - N_REPEATS: 1 MODEL_ARGS: trainer.num_nodes: 1 trainer.devices: 8 diff --git a/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml index d7e926e96..a48bfeae7 100644 --- a/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,6 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 SKIP_PYTEST: 1 - N_REPEATS: 1 MODEL_ARGS: trainer.num_nodes: 1 trainer.devices: 8 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.8.0.json b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.8.0.json index de1f0fc4c..4a06ff6cd 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.8.0.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.8.0.json @@ -1,7 +1,7 @@ -{ +{ "lm loss": { "start_step": 0, - "end_step": 2924, + "end_step": 51541, "step_interval": 5, "values": [ 12.98403, @@ -588,12 +588,9736 @@ 2.95374, 2.99872, 2.9698, - 2.94731 + 2.94731, + 3.10816, + 3.12097, + 3.08655, + 3.15784, + 3.11555, + 3.09052, + 3.03837, + 3.08217, + 3.03873, + 3.09892, + 3.09171, + 3.0746, + 3.06585, + 3.03454, + 3.05471, + 3.07809, + 3.03162, + 3.02148, + 2.98224, + 3.04664, + 3.03632, + 3.03243, + 3.0148, + 2.99808, + 2.99367, + 3.06154, + 3.05874, + 3.01815, + 3.06744, + 2.95133, + 3.02859, + 3.10656, + 3.07802, + 3.02324, + 2.99101, + 3.01708, + 3.04316, + 3.03839, + 3.02589, + 3.02411, + 3.00734, + 2.99448, + 3.02702, + 2.94795, + 3.03093, + 2.99878, + 3.03426, + 2.98039, + 3.04694, + 2.97525, + 3.01652, + 3.01372, + 3.01629, + 2.96429, + 2.97547, + 2.98977, + 3.02636, + 3.03177, + 2.95814, + 2.93316, + 2.99728, + 2.99372, + 2.94736, + 3.00283, + 3.02057, + 3.00827, + 2.95906, + 2.91765, + 3.08027, + 2.97515, + 2.91684, + 2.95951, + 2.96445, + 2.99524, + 2.94514, + 2.87396, + 2.93213, + 2.96313, + 2.91973, + 
[data block: several thousand comma-separated per-iteration loss values (apparently an lm-loss reference/golden series) added as "+" lines by this patch; the curve trends downward from about 3.00 at the start of this excerpt to roughly 2.08 by its end. The full list is omitted here.]
+ 2.07693, + 2.08531, + 2.1196, + 2.08382, + 2.09419, + 2.11851, + 2.12256, + 2.07704, + 2.08892, + 2.0857, + 2.11729, + 2.11258, + 2.08314, + 2.07861, + 2.10291, + 2.07943, + 2.10687, + 2.14702, + 2.09533, + 2.05637, + 2.12697, + 2.08087, + 2.16349, + 2.08352, + 2.08133, + 2.06009, + 2.0746, + 2.11259, + 2.12606, + 2.10411, + 2.09402, + 2.09521, + 2.12929, + 2.11751, + 2.05863, + 2.11136, + 2.07442, + 2.11697, + 2.11331, + 2.07639, + 2.09011, + 2.10535, + 2.0959, + 2.10974, + 2.10441, + 2.12313, + 2.12, + 2.10566, + 2.06719, + 2.0681, + 2.0305, + 2.11669, + 2.09149, + 2.07944, + 2.09889, + 2.0962, + 2.10209, + 2.10019, + 2.07214, + 2.09813, + 2.1024, + 2.12443, + 2.07027, + 2.09208, + 2.10762, + 2.09267, + 2.09957, + 2.11318, + 2.07418, + 2.07919, + 2.10222, + 2.06574, + 2.07709, + 2.11575, + 2.09319, + 2.10793, + 2.09194, + 2.13396, + 2.0968, + 2.08733, + 2.09404, + 2.08597, + 2.08676, + 2.13163, + 2.08519, + 2.10102, + 2.08714, + 2.09944, + 2.11455, + 2.12718, + 2.07525, + 2.12318, + 2.1073, + 2.14625, + 2.09465, + 2.06911, + 2.10687, + 2.09853, + 2.12161, + 2.0934, + 2.0618, + 2.06444, + 2.10858, + 2.06591, + 2.09175, + 2.05045, + 2.09561, + 2.10733, + 2.06683, + 2.0895, + 2.07788, + 2.08878, + 2.08881, + 2.09343, + 2.10234, + 2.10432, + 2.05548, + 2.11979, + 2.13852, + 2.10609, + 2.06919, + 2.08711, + 2.09877, + 2.10447, + 2.07354, + 2.1028, + 2.04471, + 2.11842, + 2.11724, + 2.10107, + 2.07089, + 2.06712, + 2.05558, + 2.05996, + 2.18266, + 2.12735, + 2.04893, + 2.11742, + 2.10346, + 2.09354, + 2.13741, + 2.05657, + 2.09517, + 2.08737, + 2.12482, + 2.07638, + 2.04745, + 2.11341, + 2.09005, + 2.13207, + 2.09965, + 2.14064, + 2.10264, + 2.06801, + 2.08266, + 2.10203, + 2.06392, + 2.10268, + 2.05567, + 2.11455, + 2.07179, + 2.09775, + 2.0813, + 2.11424, + 2.08782, + 2.0959, + 2.14368, + 2.11275, + 2.13281, + 2.04383, + 2.08707, + 2.09902, + 2.11258, + 2.06034, + 2.09194, + 2.05059, + 2.07638, + 2.08818, + 2.1151, + 2.08768, + 2.10977, + 2.10541, + 2.07258, + 2.06794, + 2.11237, + 2.0858, + 2.16095, + 2.13367, + 2.15316, + 2.07624, + 2.13384, + 2.10182, + 2.09083, + 2.09443, + 2.11665, + 2.12159, + 2.06844, + 2.10805, + 2.09698, + 2.11764, + 2.05752, + 2.06101, + 2.09712, + 2.15138, + 2.09315, + 2.1476, + 2.0992, + 2.10949, + 2.09798, + 2.11826, + 2.04555, + 2.09322, + 2.08421, + 2.09839, + 2.07979, + 2.10109, + 2.0957, + 2.08068, + 2.1366, + 2.13502, + 2.05187, + 2.11725, + 2.09857, + 2.10659, + 2.12293, + 2.06406, + 2.08669, + 2.09868, + 2.0906, + 2.0313, + 2.11945, + 2.04933, + 2.06667, + 2.10354, + 2.11594, + 2.12276, + 2.16091, + 2.13829, + 2.05014, + 2.08296, + 2.13385, + 2.10876, + 2.073, + 2.14426, + 2.14419, + 2.12245, + 2.08536, + 2.04344, + 2.09313, + 2.07499, + 2.11034, + 2.08844, + 2.09579, + 2.04232, + 2.02866, + 2.09838, + 2.10088, + 2.09163, + 2.13497, + 2.11638, + 2.09761, + 2.10215, + 2.09704, + 2.08768, + 2.07743, + 2.10841, + 2.05139, + 2.0958, + 2.09852, + 2.04167, + 2.09325, + 2.06652, + 2.08253, + 2.10495, + 2.08861, + 2.10549, + 2.1082, + 2.08944, + 2.12531, + 2.05851, + 2.10046, + 2.09875, + 2.10216, + 2.0999, + 2.05823, + 2.08969, + 2.08372, + 2.07472, + 2.0925, + 2.12, + 2.09712, + 2.04483, + 2.08306, + 2.14129, + 2.09718, + 2.06585, + 2.06543, + 2.12429, + 2.08928, + 2.05799, + 2.083, + 2.06025, + 2.07825, + 2.13556, + 2.14149, + 2.10207, + 2.10597, + 2.09636, + 2.11855, + 2.08618, + 2.08455, + 2.06983, + 2.09598, + 2.09511, + 2.10834, + 2.06031, + 2.07017, + 2.09965, + 2.0806, + 2.05791, + 2.0494, + 2.14306, + 2.08732, + 2.13002, + 2.12531, + 2.07959, + 
2.07394, + 2.05711, + 2.09396, + 2.13426, + 2.07273, + 2.08209, + 2.09963, + 2.10569, + 2.07571, + 2.07713, + 2.07359, + 2.077, + 2.09296, + 2.11698, + 2.11433, + 2.15538, + 2.12141, + 2.09701, + 2.06529, + 2.08679, + 2.07281, + 2.09007, + 2.08507, + 2.10782, + 2.07234, + 2.09165, + 2.05352, + 2.05664, + 2.09603, + 2.09925, + 2.13805, + 2.10218, + 2.09074, + 2.11966, + 2.11393, + 2.13612, + 2.0295, + 2.06639, + 2.10488, + 2.13164, + 2.10598, + 2.10302, + 2.03461, + 2.08115, + 2.08521, + 2.14027, + 2.07098, + 2.07341, + 2.08796, + 2.07977, + 2.10679, + 2.08379, + 2.10401, + 2.06856, + 2.12346, + 2.1077, + 2.08288, + 2.05438, + 2.09745, + 2.10725, + 2.10592, + 2.06763, + 2.0627, + 2.09889, + 2.09544, + 2.05868, + 2.1224, + 2.09809, + 2.10655, + 2.13555, + 2.06655, + 2.1095, + 2.1434, + 2.12952, + 2.07114, + 2.09688, + 2.05278, + 2.10734, + 2.1207, + 2.09302, + 2.04689, + 2.09876, + 2.07933, + 2.08186, + 2.08031, + 2.10035, + 2.05869, + 2.1056, + 2.08951, + 2.0591, + 2.07628, + 2.09412, + 2.08192, + 2.06388, + 2.10212, + 2.10531, + 2.07814, + 2.07004, + 2.10413, + 2.12098, + 2.12568, + 2.10982, + 2.09327, + 2.08941, + 2.17485, + 2.11135, + 2.11555, + 2.10964, + 2.09866, + 2.05464, + 2.12883, + 2.12335, + 2.0632, + 2.10092, + 2.06457, + 2.10065, + 2.09129, + 2.07436, + 2.09219, + 2.0903, + 2.12306, + 2.05879, + 2.09461, + 2.08791, + 2.0932, + 2.107, + 2.12141, + 2.10174, + 2.08455, + 2.10446, + 2.0589, + 2.08861, + 2.09538, + 2.06244, + 2.12129, + 2.04785, + 2.10927, + 2.07907, + 2.08957, + 2.06641, + 2.09543, + 2.09624, + 2.06308, + 2.06983, + 2.09502, + 2.0673, + 2.09205, + 2.08403, + 2.0743, + 2.10818, + 2.07747, + 2.07768, + 2.06761, + 2.10385, + 2.08824, + 2.09295, + 2.11088, + 2.1162, + 2.12279, + 2.10406, + 2.06693, + 2.09472, + 2.10743, + 2.12754, + 2.04905, + 2.10957, + 2.05826, + 2.10684, + 2.06485, + 2.10718, + 2.07938, + 2.11882, + 2.10898, + 2.06888, + 2.05873, + 2.07172, + 2.10595, + 2.07307, + 2.10964, + 2.1244, + 2.08716, + 2.07816, + 2.06458, + 2.10505, + 2.0868, + 2.07527, + 2.06643, + 2.10857, + 2.09433, + 2.07548, + 2.12231, + 2.10679, + 2.1301, + 2.07847, + 2.10072, + 2.07385, + 2.07359, + 2.09019, + 2.08324, + 2.10433, + 2.10947, + 2.06253, + 2.13539, + 2.07343, + 2.05194, + 2.09756, + 2.08743, + 2.06763, + 2.08374, + 2.07282, + 2.09699, + 2.09435, + 2.05835, + 2.07964, + 2.02119, + 2.07815, + 2.10801, + 2.10046, + 2.10966, + 2.09726, + 2.11314, + 2.05466, + 2.08073, + 2.14106, + 2.06047, + 2.10951, + 2.09, + 2.11125, + 2.08879, + 2.06707, + 2.07183, + 2.0867, + 2.13009, + 2.08191, + 2.04381, + 2.11193, + 2.0715, + 2.07854, + 2.0421, + 2.08556, + 2.08938, + 2.07561, + 2.11215, + 2.14527, + 2.06868, + 2.11486, + 2.07242, + 2.12995, + 2.10319, + 2.10211, + 2.11666, + 2.09679, + 2.06133, + 2.09817, + 2.06243, + 2.1081, + 2.05099, + 2.0494, + 2.1311, + 2.10945, + 2.10221, + 2.09648, + 2.06595, + 2.06851, + 2.10172, + 2.08489, + 2.0322, + 2.08705, + 2.10071, + 2.09936, + 2.04936, + 2.10958, + 2.12478, + 2.09828, + 2.09245, + 2.07993, + 2.08409, + 2.12464, + 2.12218, + 2.11401, + 2.10059, + 2.08952, + 2.10188, + 2.12488, + 2.06727, + 2.13965, + 2.06252, + 2.05318, + 2.11949, + 2.08002, + 2.06681, + 2.08075, + 2.11239, + 2.08155, + 2.0781, + 2.08551, + 2.10294, + 2.09623, + 2.1116, + 2.12795, + 2.14226, + 2.1018, + 2.08956, + 2.08394, + 2.08378, + 2.09745, + 2.08278, + 2.05187, + 2.03201, + 2.12293, + 2.08458, + 2.08061, + 2.09901, + 2.08154, + 2.0693, + 2.08471, + 2.11249, + 2.08377, + 2.09548, + 2.07383, + 2.09053, + 2.10952, + 2.12585, + 2.05094, + 2.08438, + 2.07713, + 
2.05305, + 2.07802, + 2.1183, + 2.07688, + 2.09514, + 2.05049, + 2.09273, + 2.09997, + 2.10551, + 2.0632, + 2.06938, + 2.06185, + 2.07321, + 2.10497, + 2.06888, + 2.03839, + 2.12977, + 2.10986, + 2.13385, + 2.087, + 2.03975, + 2.0583, + 2.07912, + 2.05545, + 2.08134, + 2.10043, + 2.0853, + 2.07958, + 2.05652, + 2.10452, + 2.05476, + 2.10687, + 2.09623, + 2.10474, + 2.11976, + 2.07815, + 2.07492, + 2.11689, + 2.13339, + 2.05766, + 2.10764, + 2.07703, + 2.08976, + 2.11237, + 2.08523, + 2.08433, + 2.03489, + 2.12074, + 2.0819, + 2.12938, + 2.08626, + 2.04672, + 2.04057, + 2.04352, + 2.06714, + 2.05572, + 2.10896, + 2.06512, + 2.06987, + 2.06589, + 2.06275, + 2.06563, + 2.08737, + 2.06706, + 2.09171, + 2.12159, + 2.0688, + 2.06997, + 2.12483, + 2.09286, + 2.10183, + 2.09763, + 2.08051, + 2.08133, + 2.08057, + 2.07328, + 2.10866, + 2.0682, + 2.07177, + 2.08688, + 2.09552, + 2.10886, + 2.08312, + 2.06387, + 2.10857, + 2.07828, + 2.09443, + 2.04866, + 2.05244, + 2.10254, + 2.06371, + 2.07301, + 2.08382, + 2.0516, + 2.09006, + 2.05821, + 2.11601, + 2.09929, + 2.1087, + 2.05414, + 2.06161, + 2.08538, + 2.06941, + 2.05073, + 2.07326, + 2.06644, + 2.05663, + 2.13895, + 2.11788, + 2.07981, + 2.05151, + 2.04575, + 2.13003, + 2.06948, + 2.05482, + 2.08719, + 2.12215, + 2.07701, + 2.12889, + 2.08601, + 2.0604, + 2.05012, + 2.0865, + 2.0683, + 2.06886, + 2.06661, + 2.02421, + 2.10141, + 2.10602, + 2.06811, + 2.08901, + 2.12167, + 2.06259, + 2.08304, + 2.07032, + 2.07062, + 2.09732, + 2.10641, + 2.0579, + 2.11326, + 2.08299, + 2.03471, + 2.06602, + 2.07125, + 2.11695, + 2.08697, + 2.08855, + 2.09645, + 2.10792, + 2.09512, + 2.07033, + 2.06452, + 2.09568, + 2.02708, + 2.0726, + 2.10019, + 2.06094, + 2.10202, + 2.08402, + 2.06983, + 2.08993, + 2.09638, + 2.05037, + 2.13457, + 2.07581, + 2.13575, + 2.12398, + 2.06613, + 2.11111, + 2.1145, + 2.08894, + 2.09376, + 2.12175, + 2.05732, + 2.13705, + 2.09908, + 2.0791, + 2.05217, + 2.1283, + 2.0691, + 2.08499, + 2.11142, + 2.08245, + 2.05724, + 2.05902, + 2.01511, + 2.08533, + 2.10273, + 2.07988, + 2.05698, + 2.08838, + 2.06698, + 2.09402, + 2.08717, + 2.04053, + 2.07462, + 2.04778, + 2.08019, + 2.0987, + 2.09668, + 2.15165, + 2.0697, + 2.11636, + 2.11334, + 2.07712, + 2.08999, + 2.03823, + 2.05859, + 2.09449, + 2.06406, + 2.07528, + 2.06655, + 2.08959, + 2.07137, + 2.08289, + 2.06845, + 2.07106, + 2.11089, + 2.05086, + 2.12127, + 2.05918, + 2.07549, + 2.12179, + 2.10599, + 2.13149, + 2.08887, + 2.08957, + 2.09877, + 2.08595, + 2.10023, + 2.0853, + 2.07759, + 2.11362, + 2.10138, + 2.08006, + 2.10543, + 2.10535, + 2.06143, + 2.07307, + 2.05596, + 2.10223, + 2.08959, + 2.08539, + 2.07365, + 2.06753, + 2.07256, + 2.12952, + 2.10517, + 2.10021, + 2.05825, + 2.08121, + 2.10933, + 2.049, + 2.05466, + 2.07098, + 2.07628, + 2.03626, + 2.05291, + 2.06655, + 2.07309, + 2.05568, + 2.14316, + 2.04853, + 2.07942, + 2.06593, + 2.06254, + 2.08289, + 2.08615, + 2.09532, + 2.11679, + 2.0649, + 2.09978, + 2.0762, + 2.08371, + 2.10591, + 2.03856, + 2.10787, + 2.09956, + 2.04981, + 2.07355, + 2.0518, + 2.11728, + 2.10659, + 2.04927, + 2.05877, + 2.12205, + 2.08597, + 2.12813, + 2.08334, + 2.10963, + 2.06045, + 2.02757, + 2.09841, + 2.08826, + 2.11186, + 2.05326, + 2.07644, + 2.08052, + 2.08743, + 2.09356, + 2.09012, + 2.10745, + 2.06707, + 2.0733, + 2.06414, + 2.09557, + 2.07098, + 2.11413, + 2.05894, + 2.11377, + 2.06735, + 2.1064, + 2.04679, + 2.07763, + 2.07354, + 2.09175, + 2.09248, + 2.07801, + 2.09581, + 2.09903, + 2.09648, + 2.08277, + 2.09021, + 2.10057, + 2.08105, + 2.08443, 
+ 2.08643, + 2.05964, + 2.05119, + 2.09015, + 2.03249, + 2.05187, + 2.11343, + 2.07439, + 2.07955, + 2.07078, + 2.07819, + 2.08596, + 2.08629, + 2.06124, + 2.13248, + 2.0839, + 2.11169, + 2.09365, + 2.05932, + 2.14305, + 2.10147, + 2.10506, + 2.0836, + 2.04407, + 2.11549, + 2.0569, + 2.08803, + 2.03878, + 2.07207, + 2.12435, + 2.08074, + 2.05453, + 2.09376, + 2.11245, + 2.13387, + 2.05021, + 2.06261, + 2.08147, + 2.10192, + 2.05371, + 2.09035, + 2.09981, + 2.11283, + 2.07552, + 2.05011, + 2.08086, + 2.08791, + 2.09109, + 2.09478, + 2.08687, + 2.06774, + 2.06801, + 2.05868, + 2.03571, + 2.08034, + 2.08834, + 2.04109, + 2.09595, + 2.08277, + 2.05391, + 2.09077, + 2.10114, + 2.10108, + 2.04491, + 2.02705, + 2.10968, + 2.05859, + 2.11915, + 2.06795, + 2.04326, + 2.11999, + 2.06947, + 2.0688, + 2.09242, + 2.05161, + 2.09139, + 2.07867, + 2.11192, + 2.06297, + 2.0617, + 2.12802, + 2.06317, + 2.07201, + 2.07335, + 2.06238, + 2.07271, + 2.08971, + 2.06031, + 2.10149, + 2.06412, + 2.10306, + 2.14723, + 2.08575, + 2.05408, + 2.12632, + 2.07604, + 2.10745, + 2.09542, + 2.0623, + 2.11184, + 2.08896, + 2.09458, + 2.08551, + 2.07229, + 2.0738, + 2.08896, + 2.04936, + 2.07808, + 2.08666, + 2.05616, + 2.08514, + 2.08047, + 2.09825, + 2.02671, + 2.07708, + 2.07179, + 2.07423, + 2.12159, + 2.07667, + 2.08858, + 2.12131, + 2.0846, + 2.0895, + 2.11982, + 2.09272, + 2.0967, + 2.10081, + 2.08209, + 2.11958, + 2.08962, + 2.04527, + 2.08881, + 2.06814, + 2.0639, + 2.01201, + 2.07051, + 2.06076, + 2.07664, + 2.02808, + 2.10331, + 2.07758, + 2.09839, + 2.14935, + 2.09953, + 2.13459, + 2.04503, + 2.07972, + 2.08001, + 2.11964, + 2.0841, + 2.1039, + 2.07457, + 2.06021, + 2.09136, + 2.06603, + 2.06455, + 2.07861, + 2.0946, + 2.11661, + 2.08214, + 2.07236, + 2.04942, + 2.0918, + 2.11123, + 2.04044, + 2.06608, + 2.07055, + 2.04475, + 2.09647, + 2.08891, + 2.09564, + 2.09057, + 2.06203, + 2.08412, + 2.06771, + 2.10738, + 2.111, + 2.07876, + 2.10525, + 2.08044, + 2.08084, + 2.08596, + 2.11474, + 2.04799, + 2.08073, + 2.09498, + 2.03642, + 2.05626, + 2.06404, + 2.07853, + 2.0787, + 2.10622, + 2.10965, + 2.08003, + 2.0884, + 2.11147, + 2.04152, + 2.09926, + 2.08705, + 2.08691, + 2.08258, + 2.15522, + 2.04744, + 2.06077, + 2.06625, + 2.08346, + 2.01916, + 2.08161, + 2.06885, + 2.06217, + 2.05991, + 2.08466, + 2.0668, + 2.12277, + 2.10026, + 2.09785, + 2.13425, + 2.06195, + 2.08098, + 2.09011, + 2.1044, + 2.06869, + 2.10859, + 2.04348, + 2.07798, + 2.07843, + 2.11816, + 2.05896, + 2.08501, + 2.04687, + 2.1052, + 2.04771, + 2.06816, + 2.05366, + 2.07519, + 2.11421, + 2.04638, + 2.05439, + 2.07166, + 2.06867, + 2.08037, + 2.077, + 2.11032, + 2.05045, + 2.10494, + 2.05387, + 2.08535, + 2.1083, + 2.07564, + 2.04316, + 2.06658, + 2.09089, + 2.09009, + 2.10344, + 2.06899, + 2.0881, + 2.05215, + 2.07387, + 2.06798, + 2.09136, + 2.05706, + 2.03709, + 2.13512, + 2.0706, + 2.09118, + 2.08415, + 2.08297, + 2.09499, + 2.08912, + 2.08797, + 2.13359, + 2.06446, + 2.09476, + 2.07187, + 2.0788, + 2.06457, + 2.06219, + 2.07307, + 2.05628, + 2.04264, + 2.09273, + 2.08226, + 2.06731, + 2.08178, + 2.11459, + 2.09887, + 2.07403, + 2.08059, + 2.08739, + 2.04807, + 2.08743, + 2.0474, + 2.10141, + 2.0685, + 2.06352, + 2.04894, + 2.08403, + 2.12205, + 2.05783, + 2.07466, + 2.09815, + 2.05485, + 2.03991, + 2.06384, + 2.11639, + 2.08208, + 2.07266, + 2.08265, + 2.0591, + 2.06771, + 2.05732, + 2.13982, + 2.07704, + 2.04526, + 2.04001, + 2.09515, + 2.10646, + 2.06798, + 2.09709, + 2.06035, + 2.05816, + 2.09316, + 2.08389, + 2.10467, + 2.08073, + 2.05574, 
+ 2.06912, + 2.09295, + 2.08362, + 2.01352, + 2.09536, + 2.06837, + 2.10496, + 2.07366, + 2.08966, + 2.07952, + 2.03814, + 2.03333, + 2.14111, + 2.05627, + 2.09667, + 2.05805, + 2.07048, + 2.07534, + 2.10524, + 2.05593, + 2.05148, + 2.10256, + 2.09606, + 2.03006, + 2.09898, + 2.09594, + 2.13432, + 2.07676, + 2.04125, + 2.0871, + 2.05073, + 2.09526, + 2.05568, + 2.08687, + 2.12749, + 2.0439, + 2.09112, + 2.12374, + 2.07473, + 2.04127, + 2.06902, + 2.06305, + 2.08114, + 2.12068, + 2.0489, + 2.03523, + 2.0712, + 2.03938, + 2.08268, + 2.04612, + 2.0983, + 2.07369, + 2.14825, + 2.08007, + 2.09702, + 2.09641, + 2.0741, + 2.08124, + 2.06221, + 2.09249, + 2.09425, + 2.09837, + 2.09091, + 2.0715, + 2.09521, + 2.07192, + 2.05742, + 2.07861, + 2.07327, + 2.07223, + 2.08697, + 2.06871, + 2.11655, + 2.11108, + 2.04672, + 2.08033, + 2.04286, + 2.07712, + 2.10259, + 2.05975, + 2.08855, + 2.05255, + 2.05195, + 2.06346, + 2.12159, + 2.0913, + 2.09612, + 2.06177, + 2.062, + 2.05142, + 2.08616, + 2.09952, + 2.05103, + 2.08964, + 2.08272, + 2.05511, + 2.10005, + 2.10844, + 2.11543, + 2.05338, + 2.0983, + 2.04775, + 2.07966, + 2.09714, + 2.08829, + 2.07721, + 2.04104, + 2.06485, + 2.08435, + 2.06925, + 2.04254, + 2.10755, + 2.12386, + 2.08613, + 2.09179, + 2.10993, + 2.06787, + 2.08731, + 2.0888, + 2.10631, + 2.06076, + 2.06641, + 2.06878, + 2.10194, + 2.0369, + 2.06487, + 2.07335, + 2.11526, + 2.10839, + 2.07693, + 2.09576, + 2.03904, + 2.07573, + 2.05599, + 2.08564, + 2.04587, + 2.08345, + 2.11384, + 2.06815, + 2.11204, + 2.09701, + 2.06959, + 2.05975, + 2.06271, + 2.07987, + 2.03438, + 2.02026, + 2.1091, + 2.01115, + 2.10765, + 2.07201, + 2.07531, + 2.09321, + 2.0907, + 2.06081, + 2.04693, + 2.07821, + 2.07435, + 2.10637, + 2.06875, + 2.09833, + 2.06236, + 2.09697, + 2.08328, + 2.08284, + 2.08317, + 2.08863, + 2.07185, + 2.04685, + 2.07006, + 2.08846, + 2.08496, + 2.05436, + 2.11998, + 2.05075, + 2.0581, + 2.02831, + 2.06315, + 2.0343, + 2.04784, + 2.04021, + 2.08055, + 2.0899, + 2.02951, + 2.11211, + 2.0732, + 2.0612, + 2.09649, + 2.11165, + 2.1091, + 2.08815, + 2.07245, + 2.05356, + 2.07355, + 2.04884, + 2.09897, + 2.12194, + 2.0959, + 2.07338, + 2.11015, + 2.10684, + 2.08965, + 2.05893, + 2.09282, + 2.09683, + 2.07195, + 2.08022, + 2.09747, + 2.0633, + 2.06675, + 2.04568, + 2.09471, + 2.05657, + 2.12949, + 2.09248, + 2.05523, + 2.05705, + 2.0681, + 2.11056, + 2.09744, + 2.06548, + 2.06289, + 2.07977, + 2.05106, + 2.08546, + 2.03567, + 2.03405, + 2.1159, + 2.06592, + 2.13672, + 2.06547, + 2.02872, + 2.07857, + 2.06797, + 2.09301, + 2.08979, + 2.04519, + 2.09267, + 2.02386, + 2.10066, + 2.05834, + 2.11306, + 2.08807, + 2.03376, + 2.06363, + 2.07743, + 2.10855, + 2.08777, + 2.05537, + 2.07145, + 2.10631, + 2.05601, + 2.05508, + 2.09123, + 2.10311, + 2.07929, + 2.1161, + 2.08299, + 2.10095, + 2.07158, + 2.05518, + 2.10988, + 2.0714, + 2.09, + 2.07644, + 2.08957, + 2.07559, + 2.08717, + 2.05797, + 2.04493, + 2.06979, + 2.0303, + 2.11279, + 2.06173, + 2.08649, + 2.09217, + 2.0717, + 2.07361, + 2.09312, + 2.10422, + 2.09161, + 2.06168, + 2.05155, + 2.11064, + 2.07019, + 2.04297, + 2.07233, + 2.09003, + 2.0214, + 2.05269, + 2.11527, + 2.03512, + 2.05921, + 2.08215, + 2.0993, + 2.04872, + 2.07001, + 2.0959, + 2.12354, + 2.08807, + 2.04736, + 2.06479, + 2.06382, + 2.11517, + 2.11688, + 2.03433, + 2.11025, + 2.09423, + 2.09858, + 2.12959, + 2.06117, + 2.07987, + 2.09105, + 2.10937, + 2.10648, + 2.08059, + 2.0805, + 2.06238, + 2.01031, + 2.1228, + 2.09327, + 2.0613, + 2.02498, + 2.08956, + 2.11001, + 2.07878, + 
2.09466, + 2.10274, + 2.02658, + 2.12011, + 2.06768, + 2.06425, + 2.11235, + 2.08678, + 2.0983, + 2.06864, + 2.06021, + 2.08937, + 2.10728, + 2.1105, + 2.07406, + 2.09195, + 2.06193, + 2.05703, + 2.0821, + 2.07184, + 2.06265, + 2.06179, + 2.06258, + 2.03414, + 2.07447, + 2.11165, + 2.10368, + 2.08222, + 2.06034, + 2.09299, + 2.06639, + 2.0324, + 2.0877, + 2.09959, + 2.08443, + 2.04432, + 2.10967, + 2.11683, + 2.06221, + 2.10054, + 2.0798, + 2.09493, + 2.09083, + 2.08705, + 2.03815, + 2.07846, + 2.09124, + 2.10942, + 2.05648, + 2.08805, + 2.07112, + 2.04936, + 2.07101, + 2.10421, + 2.0818, + 2.08563, + 2.04788, + 2.11426, + 2.09575, + 2.08545, + 2.07318, + 2.10313, + 2.06514, + 2.04833, + 2.07918, + 2.07118, + 2.08761, + 2.07334, + 2.08246, + 2.07719, + 2.11151, + 2.08335, + 2.08137, + 2.10705, + 2.04507, + 2.094, + 2.06063, + 2.08394, + 2.07395, + 2.04107, + 2.09402, + 2.05912, + 2.06276, + 2.05562, + 2.08194, + 2.09197, + 2.03237, + 2.08978, + 2.07932, + 2.06838, + 2.11952, + 2.09376, + 2.05076, + 2.06075, + 2.09378, + 2.07295, + 2.07245, + 2.09441, + 2.0509, + 2.12568, + 2.07312, + 2.06425, + 2.08261, + 2.1046, + 2.10361, + 2.09654, + 2.08097, + 2.06297, + 2.07988, + 2.07039, + 2.10489, + 2.06451, + 2.07418, + 2.09753, + 2.07034, + 2.09364, + 2.05596, + 2.07561, + 2.07982, + 2.08376, + 2.0661, + 2.08775, + 2.05946, + 2.08205, + 2.09396, + 2.05477, + 2.07471, + 2.0555, + 2.06897, + 2.04614, + 2.03952, + 2.0747, + 2.09001, + 2.0183, + 2.02674, + 2.04552, + 2.07474, + 2.08825, + 2.04965, + 2.07348, + 2.09583, + 2.10536, + 2.088, + 2.09251, + 2.0852, + 2.06831, + 2.07849, + 2.07613, + 2.04917, + 2.05789, + 2.09694, + 2.0399, + 2.06253, + 2.01876, + 2.08518, + 2.10308, + 2.05429, + 2.08299, + 2.06004, + 2.02499, + 2.04802, + 2.02709, + 2.07147, + 2.07627, + 2.07057, + 2.03969, + 2.04239, + 2.09315, + 2.10108, + 2.10792, + 2.09723, + 2.05226, + 2.05174, + 2.07536, + 2.06491, + 2.0742, + 2.07106, + 2.0622, + 2.0667, + 2.0977, + 2.08766, + 2.08177, + 2.03357, + 2.09672, + 2.10537, + 2.04546, + 2.06886, + 2.07088, + 2.0942, + 2.084, + 2.08445, + 2.09584, + 2.06988, + 2.05098, + 2.0683, + 2.08299, + 2.0533, + 2.09987, + 2.09807, + 2.08093, + 2.09702, + 2.1107, + 2.08643, + 2.05762, + 2.05959, + 2.0522, + 2.03443, + 2.08717, + 2.11314, + 2.04909, + 2.07131, + 2.09459, + 2.11283, + 2.06813, + 2.08574, + 2.04717, + 2.07728, + 2.04941, + 2.07708, + 2.06748, + 2.08139, + 2.09414, + 2.08328, + 2.09451, + 2.03865, + 2.01092, + 2.06203, + 2.0759, + 2.06087, + 2.08099, + 2.05932, + 2.09506, + 2.08399, + 2.09903, + 2.06451, + 2.08355, + 2.07075, + 2.07816, + 2.09058, + 2.07034, + 2.06601, + 2.04449, + 2.05414, + 2.08353, + 2.05311, + 2.0926, + 2.07921, + 2.07332, + 2.07781, + 2.05381, + 2.09666, + 2.07943, + 2.08521, + 2.07927, + 2.13237, + 2.06252, + 2.03259, + 2.05256, + 2.06459, + 2.08257, + 2.08109, + 2.02265, + 2.07611, + 2.04178, + 2.03406, + 2.05848, + 2.07218, + 2.022, + 2.0741, + 2.08695, + 2.10449, + 2.08748, + 2.03154, + 2.06957, + 2.05772, + 2.06352, + 2.07712, + 2.07633, + 2.0476, + 2.095, + 2.07497, + 2.11, + 2.05855, + 2.05679, + 2.06296, + 2.11952, + 2.04389, + 2.01461, + 2.05332, + 2.09808, + 2.09688, + 2.07873, + 2.08474, + 2.04521, + 2.06892, + 2.0626, + 2.11122, + 2.06913, + 2.04477, + 2.08495, + 2.0841, + 2.11028, + 2.07752, + 2.08095, + 2.07349, + 2.06445, + 2.09024, + 2.08983, + 2.08029, + 2.07716, + 2.04518, + 2.07579, + 2.06677, + 2.03602, + 2.04712, + 2.04221, + 2.11063, + 2.074, + 2.09985, + 2.09698, + 2.10368, + 2.08202, + 2.09736, + 2.08315, + 2.06055, + 2.02932, + 2.06129, + 
2.12731, + 2.06857, + 2.04041, + 2.09405, + 2.04375, + 2.09501, + 2.08089, + 2.03934, + 2.07517, + 2.08621, + 2.02647, + 2.07262, + 2.06782, + 2.08082, + 2.01646, + 2.10592, + 2.09469, + 2.0787, + 2.07126, + 2.05826, + 2.06572, + 2.11188, + 2.03812, + 2.05959, + 2.04282, + 2.11179, + 2.08053, + 2.07824, + 2.07045, + 2.07447, + 2.07614, + 2.06566, + 2.11008, + 2.07341, + 2.04886, + 2.06936, + 2.02935, + 2.07037, + 2.06631, + 2.05797, + 2.08815, + 2.02614, + 2.10452, + 2.10405, + 2.06925, + 2.09142, + 2.06891, + 2.07501, + 2.02991, + 2.08181, + 2.06432, + 2.1097, + 2.10621, + 2.11628, + 2.07979, + 2.04662, + 2.06314, + 2.05579, + 2.08932, + 2.0844, + 2.03566, + 2.07489, + 2.06528, + 2.09113, + 2.07292, + 2.08534, + 2.09153, + 2.00248, + 2.08949, + 2.09018, + 2.08023, + 2.02429, + 2.10397, + 2.05376, + 2.0944, + 2.08502, + 2.08701, + 2.05415, + 2.07793, + 2.08653, + 2.11732, + 2.06127, + 2.09374, + 2.01291, + 2.07747, + 2.09672, + 2.10731, + 2.0676, + 2.06539, + 2.06253, + 2.04147, + 2.04148, + 2.06516, + 2.0866, + 2.0439, + 2.06518, + 2.03176, + 2.06022, + 2.07628, + 2.10906, + 2.07795, + 2.08238, + 2.05263, + 2.04501, + 2.04578, + 2.05974, + 2.07929, + 2.03826, + 2.03811, + 2.08098, + 2.08204, + 2.06672, + 2.10597, + 2.01384, + 2.07135, + 2.02871, + 2.09301, + 2.09987, + 2.10714, + 2.07693, + 2.1019, + 2.0469, + 2.04736, + 2.01895, + 2.0625, + 2.07069, + 2.05596, + 2.08074, + 2.09343, + 2.06839, + 2.08167, + 2.07656, + 2.07342, + 2.08039, + 2.04495, + 2.06596, + 2.06165, + 2.09712, + 2.04277, + 2.07644, + 2.07413, + 2.04887, + 2.03502, + 2.13111, + 2.08508, + 2.00005, + 2.06236, + 2.07033, + 2.09669, + 2.09403, + 2.04191, + 2.05961, + 2.06106, + 2.10675, + 2.07416, + 2.04197, + 2.07654, + 2.10383, + 2.09884, + 2.0673, + 2.10688, + 2.06403, + 2.08151, + 2.05666, + 2.06854, + 2.07167, + 2.09647, + 2.08684, + 2.02248, + 2.06082, + 2.09339, + 2.02371, + 2.07545, + 2.0813, + 2.08717, + 2.06585, + 2.05875, + 2.08995, + 2.078, + 2.10113, + 2.0617, + 2.05961, + 2.03764, + 2.11098, + 2.0631, + 2.0997, + 2.02369, + 2.088, + 2.06504, + 2.02063, + 2.04333, + 2.09234, + 2.03768, + 2.06992, + 2.07984, + 2.03296, + 2.06035, + 2.06925, + 2.07467, + 2.05863, + 2.04196, + 2.0705, + 2.08661, + 2.11827, + 2.08325, + 2.0509, + 2.08374, + 2.11546, + 2.08848, + 2.08377, + 2.02237, + 2.06809, + 2.06717, + 2.06612, + 2.09177, + 2.11161, + 2.08595, + 2.0652, + 2.12189, + 2.11844, + 2.06706, + 2.08807, + 2.05416, + 2.06017, + 2.05851, + 2.04156, + 2.05711, + 2.09344, + 2.08676, + 2.09291, + 2.00789, + 2.06745, + 2.11207, + 2.06548, + 2.04166, + 2.09161, + 2.0741, + 2.03587, + 2.07542, + 2.06881, + 2.04148, + 2.07547, + 2.08548, + 2.08202, + 2.0744, + 2.07063, + 2.11084, + 2.06949, + 2.04703, + 2.05149, + 2.04564, + 2.04473, + 2.05258, + 2.08828, + 2.09724, + 2.08835, + 2.107, + 2.08063, + 2.01908, + 2.09219, + 2.09228, + 2.03252, + 2.09815, + 2.06588, + 2.11376, + 2.07592, + 2.08393, + 2.065, + 2.1193, + 2.0821, + 2.07037, + 2.07218, + 2.05314, + 2.09861, + 2.06275, + 2.05085, + 2.07715, + 2.0724, + 2.07403, + 2.05647, + 2.08492, + 2.07734, + 2.08386, + 2.06479, + 2.09125, + 2.1146, + 2.10814, + 2.07879, + 2.04318, + 2.03921, + 2.09372, + 2.01558, + 2.09331, + 2.0616, + 2.03819, + 2.08418, + 2.06862, + 2.11498, + 2.08314, + 2.06934, + 2.08743, + 2.04098, + 2.11315, + 2.09814, + 2.07877, + 2.0425, + 2.08685, + 2.04016, + 2.06854, + 2.05003, + 2.10174, + 2.08306, + 1.99888, + 2.07582, + 2.05837, + 2.04002, + 2.07468, + 2.073, + 2.06512, + 2.10677, + 2.07408, + 2.07757, + 2.09105, + 2.08195, + 2.10606, + 2.04807, + 
2.05125, + 2.11798, + 2.05167, + 2.05773, + 2.04953, + 2.06488, + 2.05727, + 2.08435, + 2.0997, + 2.03705, + 2.05103, + 2.08619, + 2.0252, + 2.08752, + 2.10921, + 2.04601, + 2.02898, + 2.09664, + 2.03456, + 2.0785, + 2.0657, + 2.02014, + 2.09206, + 2.0554, + 2.08262, + 2.03325, + 2.08073, + 2.06443, + 2.03291, + 2.11252, + 2.08177, + 2.04144, + 2.09611, + 2.07816, + 2.09593, + 2.10515, + 2.06409, + 2.08925, + 2.05736, + 2.06693, + 2.10318, + 2.05381, + 2.07481, + 2.06401, + 2.05767, + 2.05422, + 2.07506, + 2.05545, + 2.06652, + 2.0884, + 2.07451, + 2.10113, + 2.05598, + 2.0523, + 2.08356, + 2.06443, + 2.06185, + 2.06997, + 2.09839, + 2.0423, + 2.04142, + 2.1195, + 2.0569, + 2.08862, + 2.07529, + 2.04833, + 2.09108, + 2.10455, + 2.0952, + 2.05292, + 2.07192, + 2.08117, + 2.12158, + 2.04384, + 2.05562, + 2.02166, + 2.03748, + 2.08783, + 2.0634, + 2.07965, + 2.05098, + 2.04682, + 2.10202, + 2.08528, + 2.04323, + 2.06446, + 2.03978, + 2.07718, + 2.07222, + 2.06177, + 2.06228, + 2.09846, + 2.10678, + 2.09355, + 2.0482, + 2.05602, + 2.07892, + 1.99858, + 2.10278, + 2.05747, + 2.08472, + 2.07448, + 2.00511, + 2.05635, + 2.05797, + 2.05232, + 2.09237, + 2.07376, + 2.06226, + 2.12422, + 2.06089, + 2.07038, + 2.07775, + 2.06823, + 2.11149, + 2.08014, + 2.04319, + 2.0248, + 2.0414, + 2.0474, + 2.03549, + 2.07151, + 2.07864, + 2.06277, + 2.08794, + 2.07528, + 2.01345, + 2.05544, + 2.02518, + 2.09435, + 2.08207, + 2.06672, + 2.0443, + 2.07141, + 2.04331, + 2.09558, + 2.05631, + 2.07703, + 2.06419, + 2.03431, + 2.07911, + 2.06894, + 2.05369, + 2.05697, + 2.06419, + 2.03767, + 2.10536, + 2.05649, + 2.03733, + 2.07269, + 2.09182, + 2.05047, + 2.02289, + 2.10384, + 2.07654, + 2.03778, + 2.06016, + 2.12357, + 2.10756, + 2.09351, + 2.07295, + 2.08712, + 2.05478, + 2.05937, + 2.08481, + 2.10536, + 2.09487, + 2.10054, + 2.09837, + 2.04571, + 2.08385, + 2.01669, + 2.06679, + 2.04007, + 2.04358, + 2.09403, + 2.04523, + 2.08494, + 2.08541, + 2.06179, + 2.08437, + 2.0925, + 2.1116, + 2.08138, + 2.04169, + 2.07468, + 2.03849, + 2.04533, + 2.07702, + 2.07217, + 2.10779, + 2.0796, + 2.11877, + 2.11224, + 2.08886, + 2.03036, + 2.08859, + 2.0684, + 2.1023, + 2.09056, + 2.0574, + 2.06598, + 2.04513, + 2.03187, + 2.13824, + 2.06769, + 2.0694, + 2.06701, + 2.04639, + 2.09635, + 2.06166, + 2.05073, + 2.09575, + 2.06235, + 2.02933, + 2.07925, + 2.09975, + 2.0758, + 2.08538, + 2.06306, + 2.0477, + 2.06234, + 2.06836, + 2.06186, + 2.06892, + 2.09477, + 2.11532, + 2.0585, + 2.11146, + 2.07557, + 2.0838, + 2.10376, + 2.08768, + 2.05489, + 2.08115, + 2.02263, + 2.10584, + 2.08003, + 2.05209, + 2.02933, + 2.01476, + 2.08208, + 2.06255, + 2.06305, + 2.06576, + 2.05781, + 2.09639, + 2.0864, + 2.02354, + 2.04121, + 2.07383, + 2.06523, + 2.07341, + 2.04069, + 2.07805, + 2.08764, + 2.04878, + 2.08479, + 2.04466, + 2.04325, + 2.02903, + 2.0638, + 2.05099, + 2.09189, + 2.07382, + 2.04222, + 2.06531, + 2.1341, + 2.0746, + 2.06006, + 2.02114, + 2.10314, + 2.07141, + 2.04396, + 2.0596, + 2.04019, + 2.05566, + 2.04833, + 2.04427, + 2.09751, + 2.08477, + 2.10528, + 2.07315, + 2.06632, + 2.07494, + 2.04671, + 2.01532, + 2.02963, + 2.08672, + 2.10224, + 2.06265, + 2.04386, + 2.04765, + 2.05239, + 2.09169, + 2.06093, + 2.04906, + 2.02777, + 2.09424, + 2.0721, + 2.1243, + 2.08666, + 2.02798, + 2.08581, + 2.05828, + 2.09805, + 2.05381, + 2.06521, + 2.02771, + 2.06363, + 2.11276, + 2.04973, + 2.0927, + 2.08452, + 2.06374, + 2.03925, + 2.07391, + 2.08942, + 2.07363, + 2.0893, + 2.08564, + 2.1284, + 2.04209, + 2.07609, + 2.05801, + 2.05208, + 
2.09921, + 2.08537, + 2.06907, + 2.07798, + 2.09951, + 2.05666, + 2.04942, + 2.0579, + 2.07992, + 2.03813, + 2.07502, + 2.07117, + 2.06719, + 2.07157, + 2.03792, + 2.04382, + 2.10435, + 2.02939, + 2.06829, + 2.08719, + 2.08453, + 2.09189, + 2.08162, + 2.0465, + 2.04253, + 2.05715, + 2.04819, + 2.10555, + 2.0963, + 2.05777, + 2.09406, + 2.04671, + 2.07063, + 2.06687, + 2.05201, + 2.06319, + 2.04476, + 2.07859, + 2.028, + 2.00129, + 2.04064, + 2.08684, + 2.02701, + 2.09431, + 2.04182, + 2.06478, + 2.0467, + 2.06311, + 2.08791, + 2.04562, + 2.09362, + 2.08938, + 2.03436, + 2.09585, + 2.12644, + 2.05605, + 2.06859, + 2.02657, + 2.10927, + 2.01744, + 2.04359, + 2.0508, + 2.05605, + 2.07272, + 2.0363, + 2.0717, + 2.00094, + 2.02956, + 2.08888, + 2.07499, + 2.05193, + 2.08895, + 2.11596, + 2.04701, + 2.04703, + 2.09, + 2.07431, + 2.0648, + 2.08833, + 2.02336, + 2.10007, + 2.05656, + 2.09339, + 2.06891, + 2.09231, + 2.06401, + 2.04668, + 2.04483, + 2.09012, + 2.05707, + 2.09578, + 2.10391, + 2.04064, + 2.03733, + 2.02623, + 2.0671, + 2.06169, + 2.06432, + 2.0463, + 2.0466, + 2.09942, + 2.07183, + 2.07705, + 2.05066, + 2.06322, + 2.0874, + 2.06993, + 2.06947, + 2.10037, + 2.02194, + 2.07165, + 2.07551, + 2.11145, + 2.06394, + 2.10103, + 2.05326, + 2.03447, + 2.06941, + 2.0457, + 2.07419, + 2.07523, + 2.08091, + 2.04321, + 2.0873, + 2.07493, + 2.1031, + 2.08907, + 2.10676, + 2.08984, + 2.02682, + 2.05795, + 2.0798, + 2.05243, + 2.12404, + 2.05006, + 2.05595, + 2.05828, + 2.05913, + 2.06077, + 2.05207, + 2.03966, + 2.12969, + 2.06691, + 2.09157, + 2.0473, + 2.07587, + 2.08864, + 2.08304, + 2.06687, + 2.09101, + 2.06481, + 2.07672, + 2.07299, + 2.0734, + 2.08786, + 2.09514, + 2.05356, + 2.03455, + 2.03673, + 2.0726, + 2.06357, + 2.05524, + 2.07212, + 2.06986, + 2.04086, + 2.04801, + 2.06381, + 2.04744, + 2.09731, + 2.04987, + 2.01524, + 2.02156, + 2.04468, + 2.02319, + 2.02415, + 2.05538, + 2.05843, + 2.05963, + 2.06679, + 2.05293, + 2.08778, + 2.0709, + 2.07126, + 2.05035, + 2.09885, + 2.08363, + 2.11965, + 2.06864, + 2.05459, + 2.03544, + 2.05616, + 2.08001, + 2.07057, + 2.05053, + 2.05617, + 2.06429, + 2.08439, + 2.0665, + 2.07114, + 2.03091, + 2.03153, + 2.03786, + 2.09287, + 2.0531, + 2.04921, + 2.01535, + 2.06357, + 2.08418, + 2.08526, + 2.06545, + 2.11771, + 2.02191, + 2.08584, + 2.09107, + 2.05836, + 2.06254, + 2.05628, + 2.07727, + 2.07786, + 2.0709, + 2.06299, + 2.05157, + 2.05682, + 2.10891, + 2.03811, + 2.06872, + 2.07246, + 2.06924, + 2.05836, + 2.03234, + 2.03543, + 2.06053, + 2.02157, + 2.07003, + 2.08191, + 2.05235, + 2.07064, + 2.09273, + 2.08616, + 2.07072, + 2.07697, + 2.07648, + 2.07147, + 2.04587, + 2.05781, + 2.10867, + 2.06132, + 2.08451, + 2.03336, + 2.066, + 2.07014, + 2.03973, + 2.04799, + 2.06102, + 2.03106, + 2.05143, + 2.0506, + 2.10166, + 2.02965, + 2.07172, + 2.08167, + 2.06961, + 2.05894, + 2.04579, + 2.10675, + 2.04427, + 2.06656, + 2.08586, + 2.07329, + 2.05613, + 2.07464, + 2.07808, + 2.06746, + 2.056, + 2.07606, + 2.08605, + 2.06819, + 2.00983, + 2.07741, + 2.03761, + 2.06837, + 2.10556, + 2.03823, + 2.07895, + 2.05847, + 2.05989, + 2.0507, + 2.06293, + 2.04385, + 2.03209, + 2.03575, + 2.07399, + 2.05288, + 2.06443, + 2.0712, + 2.07611, + 2.03958, + 2.04126, + 2.0451, + 2.07635, + 2.05233, + 2.08531, + 2.04737, + 2.06706, + 2.03229, + 2.06175, + 2.04673, + 2.02085, + 2.09303, + 2.06181, + 2.05964, + 2.06241, + 2.09271, + 2.11104, + 2.05905, + 2.03337, + 2.05428, + 2.07153, + 2.06453, + 2.05989, + 2.02128, + 2.03608, + 2.11014, + 2.07648, + 2.09263, + 2.06599, + 
2.08835, + 2.06757, + 2.06048, + 2.08727, + 2.04568, + 2.06627, + 2.01364, + 2.07884, + 2.05731, + 2.06175, + 2.11935, + 2.08045, + 2.00039, + 2.09513, + 2.05638, + 2.07121, + 2.06668, + 2.07038, + 2.03034, + 2.07761, + 2.07915, + 2.05382, + 2.09052, + 2.03708, + 2.0428, + 2.04437, + 2.03799, + 2.03803, + 2.06913, + 2.1007, + 2.06931, + 2.0765, + 2.08393, + 2.08549, + 2.09262, + 2.07214, + 2.01194, + 2.04994, + 2.08583, + 2.08883, + 2.06602, + 2.06201, + 2.06767, + 2.06892, + 2.07033, + 2.09088, + 2.06742, + 2.05522, + 2.04306, + 2.05319, + 2.03709, + 2.0714, + 2.09622, + 2.08187, + 2.08226, + 2.06553, + 2.10049, + 2.0276, + 2.09369, + 2.07708, + 2.03175, + 2.05742, + 2.04189, + 2.05888, + 2.07931, + 2.02275, + 2.05766, + 2.08503, + 2.08222, + 2.00651, + 2.07871, + 2.03384, + 2.10804, + 2.04975, + 2.05903, + 2.0742, + 2.06377, + 2.07306, + 2.08479, + 2.02205, + 2.07078, + 2.04194, + 2.07093, + 2.084, + 2.03891, + 2.04859, + 2.02922, + 2.06835, + 2.05206, + 2.06555, + 2.07544, + 2.07378, + 2.08072, + 2.07931, + 2.05166, + 2.08162, + 2.04455, + 2.02291, + 2.05221, + 2.10031, + 2.06292, + 2.07319, + 2.0849, + 2.07765, + 2.05631, + 2.07727, + 2.07953, + 2.06431, + 2.05257, + 2.09346, + 2.0801, + 2.07604, + 2.04926, + 2.0625, + 2.07715, + 2.0506, + 2.03707, + 2.08848, + 2.06847, + 2.06856, + 2.09228, + 2.08618, + 2.02637, + 2.07704, + 2.04095, + 2.08681, + 2.03725, + 2.07613, + 2.03648, + 2.11019, + 2.01243, + 2.10533, + 2.08905, + 2.02206, + 2.02457, + 2.06179, + 2.05651, + 2.05945, + 2.07999, + 2.04594, + 2.09759, + 2.11483, + 2.06103, + 2.04316, + 2.04977, + 2.06611, + 2.08146, + 2.03253, + 2.07963, + 2.06097, + 2.03911, + 2.09647, + 2.07788, + 2.08777, + 2.05856, + 2.0946, + 2.05636, + 2.06306, + 2.07938, + 2.0687, + 2.04253, + 2.05437, + 2.06511, + 2.05505, + 2.10283, + 2.06121, + 2.07344, + 2.09726, + 2.05334, + 2.06074, + 2.03507, + 2.09696, + 2.11831, + 2.06713, + 2.02194, + 2.02906, + 2.05732, + 2.07388, + 2.0911, + 2.0348, + 2.08358, + 2.06063, + 2.0007, + 2.10561, + 2.06328 ] }, "mem-allocated-bytes": { "start_step": 0, - "end_step": 2924, + "end_step": 51540, "step_interval": 5, "values": [ 12697244672.0, @@ -1180,12 +10904,9735 @@ 12697245696.0, 12697245696.0, 12697245696.0, - 12697245696.0 + 12697245696.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + [same golden value 12697444352.0 repeated for every remaining added entry in this list; identical run omitted]
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0 ] }, "iteration-time": { "start_step": 0, - "end_step": 29, + "end_step": 515, "step_interval": 5, "values": [ 3.59643, @@ -1193,7 +20640,104 @@ 3.44454, 3.42413, 3.41615, - 3.41152 + 3.41152, + 3.41325, + 3.41159, + 3.40907, + 3.42962, + 3.40788, + 3.40842, + 3.40754, + 4.68029, + 3.40664, + 3.40625, + 3.40697, + 3.40568, + 3.40669, + 3.40601, + 3.40767, + 3.40852, + 3.41193, + 3.40854, + 3.40843, + 3.60816, + 3.40721, + 3.40755, + 3.40381, + 3.40313, + 3.4025, + 3.40273, + 3.40112, + 3.39983, + 3.4019, + 3.40494, + 3.40491, + 3.4048, + 3.40535, + 3.4054, + 3.40448, + 3.40538, + 3.40463, + 3.40325, + 3.4694, + 3.40183, + 3.49134, + 4.24819, + 3.66255, + 3.44411, + 3.4105, + 3.40827, + 3.4096, + 3.40879, + 3.4091, + 3.40866, + 7.45584, + 3.39139, + 3.39177, + 3.39184, + 3.39163, + 3.39104, + 3.39133, + 3.39156, + 3.40723, + 3.4025, + 3.40588, + 3.40231, + 3.40177, + 3.40523, + 3.40642, + 3.40569, + 3.4054, + 3.4056, + 3.40496, + 3.40567, + 3.40358, + 3.40434, + 3.4062, + 3.39914, + 3.39957, + 3.39901, + 3.39972, + 3.40013, + 3.40046, + 3.40287, + 3.395, + 3.39505, + 3.39453, + 3.39441, + 3.39505, + 3.39618, + 3.39436, + 3.40617, + 3.40465, + 3.40516, + 3.40573, + 3.40362, + 3.40474, + 3.40257, + 5.44128, + 3.41246, + 3.41248 ] } } \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json new file mode 100644 index 000000000..8980f680f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json @@ -0,0 +1,32049 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 53183, + "step_interval": 5, + "values": [ + 12.98419, + 12.91908, + 12.86794, + 11.80404, + 10.36067, + 10.02501, + 9.62196, + 9.49541, + 9.14868, + 8.94843, + 8.84285, + 8.739, + 8.65228, + 8.4803, + 8.27706, + 8.30883, + 8.20317, + 8.06388, + 8.01718, + 7.89727, + 7.77931, + 7.69837, + 7.57899, + 7.62154, + 7.50171, + 7.32272, + 7.32606, + 7.22861, + 7.15479, + 7.15683, + 7.03266, + 6.99443, + 6.88133, + 6.8455, + 6.90151, + 6.79922, + 6.7058, + 6.68805, + 6.67142, + 6.65646, + 6.64242, + 6.57541, + 6.53691, + 6.51028, + 6.53759, + 6.49952, + 6.40743, + 6.43299, + 6.36578, + 6.36631, + 6.3464, + 6.22929, + 6.26552, + 6.22281, + 6.24165, + 6.26106, + 6.20117, + 6.16901, + 6.08495, + 6.14694, + 6.11357, + 6.14213, + 6.03523, + 6.03786, + 6.00835, + 5.94486, + 6.04637, + 5.89847, + 5.9588, + 5.92718, + 5.88896, + 5.87864, + 5.84874, + 5.78918, + 5.82016, + 5.72101, + 5.77954, + 5.7496, + 5.74263, + 5.74162, + 5.67057, + 5.7516, + 5.69378, + 5.62135, + 5.58512, + 5.59513, + 5.62787, + 5.65226, + 5.56646, + 5.5468, + 5.55958, + 5.57677, + 5.59785, + 5.48969, + 5.45037, + 5.44465, + 5.4802, + 5.46002, + 5.43968, + 5.41462, + 5.43837, + 5.41611, + 5.4328, + 5.42789, + 5.35512, + 5.3339, + 5.36373, + 5.35987, + 5.37546, + 5.32334, + 5.34594, + 5.35304, + 5.27175, + 5.31666, + 5.3014, + 5.24568, + 
5.3172, + 5.22113, + 5.17969, + 5.2957, + 5.18428, + 5.14478, + 5.17169, + 5.18525, + 5.19099, + 5.19711, + 5.14148, + 5.12108, + 5.11314, + 5.14493, + 5.12742, + 5.14362, + 5.05985, + 5.03878, + 5.07784, + 5.08032, + 5.04553, + 4.99105, + 5.0338, + 4.96559, + 5.01587, + 4.89967, + 4.89247, + 4.92978, + 4.87118, + 4.9224, + 4.91386, + 4.81396, + 4.81013, + 4.78872, + 4.85803, + 4.81016, + 4.75921, + 4.75526, + 4.75735, + 4.73742, + 4.74295, + 4.63332, + 4.64861, + 4.65814, + 4.64983, + 4.62055, + 4.64685, + 4.60608, + 4.60148, + 4.53416, + 4.57535, + 4.5439, + 4.51442, + 4.51116, + 4.4958, + 4.4381, + 4.54965, + 4.42558, + 4.44803, + 4.41747, + 4.41138, + 4.42972, + 4.43969, + 4.34347, + 4.45788, + 4.36819, + 4.39574, + 4.35585, + 4.32917, + 4.3533, + 4.32413, + 4.30382, + 4.36074, + 4.25067, + 4.30811, + 4.23739, + 4.21233, + 4.26024, + 4.23104, + 4.19611, + 4.23352, + 4.23584, + 4.18101, + 4.22907, + 4.1586, + 4.17231, + 4.20159, + 4.18734, + 4.15726, + 4.13587, + 4.10493, + 4.11823, + 4.07787, + 4.1653, + 4.10161, + 4.11814, + 4.10383, + 4.05246, + 4.10388, + 4.01047, + 4.06683, + 4.04952, + 4.04421, + 4.04533, + 4.0388, + 4.02576, + 3.96637, + 4.01096, + 4.03711, + 4.07673, + 4.02488, + 4.00188, + 3.98159, + 4.01223, + 3.97921, + 3.96743, + 3.97293, + 3.97897, + 3.85555, + 3.92234, + 3.94774, + 3.91426, + 3.94461, + 3.91534, + 3.87929, + 3.9411, + 3.88143, + 3.86679, + 3.8553, + 3.88821, + 3.83123, + 3.85266, + 3.84551, + 3.88909, + 3.84973, + 3.85953, + 3.82762, + 3.82071, + 3.84309, + 3.80714, + 3.83137, + 3.81531, + 3.78891, + 3.7809, + 3.75503, + 3.78689, + 3.7963, + 3.78109, + 3.70658, + 3.76395, + 3.80263, + 3.80963, + 3.73183, + 3.86115, + 3.73697, + 3.72256, + 3.73822, + 3.79105, + 3.73342, + 3.68097, + 3.73596, + 3.70602, + 3.75098, + 3.68107, + 3.66367, + 3.71469, + 3.69341, + 3.69057, + 3.66595, + 3.66825, + 3.64835, + 3.686, + 3.68602, + 3.65497, + 3.68047, + 3.66293, + 3.61094, + 3.62359, + 3.65903, + 3.59935, + 3.63558, + 3.5599, + 3.6547, + 3.63513, + 3.61388, + 3.58081, + 3.65811, + 3.61744, + 3.61355, + 3.62284, + 3.61707, + 3.55356, + 3.6029, + 3.56837, + 3.54483, + 3.56704, + 3.611, + 3.59329, + 3.58814, + 3.59871, + 3.51559, + 3.52262, + 3.56131, + 3.50849, + 3.60802, + 3.5961, + 3.48829, + 3.47554, + 3.48074, + 3.56141, + 3.4539, + 3.51638, + 3.51675, + 3.45733, + 3.51842, + 3.50406, + 3.49069, + 3.44249, + 3.47773, + 3.46363, + 3.55154, + 3.48545, + 3.46725, + 3.48369, + 3.43862, + 3.51175, + 3.47131, + 3.46854, + 3.45139, + 3.42636, + 3.4575, + 3.48506, + 3.42788, + 3.4359, + 3.4285, + 3.45492, + 3.45567, + 3.37167, + 3.38145, + 3.38504, + 3.41001, + 3.44639, + 3.4458, + 3.37718, + 3.43357, + 3.41693, + 3.40982, + 3.38623, + 3.42285, + 3.3654, + 3.3697, + 3.35109, + 3.46915, + 3.3605, + 3.42528, + 3.34254, + 3.31809, + 3.37538, + 3.3352, + 3.34618, + 3.37505, + 3.36954, + 3.34879, + 3.33113, + 3.29592, + 3.35797, + 3.28196, + 3.31722, + 3.36562, + 3.33716, + 3.35187, + 3.28997, + 3.31062, + 3.37159, + 3.27541, + 3.30545, + 3.33852, + 3.32558, + 3.27672, + 3.28821, + 3.25892, + 3.29762, + 3.29732, + 3.25202, + 3.31146, + 3.29029, + 3.30011, + 3.29203, + 3.23834, + 3.26237, + 3.3225, + 3.23396, + 3.27615, + 3.2507, + 3.26527, + 3.21649, + 3.25948, + 3.26662, + 3.24859, + 3.28338, + 3.30685, + 3.24206, + 3.2265, + 3.24162, + 3.22024, + 3.2434, + 3.17623, + 3.26649, + 3.18358, + 3.16895, + 3.186, + 3.24542, + 3.20835, + 3.17379, + 3.20578, + 3.23138, + 3.28144, + 3.29039, + 3.23571, + 3.23105, + 3.18598, + 3.20142, + 3.15922, + 3.21054, + 3.1879, + 3.18374, + 
3.22548, + 3.18672, + 3.18695, + 3.22257, + 3.20346, + 3.22214, + 3.21936, + 3.14212, + 3.13831, + 3.16945, + 3.12089, + 3.22079, + 3.1756, + 3.19436, + 3.14402, + 3.14306, + 3.21999, + 3.17097, + 3.13181, + 3.09422, + 3.11322, + 3.13357, + 3.13941, + 3.11551, + 3.07559, + 3.15389, + 3.14509, + 3.14922, + 3.14026, + 3.13487, + 3.15091, + 3.11567, + 3.09468, + 3.11667, + 3.09644, + 3.08766, + 3.07902, + 3.16316, + 3.12037, + 3.13054, + 3.10603, + 3.13903, + 3.12847, + 3.11667, + 3.08897, + 3.04173, + 3.10995, + 3.0873, + 3.13949, + 3.08735, + 3.14988, + 3.09382, + 3.0723, + 3.05878, + 3.05924, + 3.05126, + 3.06549, + 3.07887, + 3.13286, + 3.19623, + 3.08624, + 3.0392, + 3.04488, + 3.01615, + 3.08774, + 2.99622, + 3.02914, + 3.02947, + 3.09067, + 3.11401, + 3.08468, + 3.05285, + 3.02889, + 2.9696, + 3.07302, + 2.99563, + 3.03485, + 3.01352, + 3.02108, + 3.06754, + 3.02656, + 2.99796, + 3.03663, + 3.00679, + 2.98737, + 3.01097, + 3.05347, + 3.02116, + 3.01341, + 3.02204, + 3.06755, + 3.02376, + 3.0096, + 3.02609, + 2.99124, + 2.99161, + 3.01815, + 2.97387, + 3.01255, + 2.99293, + 3.04182, + 3.03241, + 3.00223, + 3.04234, + 3.07248, + 3.09676, + 3.10294, + 3.19843, + 3.06778, + 2.99661, + 3.02581, + 2.97053, + 2.98138, + 2.9383, + 2.93503, + 2.95344, + 2.96671, + 2.95751, + 2.96192, + 2.96042, + 2.96135, + 3.01044, + 2.97769, + 2.9561, + 3.09305, + 3.02437, + 2.97395, + 3.02485, + 2.981, + 2.948, + 2.9446, + 2.92086, + 2.94248, + 3.01167, + 2.91831, + 2.93553, + 2.98174, + 2.89493, + 2.973, + 2.96363, + 2.99416, + 2.96201, + 2.94617, + 2.98645, + 2.97847, + 2.94128, + 2.93834, + 2.93446, + 2.96779, + 2.95177, + 2.8867, + 2.96466, + 2.97525, + 2.93456, + 2.93265, + 2.85252, + 2.9222, + 2.97286, + 2.90604, + 2.98789, + 2.91011, + 2.9286, + 2.88644, + 2.89074, + 2.94705, + 2.9526, + 2.94425, + 2.94716, + 2.9229, + 2.90919, + 2.87595, + 2.97207, + 2.8887, + 2.91916, + 2.85855, + 2.92068, + 2.89862, + 2.91754, + 2.94756, + 2.85766, + 2.90518, + 2.91967, + 2.92002, + 2.89104, + 2.91582, + 2.89176, + 2.91633, + 2.87038, + 2.82494, + 2.85775, + 2.87309, + 2.93097, + 2.89861, + 2.84242, + 2.90866, + 2.83677, + 2.91942, + 2.94944, + 2.84783, + 2.85024, + 2.80212, + 2.89931, + 2.87082, + 2.85774, + 2.85876, + 2.93155, + 2.87041, + 2.87513, + 2.82293, + 2.85404, + 2.84661, + 2.846, + 2.88063, + 2.85407, + 2.84886, + 2.86981, + 2.79641, + 2.88895, + 2.89171, + 2.80083, + 2.85598, + 2.82243, + 2.91043, + 2.89791, + 2.82592, + 2.92519, + 2.88935, + 2.93367, + 2.93402, + 2.82809, + 2.87602, + 2.83651, + 2.84219, + 2.84956, + 2.84504, + 2.83968, + 2.82287, + 2.86714, + 2.85398, + 2.8445, + 2.821, + 2.80801, + 2.85356, + 2.86331, + 2.88855, + 2.84713, + 2.82335, + 2.83445, + 2.83796, + 2.86726, + 2.85303, + 2.8329, + 2.783, + 2.75861, + 2.87956, + 2.81064, + 2.84658, + 2.85592, + 2.80521, + 2.77466, + 2.82725, + 2.80499, + 2.81019, + 2.79605, + 2.80587, + 2.85307, + 2.85023, + 2.77447, + 2.77115, + 2.79416, + 2.83456, + 2.82582, + 2.79226, + 2.79049, + 2.78918, + 2.82485, + 2.86423, + 2.77456, + 2.81596, + 2.8141, + 2.85011, + 2.83399, + 2.83108, + 2.78418, + 2.76324, + 2.78822, + 2.84092, + 2.82659, + 2.83108, + 2.84488, + 2.82732, + 2.78741, + 2.86013, + 2.79839, + 2.83151, + 2.74863, + 2.73853, + 2.83164, + 2.74581, + 2.78201, + 2.76296, + 2.73349, + 2.81648, + 2.80169, + 2.78341, + 2.77496, + 2.76252, + 2.79892, + 2.77346, + 2.73542, + 2.78466, + 2.76123, + 2.80823, + 2.78521, + 2.76411, + 2.78331, + 2.74127, + 2.75627, + 2.82989, + 2.83589, + 2.81394, + 2.75656, + 2.79305, + 2.73452, + 2.80567, + 2.74423, 
+ 2.77838, + 2.77774, + 2.79062, + 2.74438, + 2.76191, + 2.736, + 2.75827, + 2.83205, + 2.73078, + 2.77335, + 2.75757, + 2.74508, + 2.73489, + 2.77663, + 2.79235, + 2.77173, + 2.76863, + 2.69548, + 2.72459, + 2.71633, + 2.79954, + 2.74726, + 2.68926, + 2.74916, + 2.73581, + 2.76657, + 2.70092, + 2.75065, + 2.76108, + 2.73907, + 2.74262, + 2.73596, + 2.80021, + 2.72376, + 2.73266, + 2.75955, + 2.74406, + 2.7226, + 2.75581, + 2.76734, + 2.7851, + 2.75595, + 2.6995, + 2.69929, + 2.71547, + 2.74243, + 2.70713, + 2.77846, + 2.72904, + 2.71435, + 2.70781, + 2.7877, + 2.7351, + 2.72156, + 2.77158, + 2.79335, + 2.74251, + 2.77298, + 2.73439, + 2.72965, + 2.74746, + 2.7702, + 2.74092, + 2.71081, + 2.69085, + 2.64368, + 2.69356, + 2.74094, + 2.70176, + 2.69215, + 2.67547, + 2.69488, + 2.77212, + 2.75865, + 2.66891, + 2.73618, + 2.73656, + 2.7385, + 2.75532, + 2.69934, + 2.67207, + 2.65692, + 2.69801, + 2.72377, + 2.71155, + 2.70355, + 2.70758, + 2.67797, + 2.71973, + 2.6857, + 2.69295, + 2.70358, + 2.68169, + 2.73862, + 2.67394, + 2.68954, + 2.73816, + 2.66373, + 2.68648, + 2.66598, + 2.7194, + 2.67951, + 2.70225, + 2.70741, + 2.72767, + 2.69146, + 2.68471, + 2.68885, + 2.70103, + 2.75286, + 2.70084, + 2.69385, + 2.67393, + 2.66134, + 2.73428, + 2.74802, + 2.66833, + 2.73713, + 2.68683, + 2.68042, + 2.6732, + 2.681, + 2.71559, + 2.68703, + 2.69938, + 2.68443, + 2.68584, + 2.6813, + 2.66379, + 2.61926, + 2.65717, + 2.68524, + 2.67082, + 2.64322, + 2.66691, + 2.71284, + 2.63993, + 2.64571, + 2.64294, + 2.62535, + 2.64654, + 2.69179, + 2.67462, + 2.69557, + 2.68745, + 2.66002, + 2.70778, + 2.68837, + 2.67251, + 2.67251, + 2.69555, + 2.70804, + 2.7017, + 2.63079, + 2.68191, + 2.68339, + 2.71709, + 2.65548, + 2.66565, + 2.62854, + 2.63167, + 2.6936, + 2.69876, + 2.65896, + 2.6522, + 2.6606, + 2.63048, + 2.67646, + 2.70366, + 2.65661, + 2.69764, + 2.65852, + 2.66819, + 2.67769, + 2.68095, + 2.67396, + 2.69301, + 2.67953, + 2.6367, + 2.59549, + 2.66537, + 2.6787, + 2.67001, + 2.7172, + 2.6412, + 2.6181, + 2.67814, + 2.65454, + 2.67921, + 2.69037, + 2.63561, + 2.66344, + 2.61298, + 2.69973, + 2.63666, + 2.65655, + 2.63696, + 2.68234, + 2.61719, + 2.65599, + 2.66065, + 2.64616, + 2.67095, + 2.59275, + 2.64435, + 2.65471, + 2.69924, + 2.64539, + 2.60645, + 2.66212, + 2.71533, + 2.68817, + 2.66263, + 2.64011, + 2.6414, + 2.66992, + 2.61474, + 2.64712, + 2.64041, + 2.6534, + 2.62336, + 2.66051, + 2.67468, + 2.60067, + 2.61385, + 2.61745, + 2.64008, + 2.57779, + 2.58634, + 2.64649, + 2.62782, + 2.61556, + 2.63198, + 2.67001, + 2.65, + 2.65546, + 2.62416, + 2.66066, + 2.65857, + 2.60059, + 2.60206, + 2.63312, + 2.61806, + 2.63129, + 2.62377, + 2.59056, + 2.66388, + 2.6675, + 2.62269, + 2.63428, + 2.62533, + 2.64793, + 2.65119, + 2.63294, + 2.59744, + 2.62581, + 2.64768, + 2.63606, + 2.61877, + 2.60563, + 2.65874, + 2.64996, + 2.65706, + 2.60299, + 2.63145, + 2.61945, + 2.63531, + 2.64766, + 2.63675, + 2.6322, + 2.62394, + 2.59152, + 2.60842, + 2.65137, + 2.60099, + 2.58619, + 2.622, + 2.60498, + 2.62332, + 2.67063, + 2.63481, + 2.55966, + 2.59884, + 2.57809, + 2.56345, + 2.61952, + 2.57435, + 2.57911, + 2.61293, + 2.56825, + 2.62418, + 2.57672, + 2.5657, + 2.55569, + 2.6583, + 2.59679, + 2.57316, + 2.52258, + 2.56856, + 2.56653, + 2.60895, + 2.60955, + 2.60742, + 2.60524, + 2.58511, + 2.61865, + 2.54429, + 2.57955, + 2.60742, + 2.60812, + 2.58147, + 2.61105, + 2.57176, + 2.58242, + 2.55882, + 2.5998, + 2.60262, + 2.54016, + 2.62618, + 2.6191, + 2.58602, + 2.63077, + 2.57095, + 2.60009, + 2.56923, + 2.56645, + 
2.58642, + 2.59774, + 2.60899, + 2.56033, + 2.64222, + 2.59506, + 2.62285, + 2.59309, + 2.59015, + 2.56993, + 2.58954, + 2.61676, + 2.55554, + 2.57971, + 2.60456, + 2.55721, + 2.57422, + 2.57879, + 2.60781, + 2.51687, + 2.56004, + 2.50109, + 2.6096, + 2.57868, + 2.58675, + 2.60828, + 2.57062, + 2.58576, + 2.59196, + 2.60063, + 2.55805, + 2.61719, + 2.62474, + 2.5756, + 2.52894, + 2.61512, + 2.57136, + 2.59832, + 2.57085, + 2.5437, + 2.54518, + 2.57654, + 2.61867, + 2.5582, + 2.57172, + 2.55028, + 2.53879, + 2.54825, + 2.58383, + 2.55716, + 2.55585, + 2.59319, + 2.58946, + 2.52414, + 2.54023, + 2.60288, + 2.59264, + 2.55414, + 2.56634, + 2.59225, + 2.56708, + 2.59247, + 2.58039, + 2.60525, + 2.55538, + 2.59248, + 2.59206, + 2.57052, + 2.55799, + 2.61974, + 2.54098, + 2.57906, + 2.56644, + 2.55381, + 2.5323, + 2.5873, + 2.55185, + 2.59869, + 2.53981, + 2.5837, + 2.57577, + 2.54476, + 2.5592, + 2.53242, + 2.52013, + 2.61405, + 2.53815, + 2.5568, + 2.54179, + 2.53228, + 2.57172, + 2.5355, + 2.53033, + 2.54588, + 2.56312, + 2.55533, + 2.54647, + 2.52223, + 2.54247, + 2.56063, + 2.55561, + 2.57172, + 2.54352, + 2.54393, + 2.50013, + 2.53398, + 2.55553, + 2.59468, + 2.52424, + 2.5382, + 2.57504, + 2.54588, + 2.57543, + 2.51161, + 2.55126, + 2.51887, + 2.53646, + 2.55676, + 2.5304, + 2.59277, + 2.54044, + 2.57123, + 2.6003, + 2.49646, + 2.53898, + 2.52565, + 2.56482, + 2.60363, + 2.57907, + 2.48965, + 2.50199, + 2.55087, + 2.55861, + 2.56767, + 2.55119, + 2.56728, + 2.56228, + 2.5453, + 2.57644, + 2.52451, + 2.5021, + 2.59152, + 2.54781, + 2.5724, + 2.51337, + 2.52616, + 2.53721, + 2.52757, + 2.52641, + 2.55016, + 2.54188, + 2.54979, + 2.56938, + 2.54981, + 2.52435, + 2.5921, + 2.5229, + 2.55128, + 2.55864, + 2.56234, + 2.52253, + 2.52182, + 2.55833, + 2.50951, + 2.56224, + 2.55813, + 2.56019, + 2.53151, + 2.52623, + 2.55852, + 2.54794, + 2.49912, + 2.54606, + 2.53852, + 2.54865, + 2.53166, + 2.53923, + 2.51674, + 2.50393, + 2.48558, + 2.52789, + 2.55185, + 2.54107, + 2.53168, + 2.5522, + 2.54562, + 2.54469, + 2.57939, + 2.4972, + 2.54304, + 2.51904, + 2.53839, + 2.52036, + 2.52717, + 2.52244, + 2.53731, + 2.54459, + 2.5515, + 2.56656, + 2.53226, + 2.44153, + 2.48606, + 2.49793, + 2.52143, + 2.51475, + 2.5032, + 2.53246, + 2.55709, + 2.52275, + 2.50349, + 2.53142, + 2.52539, + 2.56627, + 2.50335, + 2.49016, + 2.50717, + 2.45547, + 2.53239, + 2.54252, + 2.4854, + 2.47096, + 2.49029, + 2.5684, + 2.51388, + 2.52363, + 2.51274, + 2.53134, + 2.57428, + 2.51913, + 2.49343, + 2.52374, + 2.46945, + 2.51212, + 2.51176, + 2.53629, + 2.54166, + 2.48024, + 2.49983, + 2.50244, + 2.46708, + 2.50453, + 2.52617, + 2.52839, + 2.47474, + 2.54907, + 2.51612, + 2.50456, + 2.51193, + 2.53536, + 2.52447, + 2.57062, + 2.49637, + 2.53967, + 2.52325, + 2.49184, + 2.54194, + 2.46873, + 2.5236, + 2.49495, + 2.51795, + 2.4885, + 2.50693, + 2.50458, + 2.51677, + 2.46832, + 2.51039, + 2.48969, + 2.5417, + 2.51261, + 2.50471, + 2.50959, + 2.53441, + 2.47371, + 2.47498, + 2.47009, + 2.49353, + 2.51926, + 2.49677, + 2.48562, + 2.5401, + 2.48562, + 2.54572, + 2.47338, + 2.51237, + 2.50847, + 2.51632, + 2.50885, + 2.49845, + 2.46106, + 2.48298, + 2.49227, + 2.50196, + 2.49089, + 2.49019, + 2.49425, + 2.51916, + 2.4712, + 2.51248, + 2.52114, + 2.46329, + 2.47717, + 2.49578, + 2.53218, + 2.47959, + 2.4718, + 2.50834, + 2.48089, + 2.52138, + 2.54444, + 2.47143, + 2.50868, + 2.47049, + 2.49498, + 2.54311, + 2.51507, + 2.5268, + 2.50941, + 2.50588, + 2.47824, + 2.51134, + 2.54083, + 2.51842, + 2.49119, + 2.49874, + 2.48358, + 2.46988, + 
2.49678, + 2.5227, + 2.52353, + 2.46098, + 2.4835, + 2.50653, + 2.52461, + 2.49873, + 2.51227, + 2.44116, + 2.43741, + 2.45375, + 2.48973, + 2.51768, + 2.5229, + 2.48912, + 2.46431, + 2.47457, + 2.47566, + 2.49241, + 2.46526, + 2.43836, + 2.48552, + 2.46722, + 2.50475, + 2.49552, + 2.49723, + 2.48812, + 2.4622, + 2.52397, + 2.47532, + 2.49661, + 2.53455, + 2.45947, + 2.48932, + 2.50029, + 2.46941, + 2.52551, + 2.50054, + 2.43772, + 2.52083, + 2.47606, + 2.46856, + 2.47513, + 2.52144, + 2.46683, + 2.45432, + 2.48696, + 2.48036, + 2.50704, + 2.52042, + 2.5283, + 2.44247, + 2.47057, + 2.49015, + 2.48899, + 2.49301, + 2.5368, + 2.48499, + 2.477, + 2.50119, + 2.51599, + 2.48781, + 2.48645, + 2.50422, + 2.47308, + 2.46711, + 2.48569, + 2.51404, + 2.49852, + 2.49996, + 2.51047, + 2.50389, + 2.47199, + 2.45675, + 2.50458, + 2.50673, + 2.50761, + 2.48005, + 2.46156, + 2.46481, + 2.51002, + 2.48861, + 2.44232, + 2.47867, + 2.44272, + 2.51273, + 2.50682, + 2.48148, + 2.47751, + 2.49822, + 2.50632, + 2.49264, + 2.45902, + 2.44918, + 2.47203, + 2.50082, + 2.4936, + 2.42406, + 2.48076, + 2.48853, + 2.41644, + 2.44562, + 2.44746, + 2.48856, + 2.48456, + 2.45951, + 2.48788, + 2.47264, + 2.46361, + 2.49379, + 2.51188, + 2.49719, + 2.47921, + 2.47002, + 2.47636, + 2.45043, + 2.49448, + 2.48338, + 2.4714, + 2.47708, + 2.48189, + 2.43904, + 2.48078, + 2.46934, + 2.49312, + 2.45741, + 2.52217, + 2.49114, + 2.52001, + 2.50908, + 2.47191, + 2.45726, + 2.46327, + 2.51216, + 2.46282, + 2.46216, + 2.51233, + 2.45002, + 2.47264, + 2.47781, + 2.49215, + 2.43742, + 2.43408, + 2.41878, + 2.49157, + 2.49674, + 2.47366, + 2.461, + 2.47251, + 2.47477, + 2.48874, + 2.45467, + 2.42854, + 2.5089, + 2.4855, + 2.43789, + 2.45628, + 2.48046, + 2.4811, + 2.46436, + 2.46119, + 2.44883, + 2.44836, + 2.42589, + 2.54467, + 2.48679, + 2.42558, + 2.42779, + 2.45567, + 2.47442, + 2.46326, + 2.48475, + 2.45112, + 2.43099, + 2.44148, + 2.45381, + 2.48534, + 2.43155, + 2.4798, + 2.45362, + 2.48073, + 2.53277, + 2.4947, + 2.44257, + 2.47023, + 2.48024, + 2.45757, + 2.47364, + 2.43789, + 2.45069, + 2.43908, + 2.46809, + 2.44938, + 2.45398, + 2.46977, + 2.4516, + 2.41585, + 2.44424, + 2.48174, + 2.4399, + 2.46276, + 2.48028, + 2.50232, + 2.48649, + 2.44632, + 2.51331, + 2.45198, + 2.46772, + 2.47924, + 2.46174, + 2.41598, + 2.47149, + 2.50108, + 2.42365, + 2.4672, + 2.44726, + 2.45445, + 2.46386, + 2.47119, + 2.44565, + 2.43915, + 2.43623, + 2.42684, + 2.48212, + 2.47656, + 2.42247, + 2.47218, + 2.45116, + 2.4212, + 2.46954, + 2.44465, + 2.41909, + 2.48952, + 2.51748, + 2.52221, + 2.44872, + 2.44206, + 2.46907, + 2.43174, + 2.47023, + 2.43705, + 2.4185, + 2.4569, + 2.46952, + 2.48206, + 2.47408, + 2.4539, + 2.47445, + 2.42394, + 2.45395, + 2.44834, + 2.42642, + 2.44206, + 2.46098, + 2.45543, + 2.45796, + 2.44468, + 2.44098, + 2.42427, + 2.4239, + 2.43791, + 2.49488, + 2.43737, + 2.44396, + 2.46736, + 2.4683, + 2.45407, + 2.4542, + 2.44154, + 2.42637, + 2.42361, + 2.48675, + 2.45458, + 2.4439, + 2.43621, + 2.42222, + 2.49616, + 2.42608, + 2.46972, + 2.45859, + 2.44728, + 2.44741, + 2.43318, + 2.44258, + 2.43579, + 2.41052, + 2.44061, + 2.46347, + 2.42659, + 2.44777, + 2.44381, + 2.43926, + 2.4344, + 2.42818, + 2.43351, + 2.44399, + 2.39769, + 2.43949, + 2.48018, + 2.44648, + 2.45692, + 2.40909, + 2.43483, + 2.45647, + 2.39934, + 2.39287, + 2.43614, + 2.44456, + 2.48993, + 2.44823, + 2.44936, + 2.40574, + 2.40074, + 2.45376, + 2.45123, + 2.42492, + 2.41836, + 2.42335, + 2.43323, + 2.43933, + 2.43792, + 2.48867, + 2.43787, + 2.43378, + 2.41573, 
+ 2.43863, + 2.46001, + 2.40407, + 2.44993, + 2.45847, + 2.40583, + 2.45827, + 2.45425, + 2.43504, + 2.41136, + 2.47834, + 2.40462, + 2.41501, + 2.46588, + 2.43642, + 2.44544, + 2.40237, + 2.40361, + 2.42828, + 2.42495, + 2.49418, + 2.37629, + 2.40121, + 2.48734, + 2.38038, + 2.43845, + 2.4517, + 2.4699, + 2.41947, + 2.43187, + 2.44657, + 2.44123, + 2.41938, + 2.40222, + 2.42545, + 2.41268, + 2.49022, + 2.42048, + 2.38719, + 2.4488, + 2.42704, + 2.45788, + 2.44896, + 2.43458, + 2.47298, + 2.41989, + 2.45365, + 2.4551, + 2.38841, + 2.40977, + 2.42921, + 2.44837, + 2.43066, + 2.4104, + 2.44185, + 2.43418, + 2.42102, + 2.42816, + 2.4481, + 2.47833, + 2.41271, + 2.39075, + 2.43393, + 2.4301, + 2.39789, + 2.43808, + 2.42409, + 2.3998, + 2.4348, + 2.40504, + 2.43412, + 2.41964, + 2.47073, + 2.42032, + 2.4182, + 2.41686, + 2.4091, + 2.41202, + 2.4744, + 2.45341, + 2.42216, + 2.38629, + 2.42227, + 2.3949, + 2.42597, + 2.43345, + 2.4033, + 2.42782, + 2.42795, + 2.43672, + 2.43901, + 2.41077, + 2.3959, + 2.44701, + 2.4326, + 2.41483, + 2.40245, + 2.40167, + 2.41886, + 2.43415, + 2.46731, + 2.41425, + 2.40864, + 2.38945, + 2.39272, + 2.41816, + 2.39451, + 2.43208, + 2.41808, + 2.40419, + 2.47542, + 2.44037, + 2.37254, + 2.40797, + 2.4161, + 2.4555, + 2.41324, + 2.37544, + 2.40916, + 2.39928, + 2.36893, + 2.39834, + 2.42514, + 2.42034, + 2.41952, + 2.39531, + 2.41875, + 2.41904, + 2.40517, + 2.4455, + 2.39346, + 2.43404, + 2.41116, + 2.4104, + 2.39527, + 2.40085, + 2.35791, + 2.46814, + 2.41736, + 2.40424, + 2.4578, + 2.39449, + 2.44911, + 2.43566, + 2.43022, + 2.48053, + 2.39956, + 2.42973, + 2.43203, + 2.37597, + 2.41757, + 2.37497, + 2.43604, + 2.40956, + 2.38516, + 2.38833, + 2.44666, + 2.36002, + 2.46161, + 2.44621, + 2.38175, + 2.44658, + 2.39635, + 2.40173, + 2.4385, + 2.42944, + 2.4297, + 2.38568, + 2.43804, + 2.43503, + 2.39494, + 2.38995, + 2.42145, + 2.40455, + 2.38452, + 2.42348, + 2.40443, + 2.41578, + 2.41045, + 2.44383, + 2.37083, + 2.40343, + 2.36111, + 2.40886, + 2.41537, + 2.43849, + 2.47706, + 2.43722, + 2.38781, + 2.43626, + 2.43463, + 2.35431, + 2.40143, + 2.3807, + 2.3874, + 2.44311, + 2.41326, + 2.39779, + 2.4384, + 2.44513, + 2.43208, + 2.44734, + 2.41476, + 2.47766, + 2.37664, + 2.39589, + 2.40416, + 2.38793, + 2.37903, + 2.38143, + 2.36649, + 2.4344, + 2.38476, + 2.42088, + 2.38202, + 2.36308, + 2.43007, + 2.3996, + 2.43126, + 2.42001, + 2.38902, + 2.45338, + 2.40084, + 2.4181, + 2.37636, + 2.42268, + 2.38875, + 2.42246, + 2.40696, + 2.37248, + 2.41147, + 2.3964, + 2.42269, + 2.42928, + 2.44764, + 2.38972, + 2.38337, + 2.42218, + 2.41398, + 2.4144, + 2.44582, + 2.39876, + 2.40281, + 2.4479, + 2.40925, + 2.39995, + 2.37399, + 2.42343, + 2.39007, + 2.38361, + 2.35764, + 2.39641, + 2.39661, + 2.462, + 2.38067, + 2.3763, + 2.38298, + 2.36606, + 2.38746, + 2.43554, + 2.44202, + 2.42766, + 2.38651, + 2.38103, + 2.42624, + 2.39899, + 2.40719, + 2.41077, + 2.36751, + 2.45914, + 2.40187, + 2.3622, + 2.39932, + 2.40727, + 2.35981, + 2.39686, + 2.40559, + 2.40829, + 2.37755, + 2.37567, + 2.40269, + 2.41889, + 2.38588, + 2.41283, + 2.36274, + 2.39852, + 2.39475, + 2.38881, + 2.37977, + 2.38436, + 2.38116, + 2.45097, + 2.39336, + 2.35309, + 2.3193, + 2.39562, + 2.42489, + 2.35553, + 2.36392, + 2.41132, + 2.39906, + 2.38236, + 2.34957, + 2.38655, + 2.37886, + 2.4032, + 2.44724, + 2.42583, + 2.35575, + 2.40803, + 2.38587, + 2.32984, + 2.40585, + 2.39817, + 2.39539, + 2.36618, + 2.37288, + 2.38173, + 2.44428, + 2.36327, + 2.38855, + 2.38821, + 2.40833, + 2.40302, + 2.38264, + 2.34846, + 
2.3694, + 2.41922, + 2.37434, + 2.42192, + 2.37205, + 2.3617, + 2.37145, + 2.34717, + 2.40241, + 2.31411, + 2.38114, + 2.4103, + 2.38677, + 2.35757, + 2.37079, + 2.35967, + 2.38387, + 2.41274, + 2.40819, + 2.37717, + 2.39562, + 2.36174, + 2.38422, + 2.42365, + 2.32535, + 2.39445, + 2.3837, + 2.44464, + 2.40211, + 2.39042, + 2.38827, + 2.36975, + 2.34269, + 2.41897, + 2.42899, + 2.35431, + 2.38611, + 2.37312, + 2.3915, + 2.38932, + 2.4127, + 2.33445, + 2.34791, + 2.34999, + 2.37074, + 2.44889, + 2.35828, + 2.38525, + 2.37374, + 2.36779, + 2.41399, + 2.38956, + 2.36053, + 2.36688, + 2.36029, + 2.41255, + 2.36126, + 2.42017, + 2.37035, + 2.3579, + 2.39731, + 2.37274, + 2.36164, + 2.3406, + 2.35618, + 2.41837, + 2.40452, + 2.38041, + 2.35802, + 2.3776, + 2.35, + 2.34043, + 2.41691, + 2.37895, + 2.32466, + 2.35918, + 2.36973, + 2.37125, + 2.36101, + 2.35971, + 2.37979, + 2.37985, + 2.30211, + 2.35671, + 2.37984, + 2.36267, + 2.36033, + 2.41398, + 2.36709, + 2.3638, + 2.37147, + 2.38241, + 2.37443, + 2.40214, + 2.38842, + 2.3924, + 2.35504, + 2.40521, + 2.35751, + 2.3778, + 2.35868, + 2.34116, + 2.37323, + 2.37569, + 2.35289, + 2.37776, + 2.36834, + 2.37741, + 2.37573, + 2.33007, + 2.37332, + 2.36447, + 2.36356, + 2.34745, + 2.41894, + 2.3699, + 2.32165, + 2.3626, + 2.42148, + 2.36015, + 2.30794, + 2.34737, + 2.39952, + 2.31543, + 2.41693, + 2.35574, + 2.28794, + 2.38521, + 2.33121, + 2.38382, + 2.38452, + 2.34225, + 2.38258, + 2.32508, + 2.35264, + 2.34782, + 2.35467, + 2.31892, + 2.33791, + 2.33464, + 2.40442, + 2.36503, + 2.33589, + 2.36791, + 2.38653, + 2.37104, + 2.39368, + 2.34645, + 2.38549, + 2.32241, + 2.3949, + 2.37387, + 2.35282, + 2.34102, + 2.37072, + 2.33689, + 2.34766, + 2.32982, + 2.38524, + 2.33179, + 2.36397, + 2.33285, + 2.32107, + 2.32406, + 2.30448, + 2.39387, + 2.40308, + 2.36095, + 2.3717, + 2.33301, + 2.31196, + 2.40569, + 2.37152, + 2.37446, + 2.36441, + 2.31796, + 2.36133, + 2.35281, + 2.34712, + 2.36205, + 2.36266, + 2.30883, + 2.36213, + 2.35561, + 2.40853, + 2.37288, + 2.34161, + 2.3968, + 2.36399, + 2.33852, + 2.36198, + 2.34423, + 2.32484, + 2.33432, + 2.36546, + 2.33976, + 2.31307, + 2.3184, + 2.31741, + 2.31843, + 2.28965, + 2.34009, + 2.30929, + 2.39347, + 2.31745, + 2.35377, + 2.33591, + 2.34666, + 2.37045, + 2.32797, + 2.31528, + 2.36211, + 2.37247, + 2.38143, + 2.31443, + 2.34936, + 2.33315, + 2.37157, + 2.34943, + 2.39519, + 2.34092, + 2.36524, + 2.36448, + 2.34077, + 2.33426, + 2.37359, + 2.31207, + 2.27711, + 2.32888, + 2.34586, + 2.36063, + 2.3318, + 2.31964, + 2.34302, + 2.37103, + 2.36492, + 2.31915, + 2.34072, + 2.35957, + 2.3319, + 2.33556, + 2.3562, + 2.38816, + 2.2878, + 2.31349, + 2.36829, + 2.28982, + 2.34635, + 2.36405, + 2.38149, + 2.33435, + 2.33024, + 2.29923, + 2.30443, + 2.31556, + 2.35307, + 2.33861, + 2.30846, + 2.31353, + 2.29566, + 2.32083, + 2.35146, + 2.29441, + 2.35297, + 2.32767, + 2.34018, + 2.34667, + 2.33407, + 2.28717, + 2.30826, + 2.3541, + 2.35607, + 2.38586, + 2.35185, + 2.30789, + 2.36756, + 2.36125, + 2.34786, + 2.36249, + 2.32214, + 2.30432, + 2.35128, + 2.34236, + 2.37517, + 2.31364, + 2.32562, + 2.31039, + 2.34544, + 2.40571, + 2.33947, + 2.34913, + 2.36287, + 2.3212, + 2.30485, + 2.36056, + 2.31541, + 2.32215, + 2.34605, + 2.34271, + 2.36568, + 2.32517, + 2.34936, + 2.34077, + 2.34932, + 2.29629, + 2.32931, + 2.35075, + 2.362, + 2.33497, + 2.35549, + 2.32194, + 2.36096, + 2.36015, + 2.29582, + 2.27681, + 2.32794, + 2.34127, + 2.30457, + 2.3071, + 2.32661, + 2.35084, + 2.33485, + 2.32981, + 2.29971, + 2.29722, + 2.32502, 
[... long array of per-iteration floating-point metric values added by this patch (several thousand entries, trending from roughly 2.36 down to about 2.00); the full list of diff-added values is elided here ...]
2.07676, + 2.0535, + 2.03515, + 2.07661, + 2.08295, + 2.07087, + 2.12964, + 2.1083, + 2.07008, + 2.07236, + 2.08364, + 2.06902, + 2.07303, + 2.04524, + 2.04759, + 2.06112, + 2.07253, + 2.05656, + 2.07857, + 2.08133, + 2.09672, + 2.09143, + 2.08258, + 2.07353, + 2.10649, + 2.00744, + 2.10176, + 2.111, + 2.05974, + 2.05428, + 2.07754, + 2.06603, + 2.08125, + 2.11034, + 2.08609, + 2.03903, + 2.09737, + 2.10204, + 2.06438, + 2.0723, + 2.08264, + 2.03853, + 2.07443, + 2.0853, + 2.05132, + 2.06242, + 2.07401, + 2.06993, + 2.11031, + 2.08853, + 2.04626, + 2.09489, + 2.06417, + 2.07078, + 2.12536, + 2.06705, + 2.06293, + 2.057, + 2.06853, + 2.08192, + 2.11164, + 2.08612, + 2.05315, + 2.02937, + 2.11841, + 2.09766, + 2.01826, + 2.07782, + 2.03111, + 2.10365, + 2.06427, + 2.03151, + 2.13872, + 2.04938, + 2.09609, + 2.11322, + 2.07392, + 2.08912, + 2.07484, + 2.09911, + 2.08997, + 2.06037, + 2.06054, + 2.1092, + 2.06866, + 2.07059, + 2.05486, + 2.07062, + 2.11486, + 2.06138, + 2.08323, + 2.05476, + 2.0595, + 2.07122, + 2.06643, + 2.08598, + 2.04996, + 2.06984, + 2.07735, + 2.05319, + 2.10446, + 2.11218, + 2.12446, + 2.10195, + 2.09207, + 2.07045, + 2.09209, + 2.07994, + 2.03823, + 2.10558, + 2.05995, + 2.08283, + 2.04201, + 2.04279, + 2.05379, + 2.10799, + 2.05601, + 2.11753, + 2.10003, + 2.08922, + 2.03212, + 2.02351, + 2.08876, + 2.06804, + 2.1154, + 2.03402, + 2.04906, + 2.09092, + 2.08807, + 2.03694, + 2.06683, + 2.10941, + 2.07538, + 2.08424, + 2.03637, + 2.07526, + 2.0696, + 2.08612, + 2.09094, + 2.07163, + 2.07926, + 2.0436, + 2.04763, + 2.07245, + 2.07232, + 2.03811, + 2.03332, + 2.07774, + 2.081, + 2.11632, + 2.0517, + 2.04891, + 2.04275, + 2.08843, + 2.07145, + 2.09188, + 2.09834, + 2.07899, + 2.06966, + 2.09097, + 2.08361, + 2.09158, + 2.06205, + 2.0416, + 2.07187, + 2.08834, + 2.06646, + 2.05203, + 2.06597, + 2.10952, + 2.08278, + 2.03716, + 2.0806, + 2.02703, + 2.06257, + 2.10693, + 2.02978, + 2.07814, + 2.07698, + 2.07721, + 2.08516, + 2.09677, + 2.04944, + 2.09755, + 2.05212, + 2.09593, + 2.08961, + 2.06584, + 2.05998, + 2.11107, + 2.06061, + 2.07297, + 2.08069, + 2.0974, + 2.08085, + 2.08304, + 2.03449, + 2.05481, + 2.03087, + 2.0516, + 2.09421, + 2.09367, + 2.03753, + 2.08647, + 2.03627, + 2.08571, + 2.10527, + 2.08331, + 2.05384, + 2.04836, + 2.08465, + 2.04643, + 2.13185, + 2.05415, + 2.10417, + 2.06103, + 2.07331, + 2.08225, + 2.08421, + 2.07497, + 2.11551, + 2.1103, + 2.09086, + 2.06248, + 2.02085, + 2.07909, + 2.09713, + 2.10516, + 2.03844, + 2.02803, + 2.04845, + 2.03926, + 2.07185, + 2.09035, + 2.10247, + 2.08527, + 2.06027, + 2.08861, + 2.05728, + 2.06764, + 2.11167, + 2.04776, + 2.03874, + 2.0677, + 2.09069, + 2.06484, + 2.06663, + 2.06817, + 2.08222, + 2.07262, + 2.12079, + 2.06122, + 2.05905, + 2.03688, + 2.06852, + 2.11339, + 2.05377, + 2.0445, + 2.10575, + 2.1056, + 2.11083, + 2.06392, + 2.08807, + 2.03652, + 2.1092, + 2.10076, + 2.10486, + 2.06538, + 2.07225, + 2.08579, + 2.0326, + 2.05998, + 2.07024, + 2.07479, + 2.04807, + 2.0728, + 2.09785, + 2.05145, + 2.04431, + 2.11824, + 2.04312, + 2.03268, + 2.09024, + 2.03737, + 2.10626, + 2.12688, + 2.09582, + 2.06452, + 2.09179, + 2.08186, + 2.09928, + 2.06191, + 2.09476, + 2.01981, + 2.047, + 2.03228, + 2.00172, + 2.09233, + 2.07273, + 2.05614, + 2.08759, + 2.06359, + 2.08411, + 2.09002, + 2.07199, + 2.0966, + 2.0663, + 2.11224, + 2.07224, + 2.03215, + 2.0657, + 2.09718, + 2.08311, + 2.08796, + 2.09028, + 2.05719, + 2.09571, + 2.06604, + 2.07665, + 2.11751, + 2.05893, + 2.04589, + 2.05035, + 2.12615, + 2.08933, + 2.03781, + 
2.03699, + 2.04465, + 2.09132, + 2.06001, + 2.02439, + 2.04713, + 2.08635, + 2.08251, + 2.05064, + 2.05604, + 2.03746, + 2.08633, + 2.04423, + 2.04517, + 2.10912, + 2.04242, + 2.04988, + 2.05275, + 2.02955, + 2.07594, + 2.03874, + 2.12035, + 2.04269, + 2.10422, + 2.1321, + 2.07987, + 2.0338, + 2.05583, + 2.02542, + 2.05657, + 2.05868, + 2.08488, + 2.03435, + 2.03493, + 2.11027, + 2.04879, + 2.07019, + 2.04808, + 2.04899, + 2.03533, + 2.09001, + 2.05763, + 2.06704, + 2.05423, + 2.0094, + 2.05476, + 2.06344, + 2.08255, + 2.05822, + 2.04538, + 2.07641, + 2.11605, + 2.06253, + 2.10053, + 2.0454, + 2.08173, + 2.0958, + 2.06008, + 2.04141, + 2.10506, + 2.06804, + 2.10793, + 2.1113, + 2.08151, + 2.04239, + 2.08228, + 2.03401, + 2.07153, + 2.09194, + 2.11955, + 2.05519, + 2.13479, + 2.08353, + 2.05744, + 2.04628, + 2.03103, + 2.04818, + 2.09127, + 2.07482, + 2.09692, + 2.08122, + 2.05804, + 2.09636, + 2.07358, + 2.07065, + 2.04836, + 2.06417, + 2.07228, + 2.09008, + 2.06119, + 2.08591, + 1.98737, + 2.07877, + 2.07344, + 2.06367, + 2.05838, + 2.0747, + 2.04492, + 2.09362, + 2.10211, + 2.06115, + 2.07565, + 2.03927, + 2.05576, + 2.1045, + 2.06089, + 2.07477, + 2.09973, + 2.10691, + 2.08703, + 2.08386, + 2.04263, + 2.07413, + 2.04991, + 2.05306, + 2.05785, + 2.09713, + 2.04, + 2.07001, + 2.06954, + 2.09927, + 2.04752, + 2.05949, + 2.05096, + 2.12425, + 2.06031, + 2.08131, + 2.06549, + 2.03506, + 2.05842, + 2.09037, + 2.05977, + 2.06899, + 2.04334, + 2.08199, + 2.03997, + 2.09957, + 2.07667, + 2.02675, + 2.0637, + 2.07252, + 2.09879, + 2.10545, + 2.02426, + 2.05537, + 2.04638, + 2.08495, + 2.09223, + 2.09918, + 2.04542, + 2.03041, + 2.11142, + 2.0758, + 2.02005, + 2.06528, + 2.09088, + 2.03257, + 2.09392, + 2.05435, + 2.10514, + 2.04785, + 2.07381, + 2.0773, + 2.06316, + 2.04501, + 2.07996, + 2.06552, + 2.11218, + 2.10057, + 2.06543, + 2.04405, + 2.02708, + 2.03475, + 2.07201, + 2.06388, + 2.09521, + 2.10629, + 2.05307, + 2.07467, + 2.07584, + 2.10318, + 2.09129, + 2.08565, + 2.11003, + 2.0314, + 2.05657, + 2.06752, + 2.10609, + 2.08033, + 2.08165, + 2.04454, + 2.07803, + 2.0591, + 2.1017, + 2.10863, + 2.07402, + 2.04595, + 2.08145, + 2.04032, + 2.06491, + 2.06006, + 2.07435, + 2.05599, + 2.08956, + 2.078, + 2.06495, + 2.06656, + 2.08641, + 2.08241, + 2.0823, + 2.08903, + 2.04061, + 2.06527, + 2.09438, + 2.08173, + 2.11144, + 2.08193, + 2.04989, + 2.05816, + 2.08623, + 2.09481, + 2.05844, + 2.04585, + 2.0281, + 2.04477, + 2.04074, + 2.07343, + 2.04321, + 2.07098, + 2.09753, + 2.09038, + 2.11503, + 2.06641, + 2.05276, + 2.09645, + 2.07398, + 2.08126, + 2.09451, + 2.0589, + 2.04451, + 2.05744, + 2.06871, + 2.07664, + 2.1098, + 2.04961, + 2.06867, + 2.05256, + 2.05141, + 2.06876, + 2.06913, + 2.09934, + 2.07355, + 2.08036, + 2.03735, + 2.1077, + 2.09777, + 2.11925, + 2.08052, + 2.09469, + 2.08265, + 2.05817, + 2.04492, + 2.06288, + 2.09642, + 2.08577, + 2.05511, + 2.04801, + 2.0758, + 2.04557, + 2.05793, + 2.02491, + 2.08815, + 2.10922, + 2.09084, + 2.05135, + 2.07325, + 2.04706, + 2.0154, + 2.02765, + 2.0913, + 2.06243, + 1.9949, + 2.04451, + 2.03504, + 2.10352, + 2.04774, + 2.07402, + 2.05802, + 2.01303, + 2.07871, + 2.09751, + 2.07597, + 2.06821, + 2.05218, + 2.10225, + 2.10491, + 2.05806, + 2.04556, + 2.14102, + 2.07449, + 2.08151, + 2.06749, + 2.03969, + 2.1059, + 2.06709, + 2.13775, + 2.07773, + 2.07881, + 2.09716, + 2.07145, + 2.04586, + 2.11348, + 2.04382, + 2.06848, + 2.06425, + 2.09541, + 2.05727, + 2.09571, + 2.09677, + 2.05239, + 2.05834, + 2.04982, + 2.06149, + 2.05031, + 2.0554, + 2.04473, 
+ 2.1298, + 2.09963, + 2.0506, + 2.0853, + 2.08459, + 2.02537, + 2.07238, + 2.06157, + 2.09353, + 2.07341, + 2.07942, + 2.06609, + 2.05659, + 2.01597, + 2.05387, + 2.04831, + 2.11018, + 2.09594, + 2.05744, + 2.07539, + 2.07705, + 2.12184, + 2.06034, + 2.04273, + 2.00969, + 2.1075, + 2.09496, + 2.04663, + 2.08296, + 2.06888, + 2.05665, + 2.05057, + 2.07947, + 2.07115, + 2.09229, + 2.06313, + 2.07687, + 2.09609, + 2.08649, + 2.09809, + 2.08379, + 2.03045, + 2.08328, + 2.09646, + 2.11508, + 2.06418, + 2.08226, + 2.14535, + 2.0782, + 2.0672, + 2.08399, + 2.02413, + 2.06002, + 2.06956, + 2.06763, + 2.09652, + 2.02934, + 2.04722, + 2.05634, + 2.0643, + 2.05565, + 2.04201, + 2.04117, + 2.07521, + 2.06606, + 2.0917, + 2.07226, + 2.03138, + 2.04496, + 2.05672, + 2.05884, + 2.06376, + 2.03163, + 2.10323, + 2.06051, + 2.08882, + 2.05615, + 2.10374, + 2.0503, + 2.10046, + 2.07639, + 2.05222, + 2.04735, + 2.06247, + 2.04949, + 2.05873, + 2.06981, + 2.05954, + 2.0731, + 2.10982, + 2.04023, + 2.06787, + 2.03663, + 2.1172, + 2.0539, + 2.07288, + 2.08881, + 2.06794, + 2.04086, + 2.0744, + 2.04996, + 2.06058, + 2.09462, + 2.09685, + 2.09389, + 2.05206, + 2.0722, + 2.07621, + 2.05716, + 2.08468, + 2.09906, + 2.08742, + 2.0136, + 2.06123, + 2.0188, + 2.07659, + 2.10099, + 2.07016, + 2.09132, + 2.08453, + 2.07252, + 1.97667, + 2.04901, + 2.08879, + 2.08173, + 2.03213, + 2.07158, + 2.06173, + 2.07976, + 2.05656, + 2.02242, + 2.02673, + 2.04831, + 2.09884, + 2.09832, + 2.0495, + 2.08063, + 2.03231, + 2.09724, + 2.09128, + 2.03108, + 2.1062, + 2.07741, + 2.07042, + 2.02213, + 2.05987, + 2.03948, + 2.03855, + 2.10079, + 2.11157, + 2.03026, + 2.03894, + 2.05506, + 2.04623, + 2.10682, + 2.10896, + 2.06236, + 2.04543, + 2.07251, + 2.06593, + 2.06126, + 2.05703, + 2.03603, + 2.0266, + 2.05137, + 2.05257, + 2.11632, + 2.07882, + 2.11579, + 2.06083, + 2.12163, + 2.047, + 2.10293, + 2.07675, + 2.01206, + 2.07546, + 2.09803, + 2.06398, + 2.06775, + 2.07545, + 2.09841, + 2.04833, + 2.08732, + 2.07691, + 2.06115, + 2.02649, + 2.13323, + 2.02234, + 2.06283, + 2.08298, + 2.07213, + 2.09094, + 2.04938, + 2.07172, + 2.0698, + 2.07841, + 2.02131, + 2.08268, + 2.04224, + 2.0695, + 2.03673, + 2.04604, + 2.04904, + 2.08746, + 2.0491, + 2.05123, + 2.09723, + 2.08269, + 2.05124, + 2.07054, + 2.10118, + 2.08105, + 2.06108, + 2.0915, + 2.05991, + 2.05882, + 2.06397, + 2.03865, + 2.09982, + 2.06927, + 2.07037, + 2.03851, + 2.07727, + 2.08466, + 2.04756, + 2.0518, + 2.03833, + 2.04635, + 2.07881, + 2.04457, + 2.06897, + 2.07481, + 2.08105, + 2.05199, + 2.12006, + 2.0454, + 2.03682, + 2.07238, + 2.05344, + 2.09753, + 2.02979, + 2.07929, + 2.06087, + 2.04431, + 2.11623, + 2.04065, + 2.04942, + 2.05687, + 2.08458, + 2.08085, + 2.05046, + 2.08918, + 2.03928, + 2.05363, + 2.00712, + 2.0735, + 2.05258, + 2.05499, + 2.05847, + 2.0914, + 2.05494, + 2.08039, + 2.01086, + 2.09805, + 2.07575, + 2.10792, + 2.11025, + 2.06458, + 2.0273, + 2.05811, + 2.04642, + 2.09066, + 2.04924, + 2.06526, + 2.02682, + 2.04789, + 2.10452, + 2.01919, + 2.07131, + 2.07442, + 2.11376, + 2.06014, + 2.0615, + 2.11177, + 2.06651, + 2.04953, + 2.06775, + 2.0567, + 2.08066, + 2.05155, + 2.02535, + 2.08063, + 2.07325, + 2.09533, + 2.0943, + 2.03607, + 2.0792, + 2.08868, + 2.06284, + 2.07879, + 2.08687, + 2.07723, + 2.08824, + 2.07305, + 2.07188, + 2.06916, + 2.04886, + 2.05256, + 2.09059, + 2.10037, + 2.05897, + 2.05534, + 2.02594, + 2.063, + 2.09497, + 2.09092, + 2.07039, + 2.07083, + 2.0666, + 2.12682, + 2.09667, + 2.02766, + 2.07734, + 2.09582, + 2.10131, + 2.02342, + 
2.0425, + 2.05154, + 2.06863, + 2.03837, + 2.0839, + 2.02418, + 2.0881, + 2.08475, + 2.02315, + 2.09048, + 2.06403, + 2.0433, + 2.04349, + 2.02662, + 2.09695, + 2.06178, + 2.07451, + 2.08244, + 2.06202, + 2.05895, + 2.06559, + 2.06002, + 2.04423, + 2.0658, + 2.07005, + 2.06321, + 2.04857, + 2.04002, + 2.04688, + 2.06172, + 2.10751, + 2.02393, + 1.99349, + 2.03704, + 2.01605, + 2.11855, + 2.10612, + 2.08396, + 2.04103, + 2.07212, + 2.06869, + 2.08831, + 2.06112, + 2.053, + 2.06579, + 2.04157, + 2.05572, + 2.01758, + 2.07438, + 2.04125, + 2.06797, + 2.068, + 2.03829, + 2.05513, + 2.0797, + 2.05015, + 2.0817, + 2.06168, + 2.0538, + 2.03781, + 2.07469, + 2.08785, + 2.09313, + 2.07224, + 2.05207, + 2.04484, + 2.07601, + 2.05114, + 2.07108, + 2.03635, + 2.05828, + 2.06879, + 2.06825, + 2.09608, + 2.02772, + 2.07735, + 2.07481, + 2.0561, + 2.10218, + 2.05183, + 2.05943, + 2.05363, + 2.02933, + 2.04582, + 2.07108, + 2.1126, + 2.09854, + 2.04744, + 2.0731, + 2.05374, + 2.04776, + 2.09109, + 2.08215, + 2.07233, + 2.07128, + 2.07266, + 2.06832, + 2.06511, + 2.08429, + 2.03042, + 2.0661, + 2.03241, + 2.02887, + 2.06301, + 2.07562, + 2.07054, + 2.02542, + 2.07439, + 2.05013, + 2.08904, + 2.06968, + 2.03345, + 2.04215, + 2.03525, + 2.04019, + 2.05763, + 2.05524, + 2.08205, + 2.01128, + 2.0674, + 2.10451, + 2.06705, + 2.04287, + 2.03218, + 2.03945, + 2.05258, + 2.03794, + 2.04784, + 2.08807, + 2.05793, + 2.08379, + 2.04009, + 2.05416, + 2.07032, + 2.07983, + 2.09094, + 2.06061, + 2.09135, + 2.09565, + 2.09122, + 2.01277, + 2.11322, + 2.02085, + 2.07146, + 2.05154, + 2.04755, + 2.06514, + 2.04912, + 2.0506, + 2.09276, + 2.01748, + 2.11268, + 2.06466, + 2.102, + 2.0888, + 2.06228, + 2.07457, + 2.0545, + 2.05416, + 2.07107, + 2.05555, + 2.07771, + 2.08619, + 2.03492, + 2.08688, + 2.06589, + 2.07428, + 2.05994, + 2.07196, + 2.08413, + 2.09792, + 2.03176, + 2.04281, + 2.07963, + 2.08783, + 2.10229, + 2.0806, + 2.06436, + 2.06393, + 2.07591, + 2.04416, + 2.06419, + 2.02994, + 2.07, + 2.06459, + 2.04818, + 2.05616, + 2.05595, + 2.05967, + 2.10924, + 2.07207, + 2.07944, + 2.04368, + 2.03419, + 2.07548, + 2.05645, + 2.07395, + 2.07202, + 2.09124, + 2.10283, + 2.06007, + 2.06086, + 2.06013, + 2.0613, + 2.05274, + 2.11108, + 2.07372, + 2.08513, + 2.04595, + 2.04625, + 2.11262, + 2.06451, + 2.05242, + 2.05972, + 2.08432, + 2.08604, + 2.07219, + 2.04963, + 2.04076, + 2.06975, + 2.08389, + 2.11041, + 2.07472, + 2.08351, + 2.06993, + 2.03487, + 2.06355, + 2.07169, + 2.06573, + 2.05064, + 2.06776, + 2.10188, + 2.03205, + 2.08174, + 2.05715, + 2.04901, + 2.06824, + 2.06143, + 2.056, + 2.07084, + 2.05222, + 2.03319, + 2.08047, + 2.07566, + 2.12745, + 2.08515, + 2.06198, + 2.10327, + 2.09468, + 2.05548, + 2.03834, + 2.11002, + 2.08029, + 2.05268, + 2.0335, + 2.02677, + 2.06304, + 2.04452, + 2.09899, + 2.05809, + 2.07477, + 2.03045, + 2.03504, + 2.05041, + 2.08417, + 2.03559, + 2.02935, + 2.03407, + 2.07136, + 2.07384, + 2.05954, + 2.02755, + 2.06172, + 2.09393, + 2.06967, + 2.07662, + 2.0216, + 2.1009, + 2.06231, + 2.07253, + 2.08237, + 2.06263, + 2.04769, + 2.04909, + 2.08691, + 2.07693, + 2.06829, + 2.04875, + 2.05418, + 2.08913, + 2.03112, + 2.04847, + 2.06328, + 2.07853, + 2.10147, + 2.04872, + 2.06594, + 2.02462, + 2.07055, + 2.05633, + 2.13906, + 2.10186, + 2.06236, + 2.06541, + 2.08143, + 2.06161, + 2.07694, + 2.0402, + 2.02456, + 2.05621, + 2.03083, + 2.09178, + 2.05554, + 2.06884, + 2.04159, + 2.01934, + 2.03423, + 2.09268, + 2.08845, + 2.04913, + 2.07277, + 2.10327, + 2.06987, + 2.07943, + 2.05538, + 2.04082, + 
2.03667, + 2.05249, + 2.04705, + 2.06035, + 2.0747, + 2.04502, + 2.07857, + 2.05529, + 2.07013, + 2.07326, + 2.05817, + 2.06388, + 2.07611, + 2.07169, + 2.07389, + 2.05946, + 2.05697, + 2.05845, + 2.02988, + 2.06169, + 2.06378, + 2.07877, + 2.09078, + 2.05866, + 2.05292, + 2.05089, + 2.04567, + 2.06807, + 2.05176, + 2.09768, + 2.05187, + 2.07603, + 2.09116, + 2.06851, + 2.08508, + 2.05732, + 2.0648, + 2.03648, + 2.08369, + 2.08778, + 2.06682, + 2.07705, + 2.08575, + 2.07415, + 2.04854, + 2.00188, + 2.0663, + 2.04615, + 2.07906, + 2.02555, + 2.07715, + 2.05058, + 2.08828, + 2.0185, + 2.06391, + 2.05002, + 2.06629, + 2.02972, + 2.03557, + 2.08113, + 2.03979, + 2.04057, + 2.04033, + 2.04492, + 2.06139, + 2.0621, + 2.06174, + 2.07726, + 2.08054, + 2.08416, + 2.08596, + 2.03534, + 2.0732, + 2.06318, + 2.0642, + 2.06995, + 2.09707, + 2.0473, + 2.03983, + 2.03072, + 2.10328, + 2.06546, + 2.06347, + 2.07614, + 2.02531, + 2.10226, + 2.02717, + 2.07241 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 53183, + "step_interval": 5, + "values": [ + 956236928.0, + 966297984.0, + 931263232.0, + 979001984.0, + 1017102592.0, + 1115523200.0, + 1228648832.0, + 1260442880.0, + 1274906240.0, + 1188215936.0, + 1114331392.0, + 1063800192.0, + 1034780672.0, + 1023118592.0, + 1031812800.0, + 997922496.0, + 990128448.0, + 1007822656.0, + 954958528.0, + 979914752.0, + 976519296.0, + 966956864.0, + 983542592.0, + 935246336.0, + 949317120.0, + 972322432.0, + 966361728.0, + 989361920.0, + 959320256.0, + 939321856.0, + 972486592.0, + 967056640.0, + 973175616.0, + 976699264.0, + 941081664.0, + 960376576.0, + 970076032.0, + 976963840.0, + 969814912.0, + 945497856.0, + 971986176.0, + 957465472.0, + 964594816.0, + 970090496.0, + 945187264.0, + 948235648.0, + 970803840.0, + 971995776.0, + 967290752.0, + 970860672.0, + 955190080.0, + 989670592.0, + 974899328.0, + 969701504.0, + 977055232.0, + 956681152.0, + 959799040.0, + 968847296.0, + 973418496.0, + 958463104.0, + 948492928.0, + 946244672.0, + 982634880.0, + 962569216.0, + 967340096.0, + 963788032.0, + 937076544.0, + 982140928.0, + 969179136.0, + 966437440.0, + 955682944.0, + 950046656.0, + 965051776.0, + 974682240.0, + 965249472.0, + 994598272.0, + 965535232.0, + 958391808.0, + 964343168.0, + 965317888.0, + 981618368.0, + 952652416.0, + 942381056.0, + 959562112.0, + 974225152.0, + 971466880.0, + 969723904.0, + 935331712.0, + 972597760.0, + 964452608.0, + 958906752.0, + 962584768.0, + 955827328.0, + 968080896.0, + 983626752.0, + 981340864.0, + 958177280.0, + 952030976.0, + 943679744.0, + 978380160.0, + 973635776.0, + 963469696.0, + 973458368.0, + 952654720.0, + 993118208.0, + 982178048.0, + 978971008.0, + 978863616.0, + 946708736.0, + 971266880.0, + 962552896.0, + 954115968.0, + 977178624.0, + 948182912.0, + 943696896.0, + 969076096.0, + 975933888.0, + 982984704.0, + 964016256.0, + 941500288.0, + 972584896.0, + 992368000.0, + 974312832.0, + 967078336.0, + 940384960.0, + 950985024.0, + 972144256.0, + 962619520.0, + 972211840.0, + 956094720.0, + 949694336.0, + 955943040.0, + 974435328.0, + 976947584.0, + 959628928.0, + 940096320.0, + 956687872.0, + 966752256.0, + 969991680.0, + 965977088.0, + 946613504.0, + 983479360.0, + 970198272.0, + 962031360.0, + 978563328.0, + 953855104.0, + 933921984.0, + 980918144.0, + 980894848.0, + 968294912.0, + 950791168.0, + 940875904.0, + 977888128.0, + 959555968.0, + 961631616.0, + 956901120.0, + 937276800.0, + 990016000.0, + 980194304.0, + 966400256.0, + 962776704.0, + 963650432.0, + 948112320.0, + 975020992.0, + 981020864.0, + 
979346560.0, + 954804352.0, + 961996288.0, + 968445952.0, + 961078784.0, + 969625600.0, + 989069184.0, + 939656064.0, + 971510528.0, + 962650240.0, + 970263616.0, + 979359616.0, + 949088384.0, + 954937344.0, + 968487424.0, + 970966528.0, + 965073792.0, + 941464256.0, + 954787840.0, + 969760320.0, + 963802880.0, + 961585792.0, + 961546688.0, + 950831040.0, + 986249216.0, + 953181696.0, + 983777984.0, + 969822016.0, + 944355648.0, + 974090560.0, + 981993984.0, + 963965952.0, + 968954432.0, + 945811392.0, + 966583872.0, + 971404288.0, + 963074304.0, + 978777856.0, + 963672896.0, + 945809728.0, + 980356736.0, + 988883712.0, + 968083840.0, + 966711168.0, + 953608512.0, + 952222976.0, + 971077568.0, + 988861184.0, + 967546368.0, + 945471168.0, + 959263552.0, + 967589568.0, + 959563008.0, + 974096512.0, + 960774272.0, + 945660416.0, + 964831936.0, + 982000384.0, + 966573824.0, + 953778560.0, + 941442432.0, + 952174720.0, + 960408064.0, + 971333632.0, + 959543040.0, + 935563520.0, + 970196864.0, + 975607680.0, + 969626752.0, + 977067584.0, + 955251904.0, + 946566208.0, + 974689856.0, + 961485824.0, + 969863168.0, + 975770816.0, + 928496704.0, + 971732736.0, + 983123392.0, + 971397888.0, + 972253952.0, + 946673536.0, + 968406272.0, + 967845888.0, + 977969664.0, + 964665600.0, + 951950656.0, + 965283648.0, + 957817408.0, + 966574720.0, + 962980544.0, + 960866624.0, + 960872448.0, + 971006720.0, + 967430912.0, + 964223616.0, + 976873600.0, + 943776896.0, + 972782592.0, + 971944320.0, + 963222976.0, + 972755200.0, + 949749248.0, + 972270464.0, + 946714368.0, + 976009024.0, + 975114624.0, + 942428352.0, + 937521216.0, + 971873664.0, + 964832896.0, + 980996544.0, + 958193792.0, + 949157760.0, + 981266304.0, + 1002562816.0, + 965688576.0, + 956397696.0, + 947556992.0, + 967352960.0, + 985068928.0, + 961939456.0, + 958531328.0, + 940314432.0, + 948597504.0, + 954072320.0, + 976647488.0, + 977725952.0, + 977351104.0, + 923629696.0, + 968122112.0, + 962981248.0, + 970977280.0, + 960578688.0, + 947681280.0, + 970398720.0, + 965265920.0, + 968329280.0, + 972982912.0, + 958098816.0, + 956131008.0, + 963140736.0, + 975662912.0, + 972161024.0, + 957985728.0, + 949574336.0, + 967115008.0, + 955687616.0, + 955139520.0, + 957795968.0, + 948440384.0, + 991033088.0, + 972434304.0, + 958435328.0, + 974467520.0, + 946778432.0, + 953109632.0, + 970676608.0, + 981506048.0, + 982325056.0, + 943241472.0, + 955595968.0, + 971163008.0, + 972335872.0, + 971438592.0, + 952993600.0, + 941876352.0, + 968755520.0, + 980965760.0, + 975712896.0, + 968755648.0, + 926065152.0, + 967955328.0, + 968369600.0, + 954213120.0, + 966003840.0, + 940838656.0, + 950562816.0, + 964996672.0, + 966226432.0, + 973740160.0, + 962446464.0, + 953449728.0, + 973701440.0, + 977707008.0, + 974458048.0, + 970564736.0, + 951166208.0, + 977151872.0, + 953486272.0, + 986293440.0, + 978351744.0, + 944845952.0, + 964798976.0, + 968518528.0, + 957154304.0, + 952551552.0, + 962480448.0, + 961705472.0, + 963932160.0, + 966965888.0, + 963232128.0, + 968922112.0, + 919231744.0, + 971251456.0, + 953488384.0, + 963616768.0, + 973595520.0, + 941224960.0, + 946671616.0, + 980045824.0, + 974265152.0, + 971957248.0, + 955011072.0, + 961865216.0, + 982746368.0, + 952993536.0, + 973301760.0, + 958616448.0, + 934147072.0, + 959319680.0, + 959587456.0, + 988043520.0, + 970044480.0, + 949872640.0, + 960568192.0, + 960477504.0, + 948289280.0, + 981668032.0, + 967253568.0, + 974346240.0, + 968881280.0, + 972328064.0, + 963505472.0, + 975099456.0, + 949332864.0, + 975490304.0, 
+ 961732352.0, + 969003136.0, + 975262336.0, + 954261696.0, + 960925952.0, + 959660544.0, + 957844352.0, + 973904192.0, + 948029696.0, + 966380736.0, + 969579328.0, + 953091648.0, + 955097664.0, + 945219584.0, + 940006144.0, + 965635392.0, + 966299776.0, + 971419968.0, + 971268736.0, + 938026560.0, + 962939392.0, + 973374016.0, + 985977408.0, + 966907008.0, + 944082816.0, + 956681856.0, + 985219072.0, + 971489536.0, + 960750848.0, + 935828992.0, + 947535104.0, + 956713408.0, + 965886272.0, + 960114944.0, + 958588928.0, + 947630272.0, + 960947456.0, + 960160832.0, + 975881984.0, + 965135808.0, + 945328384.0, + 965250688.0, + 969733376.0, + 956886784.0, + 963201024.0, + 954089088.0, + 945766016.0, + 983172032.0, + 959089856.0, + 968875136.0, + 971375616.0, + 929161600.0, + 967081856.0, + 975473536.0, + 979295552.0, + 969007488.0, + 944139392.0, + 965862656.0, + 980288704.0, + 960557312.0, + 960808384.0, + 960665344.0, + 945841536.0, + 967415040.0, + 980777280.0, + 959611904.0, + 963326848.0, + 936646336.0, + 973895296.0, + 973523072.0, + 984626368.0, + 965800960.0, + 951103424.0, + 964475392.0, + 967130496.0, + 972868480.0, + 968606592.0, + 937799936.0, + 963920768.0, + 962300672.0, + 984582336.0, + 970657152.0, + 958129408.0, + 945137280.0, + 963545984.0, + 980697216.0, + 965970944.0, + 971669952.0, + 940721472.0, + 981216384.0, + 963291840.0, + 962634752.0, + 967161408.0, + 945838336.0, + 970257152.0, + 965920000.0, + 963273664.0, + 978148160.0, + 945108864.0, + 941872768.0, + 973247872.0, + 970531136.0, + 965414400.0, + 961477888.0, + 947346944.0, + 985874304.0, + 974578560.0, + 981267520.0, + 970101568.0, + 941165632.0, + 954045696.0, + 968758080.0, + 975334208.0, + 979983040.0, + 946234112.0, + 957536256.0, + 948876160.0, + 971205440.0, + 975455296.0, + 954846976.0, + 957184448.0, + 977263104.0, + 982726400.0, + 968362880.0, + 968661696.0, + 956578048.0, + 963730048.0, + 961888384.0, + 975290752.0, + 972071680.0, + 952020608.0, + 966721728.0, + 979876736.0, + 958467712.0, + 968135424.0, + 970088384.0, + 952620672.0, + 987006976.0, + 968030720.0, + 965132288.0, + 966259456.0, + 935491072.0, + 981837824.0, + 960136192.0, + 980994048.0, + 964894144.0, + 946168192.0, + 962419840.0, + 970129216.0, + 967397120.0, + 950755456.0, + 962047872.0, + 971795328.0, + 982853120.0, + 984033024.0, + 966213888.0, + 979698368.0, + 936401344.0, + 974222656.0, + 975151872.0, + 974611584.0, + 963445312.0, + 956257728.0, + 985656960.0, + 960890496.0, + 959103104.0, + 971417984.0, + 953449984.0, + 953272064.0, + 974320384.0, + 957978880.0, + 980414336.0, + 968114048.0, + 957925376.0, + 959204096.0, + 967840768.0, + 978194816.0, + 981490432.0, + 949241344.0, + 974498944.0, + 962907520.0, + 971319808.0, + 967826688.0, + 940208384.0, + 946853888.0, + 976296512.0, + 964332800.0, + 953401472.0, + 967096576.0, + 967335104.0, + 987259520.0, + 974338688.0, + 970915584.0, + 969659200.0, + 962167744.0, + 977161728.0, + 965629184.0, + 970142848.0, + 969767360.0, + 936472320.0, + 965654144.0, + 979920896.0, + 982816768.0, + 961410688.0, + 943136192.0, + 941828480.0, + 962931840.0, + 972480896.0, + 977744384.0, + 961236480.0, + 937120576.0, + 959086848.0, + 966152960.0, + 971771136.0, + 981055296.0, + 948983424.0, + 967500928.0, + 969970176.0, + 959233280.0, + 991930880.0, + 958040320.0, + 954914560.0, + 971846016.0, + 971645056.0, + 969226112.0, + 967635136.0, + 940400704.0, + 975749376.0, + 988319488.0, + 969703040.0, + 962130176.0, + 937729664.0, + 961836288.0, + 976724224.0, + 957261440.0, + 968533120.0, + 956409856.0, 
+ 957384448.0, + 968198272.0, + 968694528.0, + 980996736.0, + 965114176.0, + 942542976.0, + 970263296.0, + 987176320.0, + 972393344.0, + 957116160.0, + 962226688.0, + 991216768.0, + 979054720.0, + 973000448.0, + 974246464.0, + 956047488.0, + 963014272.0, + 971058240.0, + 977931648.0, + 981451136.0, + 948277248.0, + 934772480.0, + 971566080.0, + 971026688.0, + 977299328.0, + 951372928.0, + 956004544.0, + 975343616.0, + 958989632.0, + 956213120.0, + 981110976.0, + 937820544.0, + 969835008.0, + 956856832.0, + 965621504.0, + 972665344.0, + 957806976.0, + 949370112.0, + 972162304.0, + 972793984.0, + 955829632.0, + 964673536.0, + 953344768.0, + 991925888.0, + 973686848.0, + 952864832.0, + 961605248.0, + 944941952.0, + 979913216.0, + 980744064.0, + 980410752.0, + 954187008.0, + 947690432.0, + 947004672.0, + 975350528.0, + 962248064.0, + 988725632.0, + 944005376.0, + 950973824.0, + 966515200.0, + 975706240.0, + 978185536.0, + 976357120.0, + 943320192.0, + 966277376.0, + 962358080.0, + 976203264.0, + 971541952.0, + 937391616.0, + 965716352.0, + 978746752.0, + 972062144.0, + 977814912.0, + 958274176.0, + 938146816.0, + 972887808.0, + 973872064.0, + 958181952.0, + 971533504.0, + 956207232.0, + 971964800.0, + 975739136.0, + 983632960.0, + 959550976.0, + 922478528.0, + 967331584.0, + 958768576.0, + 959299584.0, + 977023232.0, + 949655168.0, + 944128000.0, + 955172480.0, + 971687616.0, + 977042176.0, + 952715584.0, + 934506944.0, + 966462016.0, + 965424256.0, + 981044864.0, + 969115392.0, + 949028864.0, + 978318464.0, + 977286016.0, + 967010496.0, + 969966848.0, + 938616576.0, + 953810880.0, + 962589248.0, + 981771840.0, + 978158144.0, + 968694144.0, + 956072960.0, + 968669184.0, + 959074688.0, + 990117056.0, + 984952192.0, + 945928000.0, + 955999360.0, + 961347264.0, + 967386496.0, + 970175936.0, + 938555008.0, + 951180480.0, + 960621952.0, + 972563584.0, + 969886080.0, + 965413760.0, + 955745920.0, + 972470912.0, + 961199232.0, + 954917504.0, + 974695168.0, + 953781504.0, + 974168192.0, + 965886848.0, + 979201152.0, + 970595712.0, + 944832256.0, + 970407680.0, + 978049024.0, + 978761024.0, + 958308160.0, + 943358528.0, + 959222656.0, + 960499008.0, + 965978496.0, + 981567232.0, + 975720448.0, + 947471488.0, + 969540288.0, + 974729984.0, + 977585856.0, + 961660480.0, + 947232128.0, + 972027776.0, + 972947776.0, + 973900288.0, + 963578624.0, + 947418880.0, + 956223872.0, + 973477952.0, + 942272768.0, + 973858496.0, + 975669632.0, + 937300480.0, + 964836224.0, + 979479424.0, + 965719040.0, + 950291648.0, + 943686400.0, + 985054720.0, + 971481088.0, + 972492928.0, + 972867264.0, + 948047616.0, + 969571840.0, + 967249280.0, + 971339072.0, + 964827840.0, + 973121536.0, + 932679680.0, + 964294528.0, + 985944064.0, + 962825856.0, + 947888064.0, + 936149888.0, + 953951488.0, + 970412160.0, + 966899712.0, + 975869632.0, + 931199296.0, + 962632192.0, + 966259968.0, + 976717696.0, + 984519040.0, + 952739712.0, + 951672448.0, + 975127808.0, + 967755392.0, + 988302016.0, + 965631104.0, + 944607360.0, + 963863424.0, + 973068800.0, + 960641408.0, + 966871232.0, + 959102208.0, + 963087616.0, + 966583488.0, + 974475136.0, + 964317504.0, + 961807360.0, + 944256000.0, + 978687872.0, + 972219392.0, + 966101184.0, + 982098944.0, + 958169216.0, + 969383552.0, + 976667776.0, + 972001216.0, + 967387264.0, + 929629824.0, + 972970432.0, + 966004736.0, + 957420864.0, + 978226816.0, + 936304896.0, + 973770304.0, + 962480384.0, + 981225344.0, + 961436992.0, + 945802624.0, + 947120000.0, + 962646272.0, + 960313728.0, + 975292672.0, 
+ 957344832.0, + 931126336.0, + 971525248.0, + 965347264.0, + 973184512.0, + 985979456.0, + 943119616.0, + 950755712.0, + 973222016.0, + 943791104.0, + 972633216.0, + 960040064.0, + 943144704.0, + 967239168.0, + 984837952.0, + 975966464.0, + 954906304.0, + 932064960.0, + 971269952.0, + 964653312.0, + 952385408.0, + 968069440.0, + 967820032.0, + 975079040.0, + 974181632.0, + 965506816.0, + 969878848.0, + 972414080.0, + 965286784.0, + 969768256.0, + 975729024.0, + 965469824.0, + 976016000.0, + 927634304.0, + 969923968.0, + 972692480.0, + 966305280.0, + 979099520.0, + 933469376.0, + 970328704.0, + 975082880.0, + 968108608.0, + 971076480.0, + 921772928.0, + 954107712.0, + 982986112.0, + 976599936.0, + 969982976.0, + 952207488.0, + 948687360.0, + 970931392.0, + 965315328.0, + 980079872.0, + 963099136.0, + 956383936.0, + 973570048.0, + 969001216.0, + 958367616.0, + 967154048.0, + 944004096.0, + 944353152.0, + 977154560.0, + 971526016.0, + 968135552.0, + 970517504.0, + 961082880.0, + 968432128.0, + 971897472.0, + 941140224.0, + 953927552.0, + 954830848.0, + 969211648.0, + 976125504.0, + 967907200.0, + 951694336.0, + 933555968.0, + 958688896.0, + 974772992.0, + 990033152.0, + 957152000.0, + 941381952.0, + 933954048.0, + 967968512.0, + 976938368.0, + 965889088.0, + 964921408.0, + 951561856.0, + 963441152.0, + 957167360.0, + 969800576.0, + 970812928.0, + 933750336.0, + 987980160.0, + 963943680.0, + 968096512.0, + 968938112.0, + 941729024.0, + 948668672.0, + 960978304.0, + 967097536.0, + 975592448.0, + 960261056.0, + 927577600.0, + 952773440.0, + 955839296.0, + 956968000.0, + 966235648.0, + 940525440.0, + 968861312.0, + 966428864.0, + 972941952.0, + 973784064.0, + 942931712.0, + 957293184.0, + 976446464.0, + 977009216.0, + 960880448.0, + 975425344.0, + 955295872.0, + 984794944.0, + 977519360.0, + 962804352.0, + 956125184.0, + 940138112.0, + 974768512.0, + 956950336.0, + 964995456.0, + 964968448.0, + 958196736.0, + 957048704.0, + 974119168.0, + 975092160.0, + 978090112.0, + 950592192.0, + 947219712.0, + 961843328.0, + 957277568.0, + 980805184.0, + 936176640.0, + 952659392.0, + 974612032.0, + 969829376.0, + 962165888.0, + 966396032.0, + 953853952.0, + 958404352.0, + 976985088.0, + 955728000.0, + 975196416.0, + 960412800.0, + 973993728.0, + 963404480.0, + 967338368.0, + 962311552.0, + 950462848.0, + 954982784.0, + 979908096.0, + 968403392.0, + 981193984.0, + 967248448.0, + 941855872.0, + 973427136.0, + 955793024.0, + 971974784.0, + 971067264.0, + 953390080.0, + 955315200.0, + 976971392.0, + 967621184.0, + 962955392.0, + 940864128.0, + 950788096.0, + 968097536.0, + 975609728.0, + 979082368.0, + 981442048.0, + 939197312.0, + 967601152.0, + 955614144.0, + 965604544.0, + 976276864.0, + 958159232.0, + 969673728.0, + 964368896.0, + 976473920.0, + 984933120.0, + 945408512.0, + 955131008.0, + 968269696.0, + 989501120.0, + 973395072.0, + 974450432.0, + 945549888.0, + 959462208.0, + 957757568.0, + 963945600.0, + 971289984.0, + 948245888.0, + 970380032.0, + 969388160.0, + 978407296.0, + 965915264.0, + 942466624.0, + 969376192.0, + 989745664.0, + 976958592.0, + 973684800.0, + 970581760.0, + 944723968.0, + 992036992.0, + 969085120.0, + 965606144.0, + 954714368.0, + 949960320.0, + 990495488.0, + 959941760.0, + 977775616.0, + 974907520.0, + 940307968.0, + 954688896.0, + 969823872.0, + 977357056.0, + 969442816.0, + 968550784.0, + 944871936.0, + 960301312.0, + 955657408.0, + 966825408.0, + 972898816.0, + 947804032.0, + 971944832.0, + 965897344.0, + 966991360.0, + 985332608.0, + 946609792.0, + 966702208.0, + 984187840.0, 
+ 989248512.0, + 976693120.0, + 956147264.0, + 958625152.0, + 956838208.0, + 965746112.0, + 968585984.0, + 970818496.0, + 963311168.0, + 979459328.0, + 962145152.0, + 962750336.0, + 954498688.0, + 927377280.0, + 971597440.0, + 985275776.0, + 982057984.0, + 967315584.0, + 949563264.0, + 960774528.0, + 982319936.0, + 983654656.0, + 976209408.0, + 960582592.0, + 946093312.0, + 975270848.0, + 984077312.0, + 978947072.0, + 978699136.0, + 934841984.0, + 982260352.0, + 982412224.0, + 967934720.0, + 979692096.0, + 969859392.0, + 965724928.0, + 967185600.0, + 951217664.0, + 973305216.0, + 959712512.0, + 972240512.0, + 959816576.0, + 949676672.0, + 982215040.0, + 978217216.0, + 956105216.0, + 963003392.0, + 962008064.0, + 972696448.0, + 952320768.0, + 938416768.0, + 969812352.0, + 973631104.0, + 962018880.0, + 972861632.0, + 956590720.0, + 952745216.0, + 978028672.0, + 972173440.0, + 964957568.0, + 957725952.0, + 946529792.0, + 971824128.0, + 973380544.0, + 973034048.0, + 969466752.0, + 942162304.0, + 965866240.0, + 972854016.0, + 973553600.0, + 978981504.0, + 938434304.0, + 963183040.0, + 978777216.0, + 963204224.0, + 968651008.0, + 939730496.0, + 945842176.0, + 982510976.0, + 969312896.0, + 984278464.0, + 980115712.0, + 946382912.0, + 955306752.0, + 971466432.0, + 974870400.0, + 976486656.0, + 959631168.0, + 959441984.0, + 974943104.0, + 984933952.0, + 970557440.0, + 953767936.0, + 952936704.0, + 980647808.0, + 976730240.0, + 981763584.0, + 974525568.0, + 951145984.0, + 972715520.0, + 953703616.0, + 972640832.0, + 965368832.0, + 929201408.0, + 974378368.0, + 972664256.0, + 975873216.0, + 977676160.0, + 941912448.0, + 945939584.0, + 982339328.0, + 989044736.0, + 975330560.0, + 964403456.0, + 953013504.0, + 964140032.0, + 960992640.0, + 983076736.0, + 971134848.0, + 932200192.0, + 964982656.0, + 970636416.0, + 966597376.0, + 971914176.0, + 958890880.0, + 965859904.0, + 961412224.0, + 968295296.0, + 965042688.0, + 976074112.0, + 955784128.0, + 967541632.0, + 955408064.0, + 960772544.0, + 953401856.0, + 951111680.0, + 956564480.0, + 963308928.0, + 966602112.0, + 957272832.0, + 944127616.0, + 954476160.0, + 977947904.0, + 972748800.0, + 967345792.0, + 950356736.0, + 926433344.0, + 959305920.0, + 983548032.0, + 976030592.0, + 965808512.0, + 942812800.0, + 992129536.0, + 963470656.0, + 984910528.0, + 963058368.0, + 944563712.0, + 968320768.0, + 966872768.0, + 974587712.0, + 961067776.0, + 952780992.0, + 941043456.0, + 957669824.0, + 968178496.0, + 957092992.0, + 956137216.0, + 935319680.0, + 961558528.0, + 969268288.0, + 945601344.0, + 977856000.0, + 956514816.0, + 964333184.0, + 980359680.0, + 981116160.0, + 981550464.0, + 965524160.0, + 960060992.0, + 965492096.0, + 966940608.0, + 964796160.0, + 961017216.0, + 961000064.0, + 966589888.0, + 971398656.0, + 958346624.0, + 956560512.0, + 945636864.0, + 969575424.0, + 963311616.0, + 969463936.0, + 964146816.0, + 945761536.0, + 950282496.0, + 974740224.0, + 972449152.0, + 970820224.0, + 965580928.0, + 941215616.0, + 964771712.0, + 985743744.0, + 981028352.0, + 960709888.0, + 937586048.0, + 972650368.0, + 981054592.0, + 982141632.0, + 961028736.0, + 942443776.0, + 962297216.0, + 966968448.0, + 974794496.0, + 971104640.0, + 960944384.0, + 947720192.0, + 955030720.0, + 970907968.0, + 962854336.0, + 969850880.0, + 954673280.0, + 977656320.0, + 965586816.0, + 964284736.0, + 977895808.0, + 950171904.0, + 958758272.0, + 975057792.0, + 981652736.0, + 964278528.0, + 953100224.0, + 936073088.0, + 976656384.0, + 955601536.0, + 967410880.0, + 964629632.0, + 946551872.0, 
+ 979427584.0, + 980291968.0, + 976661760.0, + 959077312.0, + 937599104.0, + 964687232.0, + 964531456.0, + 968297344.0, + 977308288.0, + 951500544.0, + 952577536.0, + 961679424.0, + 977802880.0, + 957297280.0, + 961520896.0, + 941937920.0, + 990111936.0, + 971157824.0, + 969659008.0, + 982089280.0, + 942284928.0, + 961127104.0, + 967933056.0, + 960637696.0, + 969640128.0, + 944865472.0, + 976667776.0, + 969624064.0, + 968694848.0, + 954255616.0, + 958824448.0, + 963376640.0, + 975696256.0, + 956984832.0, + 979015936.0, + 948632768.0, + 957725952.0, + 972760832.0, + 962197632.0, + 972281024.0, + 971318528.0, + 953186432.0, + 973235584.0, + 967958464.0, + 958712832.0, + 972651520.0, + 960120960.0, + 945822592.0, + 979486784.0, + 961022720.0, + 981902464.0, + 968142784.0, + 936793984.0, + 975751552.0, + 968800512.0, + 982655104.0, + 981753856.0, + 942031040.0, + 972898688.0, + 961089792.0, + 977049728.0, + 976967296.0, + 952619264.0, + 937529024.0, + 960402688.0, + 974264192.0, + 983761792.0, + 952518528.0, + 946042752.0, + 969584256.0, + 972417408.0, + 965862464.0, + 967098368.0, + 952154816.0, + 970673088.0, + 973226880.0, + 961164352.0, + 951871488.0, + 931615232.0, + 985304000.0, + 973270784.0, + 972243392.0, + 967320256.0, + 943751424.0, + 946028416.0, + 969689216.0, + 961680640.0, + 968185472.0, + 963840576.0, + 954674944.0, + 968198080.0, + 969529280.0, + 965901760.0, + 972870464.0, + 943331968.0, + 963033984.0, + 962295552.0, + 973162176.0, + 981048320.0, + 960194752.0, + 945251840.0, + 964505728.0, + 972163456.0, + 974918016.0, + 976994048.0, + 951481216.0, + 976355456.0, + 949967680.0, + 972655232.0, + 978462464.0, + 941959424.0, + 973197568.0, + 962812288.0, + 984604032.0, + 945226112.0, + 982712320.0, + 968570816.0, + 953119488.0, + 982344384.0, + 950385152.0, + 955500032.0, + 959667072.0, + 963720576.0, + 976224640.0, + 968207104.0, + 953179648.0, + 956425088.0, + 968585088.0, + 965475968.0, + 969178048.0, + 959304704.0, + 973148288.0, + 972890816.0, + 969935360.0, + 958288896.0, + 948720256.0, + 962796544.0, + 971312512.0, + 964073728.0, + 960969344.0, + 930392960.0, + 945751936.0, + 990380160.0, + 968074240.0, + 956704896.0, + 967846272.0, + 955607808.0, + 957716736.0, + 984708288.0, + 978233600.0, + 973357184.0, + 935562624.0, + 957242880.0, + 966722688.0, + 969499136.0, + 981839616.0, + 928371776.0, + 949352320.0, + 966846336.0, + 966686272.0, + 967394816.0, + 949245952.0, + 957081920.0, + 969882368.0, + 974471168.0, + 959456768.0, + 958894592.0, + 956754176.0, + 977281856.0, + 976832960.0, + 962951552.0, + 975217408.0, + 963593152.0, + 977185472.0, + 966663296.0, + 974025280.0, + 966772800.0, + 959408640.0, + 963792128.0, + 977484160.0, + 967485056.0, + 984401536.0, + 959565824.0, + 948574720.0, + 972245120.0, + 982372736.0, + 962659264.0, + 963855360.0, + 948211008.0, + 963775616.0, + 958247808.0, + 969518400.0, + 987503104.0, + 951810432.0, + 950395968.0, + 966734976.0, + 982498816.0, + 965418368.0, + 972902080.0, + 936970880.0, + 968694784.0, + 979824128.0, + 971072256.0, + 971791488.0, + 939068672.0, + 971568768.0, + 957750400.0, + 968755456.0, + 961146240.0, + 933924608.0, + 957175040.0, + 968922112.0, + 969693952.0, + 971914560.0, + 979325824.0, + 951648768.0, + 970210816.0, + 953955136.0, + 971113344.0, + 979363200.0, + 959137856.0, + 959911936.0, + 960096896.0, + 969816896.0, + 954936512.0, + 942594624.0, + 965458880.0, + 982078592.0, + 978908864.0, + 970193024.0, + 949624704.0, + 945822272.0, + 981924352.0, + 968849280.0, + 988431104.0, + 956877376.0, + 940951552.0, 
+ 971059584.0, + 983921152.0, + 983396544.0, + 967999936.0, + 958767360.0, + 961822592.0, + 968904704.0, + 978123648.0, + 975329024.0, + 974969664.0, + 942589696.0, + 959713280.0, + 975106688.0, + 982049536.0, + 979469632.0, + 940608000.0, + 974395456.0, + 979087360.0, + 967504192.0, + 960086016.0, + 943152896.0, + 967960064.0, + 980230144.0, + 963831680.0, + 963265536.0, + 959640512.0, + 970199872.0, + 970729344.0, + 962030848.0, + 981835392.0, + 964593024.0, + 959666688.0, + 968112000.0, + 968565504.0, + 971795712.0, + 968122624.0, + 945530176.0, + 963123328.0, + 974173440.0, + 963489664.0, + 957261888.0, + 949538240.0, + 957148416.0, + 953684864.0, + 979784768.0, + 986819200.0, + 947400704.0, + 948909952.0, + 965028992.0, + 975494144.0, + 968528896.0, + 968991296.0, + 952064896.0, + 974659712.0, + 963534848.0, + 964100864.0, + 965353408.0, + 943095936.0, + 950772096.0, + 969513216.0, + 964380160.0, + 984301824.0, + 964561216.0, + 950735296.0, + 961816320.0, + 980082432.0, + 963702016.0, + 953082944.0, + 951740416.0, + 969242368.0, + 964727616.0, + 959251456.0, + 967797632.0, + 946596032.0, + 962079680.0, + 980372224.0, + 965237248.0, + 982809344.0, + 960378240.0, + 965200768.0, + 958090560.0, + 975113728.0, + 960176256.0, + 947768128.0, + 959303680.0, + 978732672.0, + 969075968.0, + 957632512.0, + 963698432.0, + 942094784.0, + 966145984.0, + 966619776.0, + 983282432.0, + 988539712.0, + 966372736.0, + 944180480.0, + 968811008.0, + 985685120.0, + 974531072.0, + 964031680.0, + 966544512.0, + 967491264.0, + 963823360.0, + 995027200.0, + 973191680.0, + 938402944.0, + 964524032.0, + 972792320.0, + 968313600.0, + 961465728.0, + 936090880.0, + 962700288.0, + 967591488.0, + 977029248.0, + 956073344.0, + 960740096.0, + 946767104.0, + 982017344.0, + 988210944.0, + 966330112.0, + 962442752.0, + 934132800.0, + 980256512.0, + 976386816.0, + 963885696.0, + 977186560.0, + 956614016.0, + 982651008.0, + 952333696.0, + 973792960.0, + 974501760.0, + 953039936.0, + 939703872.0, + 981249280.0, + 972881280.0, + 977926912.0, + 951061184.0, + 937516672.0, + 977339328.0, + 967702208.0, + 990167296.0, + 975674240.0, + 947367680.0, + 970703232.0, + 970009216.0, + 974930176.0, + 979701696.0, + 932856192.0, + 965022208.0, + 979660160.0, + 965323648.0, + 972670144.0, + 962995968.0, + 950673344.0, + 972606720.0, + 951478016.0, + 960643968.0, + 965316736.0, + 941754304.0, + 967909760.0, + 960803776.0, + 965674240.0, + 969266176.0, + 952763264.0, + 984044736.0, + 990052288.0, + 968375936.0, + 967405824.0, + 962972544.0, + 942650752.0, + 987261056.0, + 979284480.0, + 992133376.0, + 971017280.0, + 951307264.0, + 982885760.0, + 974063488.0, + 968568576.0, + 961594688.0, + 944972864.0, + 983837568.0, + 978412032.0, + 967581888.0, + 968756096.0, + 941574400.0, + 971292224.0, + 958283264.0, + 975812608.0, + 974360256.0, + 971620480.0, + 931969664.0, + 965538688.0, + 978798464.0, + 979266048.0, + 983707520.0, + 957975808.0, + 983873536.0, + 977417472.0, + 963129984.0, + 979024896.0, + 943335168.0, + 961540352.0, + 973266752.0, + 970047040.0, + 969316288.0, + 970616832.0, + 944042240.0, + 986351616.0, + 960342016.0, + 973579136.0, + 962190208.0, + 955545856.0, + 978440448.0, + 968560640.0, + 972779072.0, + 973495808.0, + 946637888.0, + 973024192.0, + 958180736.0, + 978572608.0, + 985661952.0, + 951968960.0, + 940693504.0, + 987063552.0, + 971913600.0, + 970914496.0, + 964771456.0, + 934606336.0, + 986079744.0, + 969507584.0, + 967233024.0, + 962025600.0, + 947726336.0, + 969480256.0, + 970779648.0, + 973080448.0, + 983468032.0, 
+ 951103744.0, + 939465920.0, + 963918016.0, + 980930432.0, + 971177856.0, + 979467008.0, + 950412288.0, + 985938304.0, + 970857536.0, + 961497856.0, + 956633920.0, + 945690496.0, + 968481280.0, + 983780480.0, + 971184256.0, + 969637056.0, + 952246400.0, + 961509248.0, + 976643136.0, + 981730048.0, + 980609664.0, + 967668608.0, + 939772032.0, + 970320000.0, + 963732736.0, + 977485760.0, + 981631424.0, + 945746816.0, + 972116480.0, + 973540736.0, + 973175360.0, + 966066944.0, + 936670080.0, + 952732032.0, + 977313024.0, + 967006464.0, + 980247552.0, + 951831808.0, + 949984896.0, + 975022912.0, + 981808256.0, + 958861568.0, + 978811136.0, + 953703360.0, + 968368960.0, + 977667712.0, + 968228864.0, + 982963456.0, + 947629248.0, + 955507584.0, + 969670016.0, + 967550272.0, + 980648576.0, + 952615680.0, + 970705408.0, + 963557760.0, + 968057344.0, + 974339968.0, + 959936256.0, + 947985728.0, + 956355712.0, + 985459328.0, + 963088064.0, + 957991360.0, + 951522432.0, + 966915328.0, + 977176064.0, + 986378240.0, + 976842752.0, + 957545856.0, + 949887552.0, + 987582720.0, + 970992768.0, + 966588672.0, + 954783296.0, + 956379072.0, + 965881472.0, + 968599424.0, + 967134720.0, + 984683136.0, + 931338688.0, + 949491008.0, + 970887104.0, + 970963776.0, + 971379136.0, + 959562368.0, + 963597376.0, + 961184192.0, + 982921664.0, + 979050624.0, + 952621440.0, + 949265920.0, + 978269056.0, + 977521408.0, + 962387072.0, + 979011264.0, + 958561792.0, + 965200640.0, + 968900224.0, + 972240384.0, + 975677952.0, + 947801216.0, + 979185920.0, + 977730688.0, + 974997440.0, + 959979648.0, + 942900096.0, + 952712960.0, + 962836864.0, + 959496512.0, + 983437696.0, + 982361984.0, + 941725248.0, + 982578304.0, + 984915520.0, + 972806016.0, + 978331776.0, + 937670272.0, + 967641536.0, + 981484288.0, + 990962048.0, + 959851968.0, + 956485760.0, + 938229376.0, + 974449088.0, + 959002944.0, + 973131392.0, + 961139840.0, + 945260032.0, + 977570624.0, + 987683968.0, + 962928000.0, + 983368832.0, + 930780800.0, + 986718720.0, + 963263104.0, + 971655168.0, + 982111040.0, + 969881216.0, + 964076160.0, + 956213568.0, + 948041472.0, + 964980992.0, + 957953920.0, + 950926336.0, + 953789952.0, + 979125696.0, + 955324928.0, + 952301312.0, + 957732800.0, + 969389568.0, + 977259648.0, + 958580352.0, + 962569984.0, + 945890432.0, + 948026944.0, + 966418304.0, + 984258368.0, + 984983872.0, + 943260544.0, + 952384512.0, + 980540800.0, + 978144896.0, + 969622528.0, + 973972608.0, + 940000064.0, + 962032896.0, + 970968704.0, + 987005312.0, + 962866880.0, + 949542912.0, + 966065024.0, + 962585856.0, + 964585856.0, + 985850368.0, + 940117760.0, + 949747392.0, + 975297600.0, + 972442624.0, + 966982272.0, + 970937472.0, + 939975552.0, + 965705152.0, + 973486592.0, + 973362944.0, + 970977728.0, + 950963904.0, + 979199616.0, + 970035456.0, + 967635264.0, + 963358080.0, + 952247168.0, + 956216064.0, + 969788800.0, + 958001088.0, + 960883584.0, + 957624960.0, + 948788480.0, + 961669184.0, + 978087296.0, + 977028224.0, + 981930816.0, + 938700288.0, + 969013760.0, + 972265600.0, + 971086528.0, + 966399488.0, + 946396800.0, + 956897920.0, + 986979712.0, + 969291456.0, + 989720960.0, + 956655360.0, + 930761152.0, + 963077312.0, + 972295232.0, + 983035520.0, + 956374720.0, + 938088960.0, + 978049664.0, + 973334016.0, + 944131456.0, + 962438848.0, + 946681536.0, + 960536576.0, + 965082880.0, + 958125376.0, + 963724352.0, + 943107264.0, + 966611200.0, + 982909056.0, + 966287872.0, + 963279872.0, + 980414848.0, + 941665152.0, + 976234496.0, + 982362496.0, 
+ 971164032.0, + 969297600.0, + 943890688.0, + 982564992.0, + 977436288.0, + 978886912.0, + 970827392.0, + 945931520.0, + 950228480.0, + 977412352.0, + 985059072.0, + 989978176.0, + 958051072.0, + 946830720.0, + 966662784.0, + 978381952.0, + 971252736.0, + 973885952.0, + 943174080.0, + 962659136.0, + 971300352.0, + 975618176.0, + 971404480.0, + 948232576.0, + 961759488.0, + 973642880.0, + 980135424.0, + 971769344.0, + 957572864.0, + 933775872.0, + 973487424.0, + 969372992.0, + 961126848.0, + 974677632.0, + 944122112.0, + 978242816.0, + 983408128.0, + 978427968.0, + 954968192.0, + 936573312.0, + 987430400.0, + 972124544.0, + 965832960.0, + 975606784.0, + 947903616.0, + 950006656.0, + 975150912.0, + 953439360.0, + 968940608.0, + 961036352.0, + 935909312.0, + 979123456.0, + 963945152.0, + 966544512.0, + 968057920.0, + 935623808.0, + 969181952.0, + 995754240.0, + 978976256.0, + 980901376.0, + 951608320.0, + 971471744.0, + 959721152.0, + 970636416.0, + 984667520.0, + 982811264.0, + 934178112.0, + 975963648.0, + 956830080.0, + 972798720.0, + 984363712.0, + 941791872.0, + 961542656.0, + 973753216.0, + 980186880.0, + 969692416.0, + 961281792.0, + 954728768.0, + 989910400.0, + 964453120.0, + 960015744.0, + 949367808.0, + 954594752.0, + 975065280.0, + 967038848.0, + 969236096.0, + 964217472.0, + 962300096.0, + 971509184.0, + 971435008.0, + 974802816.0, + 965583296.0, + 947338048.0, + 970809984.0, + 971921856.0, + 978742016.0, + 996777728.0, + 949276288.0, + 933999744.0, + 968274304.0, + 977914944.0, + 958532288.0, + 950861056.0, + 952761856.0, + 971412864.0, + 969254656.0, + 969823808.0, + 985973760.0, + 946511232.0, + 969796480.0, + 968647104.0, + 958945216.0, + 975352448.0, + 960958528.0, + 968443648.0, + 972584896.0, + 960072640.0, + 972977664.0, + 951475712.0, + 955927232.0, + 967173440.0, + 986208128.0, + 965668032.0, + 976196928.0, + 940602752.0, + 964360512.0, + 966548096.0, + 972474880.0, + 974100224.0, + 947771840.0, + 965123264.0, + 985146112.0, + 975958592.0, + 966414976.0, + 954538112.0, + 933791744.0, + 985552512.0, + 990465536.0, + 963272320.0, + 971467712.0, + 949330112.0, + 977442304.0, + 967678912.0, + 966750528.0, + 965843520.0, + 943925824.0, + 979668096.0, + 960466368.0, + 970657152.0, + 983659968.0, + 980694080.0, + 944319104.0, + 969219456.0, + 972360000.0, + 973532480.0, + 957519936.0, + 948992768.0, + 953068672.0, + 969274624.0, + 959968000.0, + 971228224.0, + 950749376.0, + 973302208.0, + 959227840.0, + 970578944.0, + 966622400.0, + 956279104.0, + 962315520.0, + 970164032.0, + 963272064.0, + 957413888.0, + 966982464.0, + 950112960.0, + 963435840.0, + 982521920.0, + 981439424.0, + 957886400.0, + 953618880.0, + 972140800.0, + 972574528.0, + 969552192.0, + 963967168.0, + 937931840.0, + 959792320.0, + 982695360.0, + 969096832.0, + 967604480.0, + 962319296.0, + 953353728.0, + 964435776.0, + 971693760.0, + 966006912.0, + 971449792.0, + 965964608.0, + 983068992.0, + 965355328.0, + 973981632.0, + 985763264.0, + 950380544.0, + 962849856.0, + 984696640.0, + 978032448.0, + 970939136.0, + 969445056.0, + 947336320.0, + 959564608.0, + 977603968.0, + 975451264.0, + 985860032.0, + 956168704.0, + 972917696.0, + 973708928.0, + 961488832.0, + 985186048.0, + 949030336.0, + 975965760.0, + 971664960.0, + 966653440.0, + 976054528.0, + 945996928.0, + 965548416.0, + 973599680.0, + 980302656.0, + 967617664.0, + 956744832.0, + 956168704.0, + 974829056.0, + 978900416.0, + 963803456.0, + 965899456.0, + 935298240.0, + 975768832.0, + 983533120.0, + 981822784.0, + 977400960.0, + 957507904.0, + 961753600.0, 
+ 971365312.0, + 979127104.0, + 984951168.0, + 982093312.0, + 941529472.0, + 983868928.0, + 966979840.0, + 982691456.0, + 961335424.0, + 952575552.0, + 980760384.0, + 976750016.0, + 965706752.0, + 969000832.0, + 959332160.0, + 979323392.0, + 963239808.0, + 981069568.0, + 967778048.0, + 955402048.0, + 952766464.0, + 956145024.0, + 967793408.0, + 962232448.0, + 958466176.0, + 946095744.0, + 982546496.0, + 964325952.0, + 980637248.0, + 974888256.0, + 951892608.0, + 970130944.0, + 969289472.0, + 980805888.0, + 982004480.0, + 940931840.0, + 970395136.0, + 978573056.0, + 975142976.0, + 968097984.0, + 958159040.0, + 937506624.0, + 976905280.0, + 973024256.0, + 960868608.0, + 965629312.0, + 928453504.0, + 964290176.0, + 980607360.0, + 977911680.0, + 969675648.0, + 944643072.0, + 974050688.0, + 984023808.0, + 970787136.0, + 964618560.0, + 959463872.0, + 954479488.0, + 972360256.0, + 956101120.0, + 976733952.0, + 985840576.0, + 958384128.0, + 969573056.0, + 963288576.0, + 976199104.0, + 977610560.0, + 953632128.0, + 975708160.0, + 976330944.0, + 979344704.0, + 973920896.0, + 953017600.0, + 952767040.0, + 981303360.0, + 984029120.0, + 964543168.0, + 965946624.0, + 951044608.0, + 975743616.0, + 976876416.0, + 968810112.0, + 976216000.0, + 946182144.0, + 972659456.0, + 981967040.0, + 971432320.0, + 968908800.0, + 948963648.0, + 936902784.0, + 973200320.0, + 980805248.0, + 979578176.0, + 971279552.0, + 955651840.0, + 980159488.0, + 957699264.0, + 982226176.0, + 971690368.0, + 955794304.0, + 982354240.0, + 967976896.0, + 967325696.0, + 973205504.0, + 955916928.0, + 964352000.0, + 982668672.0, + 983293952.0, + 964787264.0, + 955178944.0, + 942254784.0, + 973436608.0, + 970794112.0, + 961046720.0, + 962908160.0, + 949851456.0, + 983325376.0, + 984209856.0, + 974678528.0, + 984976128.0, + 946474496.0, + 972187328.0, + 970179840.0, + 972786432.0, + 986351808.0, + 966793920.0, + 955481920.0, + 973164544.0, + 970475200.0, + 974539520.0, + 961372672.0, + 944087808.0, + 980474368.0, + 974160064.0, + 977514496.0, + 971245376.0, + 938116672.0, + 939856000.0, + 989607104.0, + 971937984.0, + 962472256.0, + 969840768.0, + 964964544.0, + 979000512.0, + 960978048.0, + 983261120.0, + 989539008.0, + 944341952.0, + 993746880.0, + 964276480.0, + 963232512.0, + 976610624.0, + 944407488.0, + 977418368.0, + 978834624.0, + 971871104.0, + 975734464.0, + 962815872.0, + 962920512.0, + 977155456.0, + 952620800.0, + 968188736.0, + 964801856.0, + 958062656.0, + 974032384.0, + 978925888.0, + 971758976.0, + 972924800.0, + 934113408.0, + 969001344.0, + 983635776.0, + 977360000.0, + 981351744.0, + 930858368.0, + 938177408.0, + 973956800.0, + 965073088.0, + 967858304.0, + 949253376.0, + 953109632.0, + 971789376.0, + 963601728.0, + 963075008.0, + 976382208.0, + 950176512.0, + 971641536.0, + 967857792.0, + 986224768.0, + 980344640.0, + 941307904.0, + 955159872.0, + 975757440.0, + 979380672.0, + 979350720.0, + 961437568.0, + 946262592.0, + 968123456.0, + 963922944.0, + 966870272.0, + 974525824.0, + 952431168.0, + 987822272.0, + 970064896.0, + 964392832.0, + 968238784.0, + 938703168.0, + 996356672.0, + 969584320.0, + 978894144.0, + 979707904.0, + 949733824.0, + 963307456.0, + 964943424.0, + 976390528.0, + 967674688.0, + 983212992.0, + 931121728.0, + 966041216.0, + 979260992.0, + 977151808.0, + 970127168.0, + 928813632.0, + 976481216.0, + 985536896.0, + 969624064.0, + 986035072.0, + 935797824.0, + 957608896.0, + 966046400.0, + 968013504.0, + 963445248.0, + 957385472.0, + 943979200.0, + 966506624.0, + 975255552.0, + 978663168.0, + 964205312.0, 
+ 948695552.0, + 963496896.0, + 964567808.0, + 972784960.0, + 961207232.0, + 961298752.0, + 974965504.0, + 976105728.0, + 952883968.0, + 962219136.0, + 943610496.0, + 948535232.0, + 971740352.0, + 968575616.0, + 961145408.0, + 951484032.0, + 946801792.0, + 980573632.0, + 973289856.0, + 954094720.0, + 980628608.0, + 958189568.0, + 966422080.0, + 977641984.0, + 973641152.0, + 968993472.0, + 960825344.0, + 943203776.0, + 960585408.0, + 969358272.0, + 973605696.0, + 971886848.0, + 944143104.0, + 975812544.0, + 965290496.0, + 971470080.0, + 969047168.0, + 940294400.0, + 963904832.0, + 947056960.0, + 974076544.0, + 962073216.0, + 957711360.0, + 963994624.0, + 965937536.0, + 978425344.0, + 981726848.0, + 948685504.0, + 937389824.0, + 962448832.0, + 960662528.0, + 966016960.0, + 970505728.0, + 961904768.0, + 978014784.0, + 968929536.0, + 969781696.0, + 963823872.0, + 932158976.0, + 956682368.0, + 985824960.0, + 965333824.0, + 960746048.0, + 950900160.0, + 945037440.0, + 978180096.0, + 984947904.0, + 958612096.0, + 968185408.0, + 956194880.0, + 976281216.0, + 964788992.0, + 968903936.0, + 986458624.0, + 937148928.0, + 970235712.0, + 974094272.0, + 979672512.0, + 969672256.0, + 941497536.0, + 951448832.0, + 951018560.0, + 968859584.0, + 955667456.0, + 962440384.0, + 952574912.0, + 962459456.0, + 972357632.0, + 973204672.0, + 952295168.0, + 941006208.0, + 966426880.0, + 998354240.0, + 976476416.0, + 962262592.0, + 941357248.0, + 958793280.0, + 961055552.0, + 972029440.0, + 977576704.0, + 974241152.0, + 955667904.0, + 967431104.0, + 980837184.0, + 958991040.0, + 968756352.0, + 936932416.0, + 967534720.0, + 980463488.0, + 974646016.0, + 954913280.0, + 948394048.0, + 959638976.0, + 990254336.0, + 967258560.0, + 974963584.0, + 970684224.0, + 955156928.0, + 976667840.0, + 960294784.0, + 961231936.0, + 959308800.0, + 937475264.0, + 962245248.0, + 967650176.0, + 975082560.0, + 979618752.0, + 953874944.0, + 950754368.0, + 963804416.0, + 960271936.0, + 979702016.0, + 971587648.0, + 954566080.0, + 953463936.0, + 972294016.0, + 967461952.0, + 967282240.0, + 950986496.0, + 969834816.0, + 974811072.0, + 961141952.0, + 960868480.0, + 944243968.0, + 973321344.0, + 980513472.0, + 965077824.0, + 973763456.0, + 924311168.0, + 973399680.0, + 980765056.0, + 974949632.0, + 951117312.0, + 944539456.0, + 925608448.0, + 989776576.0, + 983093056.0, + 976174528.0, + 969236352.0, + 952627648.0, + 977000832.0, + 982029312.0, + 976495616.0, + 974812224.0, + 949060416.0, + 964321344.0, + 969488320.0, + 982912896.0, + 971767744.0, + 947757376.0, + 962411136.0, + 963763712.0, + 975741376.0, + 977233664.0, + 965918784.0, + 936192896.0, + 977779072.0, + 960361728.0, + 966538688.0, + 973043584.0, + 954648000.0, + 959451776.0, + 976656576.0, + 974861056.0, + 966620032.0, + 942063168.0, + 969118272.0, + 982134784.0, + 971667840.0, + 967658560.0, + 976212480.0, + 943523648.0, + 972270272.0, + 980114624.0, + 960195840.0, + 978223936.0, + 954960128.0, + 968459648.0, + 982481472.0, + 957186432.0, + 966880256.0, + 937487552.0, + 952872960.0, + 979948096.0, + 978890624.0, + 982442304.0, + 951320256.0, + 934107776.0, + 975766592.0, + 972871616.0, + 984904960.0, + 965993728.0, + 954231424.0, + 980875968.0, + 966290368.0, + 966201280.0, + 969668224.0, + 951651712.0, + 964609792.0, + 974064640.0, + 971761280.0, + 969500032.0, + 966415680.0, + 966637632.0, + 977847104.0, + 960212096.0, + 971532480.0, + 965213184.0, + 963248896.0, + 990388288.0, + 958538880.0, + 976756864.0, + 983425024.0, + 931321344.0, + 946745408.0, + 972389376.0, + 970839680.0, 
+ 980935616.0, + 959234944.0, + 963986496.0, + 972310144.0, + 976823744.0, + 975771712.0, + 963359296.0, + 939804224.0, + 983545472.0, + 990107008.0, + 969120832.0, + 973733120.0, + 945268800.0, + 972478592.0, + 971448576.0, + 958999168.0, + 985219392.0, + 980530880.0, + 960931008.0, + 953292608.0, + 965451648.0, + 978077120.0, + 969804544.0, + 956380352.0, + 977689280.0, + 976501440.0, + 967911232.0, + 971495936.0, + 944195136.0, + 974261376.0, + 973308672.0, + 975996864.0, + 950649984.0, + 951448192.0, + 972720128.0, + 969294272.0, + 961792384.0, + 973032576.0, + 973866496.0, + 958256256.0, + 977567168.0, + 964839680.0, + 967831232.0, + 978984896.0, + 928985984.0, + 973935488.0, + 981719744.0, + 963765568.0, + 979261120.0, + 955877952.0, + 967651520.0, + 963543552.0, + 981258176.0, + 976177216.0, + 958088000.0, + 945731328.0, + 974651520.0, + 996439424.0, + 967843456.0, + 975134272.0, + 933767232.0, + 971477952.0, + 976842560.0, + 987009536.0, + 978941376.0, + 951325632.0, + 975767296.0, + 968266304.0, + 944866624.0, + 979275904.0, + 966534080.0, + 965749504.0, + 977553216.0, + 975725184.0, + 980912256.0, + 963014208.0, + 956772672.0, + 965539456.0, + 965396736.0, + 977848640.0, + 977259328.0, + 974586368.0, + 974931648.0, + 972626752.0, + 971565696.0, + 983223424.0, + 968934592.0, + 962259904.0, + 980496960.0, + 972112256.0, + 973174080.0, + 965890816.0, + 941965760.0, + 980546688.0, + 977131008.0, + 972129920.0, + 971405248.0, + 936352000.0, + 968445888.0, + 975153344.0, + 979059008.0, + 976662976.0, + 928849856.0, + 978131328.0, + 979579904.0, + 964862272.0, + 969209408.0, + 965940416.0, + 950791616.0, + 972296896.0, + 970938816.0, + 987498560.0, + 967758592.0, + 944513792.0, + 973016064.0, + 970758656.0, + 978738624.0, + 972522752.0, + 947268032.0, + 974494336.0, + 979807680.0, + 972941952.0, + 972914688.0, + 947223040.0, + 949709632.0, + 976846592.0, + 971902272.0, + 979733056.0, + 973786752.0, + 944968192.0, + 980787648.0, + 981227456.0, + 969726080.0, + 965378240.0, + 956140992.0, + 983781056.0, + 983824000.0, + 980612032.0, + 969728704.0, + 953852800.0, + 941328320.0, + 963630016.0, + 988763456.0, + 987013184.0, + 968937088.0, + 955058368.0, + 962529024.0, + 966191232.0, + 966160128.0, + 983290624.0, + 936971200.0, + 969623360.0, + 977266048.0, + 976023872.0, + 980393920.0, + 957279232.0, + 963027968.0, + 956338176.0, + 968107584.0, + 963630016.0, + 946412992.0, + 949717888.0, + 972425792.0, + 953770624.0, + 956161728.0, + 957709952.0, + 951672064.0, + 982406272.0, + 971004096.0, + 963427136.0, + 969586176.0, + 965564544.0, + 963809280.0, + 960527616.0, + 976778688.0, + 979100224.0, + 970700672.0, + 973844736.0, + 980557184.0, + 973676864.0, + 961148928.0, + 955967552.0, + 934774656.0, + 960542400.0, + 966358144.0, + 967413504.0, + 975995840.0, + 947116800.0, + 959785088.0, + 971377152.0, + 966559168.0, + 977737920.0, + 942668736.0, + 953736576.0, + 971814400.0, + 957328192.0, + 979194368.0, + 954583360.0, + 940405952.0, + 988628608.0, + 972020096.0, + 973802688.0, + 969470848.0, + 948660992.0, + 966444352.0, + 966197696.0, + 976904704.0, + 975301888.0, + 945847872.0, + 958453248.0, + 968476032.0, + 953920512.0, + 967651392.0, + 953145280.0, + 963428480.0, + 971401216.0, + 976572160.0, + 978156544.0, + 974490880.0, + 946837632.0, + 977234944.0, + 975239232.0, + 954075072.0, + 970649472.0, + 952555840.0, + 970667520.0, + 971792512.0, + 967248640.0, + 949294336.0, + 934664832.0, + 959160576.0, + 978588288.0, + 982095872.0, + 967414592.0, + 962372608.0, + 938147008.0, + 954839040.0, 
+ 967599104.0, + 987279104.0, + 973881408.0, + 944140736.0, + 974096064.0, + 970029824.0, + 988972928.0, + 982314752.0, + 945278016.0, + 958064320.0, + 971393856.0, + 974845568.0, + 969471424.0, + 949740864.0, + 951452288.0, + 966450880.0, + 968281408.0, + 964171008.0, + 956763072.0, + 945851264.0, + 967526272.0, + 980497408.0, + 953512768.0, + 960849664.0, + 967291264.0, + 977291584.0, + 967267520.0, + 979975552.0, + 957254144.0, + 962218048.0, + 950189888.0, + 976278400.0, + 971407488.0, + 980312704.0, + 972296576.0, + 945828928.0, + 952708992.0, + 977351872.0, + 976028864.0, + 973840448.0, + 939853376.0, + 975404544.0, + 977270144.0, + 983293440.0, + 955462208.0, + 956524288.0, + 943288000.0, + 960540736.0, + 977475264.0, + 984475968.0, + 966799168.0, + 952593280.0, + 976813440.0, + 965177728.0, + 966935488.0, + 971482048.0, + 944571904.0, + 974077632.0, + 970348416.0, + 969883968.0, + 971506368.0, + 949940096.0, + 948415936.0, + 967998144.0, + 970786048.0, + 972610304.0, + 953778816.0, + 949085120.0, + 970402240.0, + 973548480.0, + 971664192.0, + 950142400.0, + 957999680.0, + 987353024.0, + 980863680.0, + 956866048.0, + 959761984.0, + 962540928.0, + 968469760.0, + 982511232.0, + 956334912.0, + 976498368.0, + 938281856.0, + 938656896.0, + 968072128.0, + 975133888.0, + 959514048.0, + 974384832.0, + 945356096.0, + 964806016.0, + 963140800.0, + 971082752.0, + 985360768.0, + 941469248.0, + 963634880.0, + 965207552.0, + 983131328.0, + 966267136.0, + 949436992.0, + 933252992.0, + 979782208.0, + 958031232.0, + 964578560.0, + 972007936.0, + 955061440.0, + 981651712.0, + 958466368.0, + 973604544.0, + 967792768.0, + 942698176.0, + 980495424.0, + 967711296.0, + 956541376.0, + 960934976.0, + 932012480.0, + 939512000.0, + 969221824.0, + 970176896.0, + 955228736.0, + 967148224.0, + 951535232.0, + 987683072.0, + 973311488.0, + 972248704.0, + 968304320.0, + 940715328.0, + 955683840.0, + 972289984.0, + 972432192.0, + 977282432.0, + 946449536.0, + 950327744.0, + 961743552.0, + 973305600.0, + 964289792.0, + 964008192.0, + 961436672.0, + 969741056.0, + 972801088.0, + 959189952.0, + 956217856.0, + 951800576.0, + 979267200.0, + 955622144.0, + 971251648.0, + 980316736.0, + 966459712.0, + 958822336.0, + 968083840.0, + 955938368.0, + 956038336.0, + 954539968.0, + 968531456.0, + 967929024.0, + 966696704.0, + 972142400.0, + 963902656.0, + 928926464.0, + 977321024.0, + 976504960.0, + 974799360.0, + 967733888.0, + 950444032.0, + 963469440.0, + 983125440.0, + 962636224.0, + 969218176.0, + 954742016.0, + 959397952.0, + 977733248.0, + 987229824.0, + 974280192.0, + 952094528.0, + 944122304.0, + 973594176.0, + 970815232.0, + 953764736.0, + 979919040.0, + 950571520.0, + 976964992.0, + 962998336.0, + 961976768.0, + 983838208.0, + 939549120.0, + 979587200.0, + 965891456.0, + 971683584.0, + 978816960.0, + 952414016.0, + 945802560.0, + 967777728.0, + 965661952.0, + 975286912.0, + 967464128.0, + 949828992.0, + 979188096.0, + 960283392.0, + 971307904.0, + 959975040.0, + 943335104.0, + 986146048.0, + 978715968.0, + 982196032.0, + 941391104.0, + 958416704.0, + 955412480.0, + 979742592.0, + 964329536.0, + 952458688.0, + 962585920.0, + 935138752.0, + 968731776.0, + 974533888.0, + 971529472.0, + 975038464.0, + 939388992.0, + 973917632.0, + 987897024.0, + 968189888.0, + 981193024.0, + 932611456.0, + 969980352.0, + 964373248.0, + 985266048.0, + 957972608.0, + 963796288.0, + 941077376.0, + 972322432.0, + 965118656.0, + 982258624.0, + 969098816.0, + 955848128.0, + 992000832.0, + 966236096.0, + 980576256.0, + 972248384.0, + 948820608.0, 
+ 968422912.0, + 983495296.0, + 968379520.0, + 971286528.0, + 981129728.0, + 964410432.0, + 975215232.0, + 974163712.0, + 971359040.0, + 968993984.0, + 954499904.0, + 975915456.0, + 975861056.0, + 985295616.0, + 974192320.0, + 969102784.0, + 961317824.0, + 973245696.0, + 980958336.0, + 964872768.0, + 961061888.0, + 951701440.0, + 984447808.0, + 960826624.0, + 971121856.0, + 955659072.0, + 966056384.0, + 965210496.0, + 972345408.0, + 968244032.0, + 978429632.0, + 950635584.0, + 970614656.0, + 973470272.0, + 967378048.0, + 981500928.0, + 930009728.0, + 961955712.0, + 967930176.0, + 971063360.0, + 975972608.0, + 960872064.0, + 950836544.0, + 977347328.0, + 977384128.0, + 982418304.0, + 977347712.0, + 942442752.0, + 970529984.0, + 963182080.0, + 978538368.0, + 976776768.0, + 953436544.0, + 951689728.0, + 978092608.0, + 975700416.0, + 946662208.0, + 962189952.0, + 950867392.0, + 978599616.0, + 968208704.0, + 972271808.0, + 973348800.0, + 940888960.0, + 974958976.0, + 979534592.0, + 989962496.0, + 970006336.0, + 955223872.0, + 963987328.0, + 969159104.0, + 992095360.0, + 976756288.0, + 940654656.0, + 944364672.0, + 957784896.0, + 980825536.0, + 975541120.0, + 972887168.0, + 942410432.0, + 975195200.0, + 978565056.0, + 975548672.0, + 988348736.0, + 947441664.0, + 962531264.0, + 967766528.0, + 957954048.0, + 972555840.0, + 934506112.0, + 962717952.0, + 984748224.0, + 975013184.0, + 976998208.0, + 963122688.0, + 951635712.0, + 962124672.0, + 964161088.0, + 980128704.0, + 967977472.0, + 956174720.0, + 959794368.0, + 972108608.0, + 970626880.0, + 969361088.0, + 946458816.0, + 934309888.0, + 981432768.0, + 964879104.0, + 979482496.0, + 950446464.0, + 962714560.0, + 971536512.0, + 966210368.0, + 984085760.0, + 990649600.0, + 957426496.0, + 967576320.0, + 954460672.0, + 971948992.0, + 977640640.0, + 931561536.0, + 974222016.0, + 958423488.0, + 971424896.0, + 974600896.0, + 951440768.0, + 959566144.0, + 965252544.0, + 971064704.0, + 975333056.0, + 972011520.0, + 946616384.0, + 964608896.0, + 975104128.0, + 980903360.0, + 972813568.0, + 946703360.0, + 985879552.0, + 959701696.0, + 978619712.0, + 973641664.0, + 956983936.0, + 967820224.0, + 970038336.0, + 967709952.0, + 965205760.0, + 975709504.0, + 951745536.0, + 972494784.0, + 966351552.0, + 960954432.0, + 969165440.0, + 945948224.0, + 968908864.0, + 970833856.0, + 963325568.0, + 972647552.0, + 947188864.0, + 964141120.0, + 966924736.0, + 974957440.0, + 988913600.0, + 952238016.0, + 950326784.0, + 949767040.0, + 965159104.0, + 968921216.0, + 967732480.0, + 925482752.0, + 972807488.0, + 972638080.0, + 957369664.0, + 960858688.0, + 942446336.0, + 950831616.0, + 965830144.0, + 960531648.0, + 964774784.0, + 952980288.0, + 966027456.0, + 972790400.0, + 976626304.0, + 965603840.0, + 973089920.0, + 962951424.0, + 984466560.0, + 976216576.0, + 960892864.0, + 953216576.0, + 960806272.0, + 976360704.0, + 975529728.0, + 965753536.0, + 966348096.0, + 952085760.0, + 961088768.0, + 965697792.0, + 973895168.0, + 957637248.0, + 977637696.0, + 940232064.0, + 977431936.0, + 969338432.0, + 978101120.0, + 962238848.0, + 945607296.0, + 970621376.0, + 971733888.0, + 988034880.0, + 975479360.0, + 947674176.0, + 960562112.0, + 973360000.0, + 960894528.0, + 958956928.0, + 966526144.0, + 938854848.0, + 979477120.0, + 965198720.0, + 968328576.0, + 971859008.0, + 951716480.0, + 965420736.0, + 973760704.0, + 975044480.0, + 976613568.0, + 943884992.0, + 978484224.0, + 979261824.0, + 971783424.0, + 971739072.0, + 956646528.0, + 963846336.0, + 983289344.0, + 960728704.0, + 961292672.0, 
+ 962509696.0, + 940788736.0, + 970893056.0, + 968734912.0, + 962900992.0, + 969508352.0, + 952155712.0, + 970346432.0, + 962669120.0, + 967300288.0, + 976827264.0, + 964134784.0, + 963821312.0, + 977887680.0, + 958922816.0, + 983797504.0, + 974620288.0, + 937600960.0, + 963017408.0, + 971395200.0, + 983263872.0, + 979736128.0, + 937672000.0, + 961483456.0, + 950204544.0, + 970087040.0, + 982427968.0, + 952478720.0, + 967691200.0, + 977851776.0, + 962691968.0, + 965434752.0, + 956612928.0, + 945445184.0, + 975929152.0, + 969228544.0, + 954448128.0, + 957755456.0, + 936189888.0, + 979276544.0, + 965163648.0, + 971635520.0, + 957348096.0, + 945257728.0, + 955305408.0, + 966231616.0, + 966333696.0, + 971360832.0, + 953111744.0, + 949290624.0, + 981340800.0, + 963663616.0, + 967803456.0, + 962046656.0, + 944950208.0, + 968349696.0, + 967084928.0, + 969202624.0, + 977582784.0, + 946554432.0, + 963036608.0, + 980124992.0, + 963762368.0, + 967440064.0, + 953014016.0, + 952111744.0, + 964207552.0, + 968005824.0, + 963228224.0, + 984584128.0, + 944364160.0, + 969063552.0, + 975689664.0, + 958785408.0, + 974479168.0, + 950242240.0, + 971004416.0, + 970004224.0, + 963171136.0, + 963596160.0, + 954199296.0, + 960654592.0, + 982819584.0, + 970337088.0, + 966501056.0, + 961341696.0, + 953177664.0, + 972313728.0, + 987355072.0, + 974503680.0, + 956472384.0, + 945806016.0, + 966235136.0, + 988140288.0, + 978116608.0, + 960206208.0, + 941950784.0, + 943693696.0, + 970237824.0, + 968935040.0, + 977637120.0, + 954881408.0, + 956555840.0, + 983993536.0, + 968422400.0, + 981401408.0, + 974248256.0, + 946328704.0, + 966728768.0, + 975775616.0, + 966496256.0, + 971699840.0, + 959260800.0, + 951018304.0, + 957813632.0, + 964649472.0, + 981483776.0, + 953678976.0, + 948986176.0, + 969763264.0, + 978162752.0, + 974768192.0, + 960720896.0, + 934270528.0, + 961092672.0, + 975365376.0, + 972710208.0, + 964899072.0, + 956035200.0, + 973742336.0, + 978201344.0, + 979485888.0, + 959934976.0, + 959615616.0, + 954542592.0, + 975416256.0, + 975719936.0, + 958922432.0, + 950817024.0, + 954942912.0, + 979512064.0, + 964267584.0, + 973486016.0, + 967681792.0, + 935557696.0, + 961839872.0, + 974424960.0, + 988294464.0, + 985091328.0, + 941165504.0, + 963614208.0, + 971402368.0, + 959588096.0, + 973921856.0, + 958716800.0, + 943572800.0, + 960335872.0, + 975819648.0, + 952713152.0, + 983175360.0, + 948491392.0, + 962829632.0, + 957288128.0, + 959541888.0, + 983565056.0, + 962983296.0, + 960064960.0, + 964155456.0, + 950264576.0, + 959635456.0, + 957470656.0, + 963542016.0, + 969230272.0, + 966453312.0, + 987144640.0, + 966569920.0, + 941984064.0, + 974474752.0, + 978442624.0, + 976999616.0, + 961451648.0, + 959529344.0, + 967994752.0, + 982728832.0, + 974488832.0, + 959375936.0, + 942917504.0, + 959750272.0, + 966918976.0, + 966538624.0, + 972441472.0, + 961642816.0, + 944569152.0, + 971878272.0, + 963299840.0, + 967215552.0, + 987664640.0, + 947288896.0, + 984886080.0, + 971314304.0, + 970495680.0, + 981465088.0, + 948857600.0, + 968643968.0, + 951244352.0, + 972461184.0, + 956593216.0, + 957309312.0, + 940704512.0, + 976784256.0, + 961705728.0, + 974186112.0, + 970002880.0, + 958595904.0, + 967958720.0, + 972104896.0, + 991389248.0, + 974030464.0, + 934730496.0, + 962359552.0, + 968602944.0, + 972818048.0, + 976059392.0, + 959127936.0, + 949671424.0, + 980125120.0, + 958315584.0, + 961110272.0, + 962059840.0, + 936578176.0, + 973996992.0, + 958719936.0, + 978700672.0, + 979829760.0, + 929410240.0, + 953891392.0, + 969671360.0, 
+ 979375808.0, + 956561088.0, + 942290176.0, + 944030528.0, + 960044864.0, + 968718016.0, + 970754880.0, + 959313856.0, + 946086912.0, + 970983680.0, + 969499392.0, + 952019328.0, + 974469888.0, + 952712448.0, + 980567808.0, + 968682176.0, + 972784192.0, + 958615040.0, + 954550272.0, + 962916608.0, + 967968960.0, + 967909824.0, + 955607360.0, + 960908096.0, + 965459968.0, + 966661632.0, + 966662528.0, + 997560448.0, + 975216256.0, + 958295936.0, + 978651136.0, + 966134208.0, + 987465536.0, + 982706432.0, + 952116224.0, + 957602688.0, + 973381376.0, + 995193792.0, + 974494976.0, + 956035840.0, + 935559168.0, + 979505408.0, + 973369600.0, + 995180928.0, + 974482048.0, + 956048512.0, + 935546880.0, + 979492224.0, + 972321152.0, + 967976704.0, + 977072960.0, + 934262272.0, + 992121216.0, + 979542144.0, + 986180608.0, + 969832320.0, + 965121792.0, + 971854272.0, + 963149312.0, + 968050112.0, + 975986368.0, + 966238784.0, + 976454784.0, + 974676672.0, + 969408768.0, + 964701056.0, + 967743616.0, + 954235712.0, + 978781120.0, + 977436224.0, + 967240192.0, + 963770752.0, + 951522240.0, + 974659008.0, + 972527424.0, + 963813952.0, + 967693888.0, + 942688192.0, + 981055488.0, + 973114368.0, + 969197696.0, + 972257856.0, + 950853760.0, + 944255296.0, + 980598336.0, + 963370176.0, + 981818624.0, + 979003648.0, + 950562944.0, + 961397504.0, + 984405760.0, + 971902144.0, + 978915904.0, + 944721152.0, + 967431168.0, + 963746112.0, + 974065536.0, + 970237504.0, + 957807680.0, + 940188672.0, + 977651200.0, + 967448128.0, + 974191424.0, + 978437248.0, + 958137024.0, + 970507136.0, + 982706688.0, + 968413312.0, + 977110784.0, + 947945920.0, + 988735936.0, + 966843776.0, + 969401920.0, + 965002496.0, + 953648576.0, + 963052672.0, + 959078208.0, + 969904576.0, + 980597120.0, + 971864256.0, + 944882944.0, + 966466688.0, + 972540352.0, + 962410816.0, + 959906560.0, + 958274560.0, + 992011520.0, + 976084672.0, + 970262272.0, + 979911168.0, + 954764800.0, + 946356928.0, + 978702592.0, + 973806400.0, + 982366720.0, + 963095104.0, + 934132928.0, + 965146880.0, + 974949312.0, + 986778688.0, + 973217536.0, + 942887936.0, + 961124416.0, + 971254400.0, + 964488000.0, + 962902912.0, + 952610176.0, + 960660928.0, + 976027968.0, + 972744448.0, + 986592704.0, + 954754048.0, + 953670592.0, + 970586496.0, + 970882688.0, + 962601280.0, + 961794176.0, + 946996416.0, + 970363840.0, + 965310976.0, + 981188416.0, + 963151808.0, + 933158272.0, + 965520448.0, + 981912576.0, + 957014080.0, + 974480704.0, + 934489280.0, + 955800512.0, + 968537024.0, + 973002432.0, + 959339904.0, + 954163968.0, + 950501952.0, + 964747776.0, + 955618048.0, + 976023936.0, + 977687424.0, + 934446400.0, + 953234432.0, + 977944704.0, + 964133248.0, + 969924800.0, + 951144640.0, + 965340992.0, + 972165504.0, + 956645888.0, + 969969472.0, + 977290560.0, + 947898752.0, + 973202816.0, + 959819712.0, + 978168000.0, + 977121728.0, + 952616832.0, + 978487488.0, + 981730624.0, + 984701952.0, + 967378880.0, + 935953280.0, + 964983872.0, + 973220928.0, + 967259520.0, + 962472576.0, + 972423360.0, + 947037568.0, + 974026624.0, + 983978048.0, + 958513472.0, + 955427008.0, + 950644288.0, + 980127488.0, + 968634944.0, + 963911104.0, + 974233536.0, + 940209280.0, + 966117440.0, + 973585024.0, + 981495424.0, + 976896640.0, + 957589248.0, + 948326848.0, + 963149376.0, + 982156864.0, + 989143744.0, + 979645376.0, + 928395904.0, + 971871296.0, + 979172864.0, + 969396544.0, + 976201472.0, + 939298304.0, + 962638848.0, + 949949568.0, + 964836864.0, + 984534144.0, + 949341696.0, 
+ 946375040.0, + 965998336.0, + 973132416.0, + 974720064.0, + 965766400.0, + 947390528.0, + 975673024.0, + 965857088.0, + 963191488.0, + 970292096.0, + 948316352.0, + 968948224.0, + 951689792.0, + 962271040.0, + 966257728.0, + 946903936.0, + 977928768.0, + 986181952.0, + 957792704.0, + 965299904.0, + 947424576.0, + 951874240.0, + 990291136.0, + 979603456.0, + 968499648.0, + 960028416.0, + 945666880.0, + 964715136.0, + 968058752.0, + 972375168.0, + 969973504.0, + 947430464.0, + 974598144.0, + 972250624.0, + 953018752.0, + 972244608.0, + 976545920.0, + 941104768.0, + 972265728.0, + 968262208.0, + 971828288.0, + 981783744.0, + 946866944.0, + 957577280.0, + 965776384.0, + 965607232.0, + 972388160.0, + 942611776.0, + 971584256.0, + 965639360.0, + 968205440.0, + 977930752.0, + 946756096.0, + 967349440.0, + 971102976.0, + 982247104.0, + 966552256.0, + 971236864.0, + 940521152.0, + 966707328.0, + 967366336.0, + 979107328.0, + 943544448.0, + 935810240.0, + 968936448.0, + 963945920.0, + 965944384.0, + 964949312.0, + 940316992.0, + 969596224.0, + 982049984.0, + 972036160.0, + 967644608.0, + 946474944.0, + 938193728.0, + 971120384.0, + 974599296.0, + 982041024.0, + 977332608.0, + 939135424.0, + 991187200.0, + 970708800.0, + 955801536.0, + 973083136.0, + 950052736.0, + 980071168.0, + 976010624.0, + 968413696.0, + 976950336.0, + 947037312.0, + 955699008.0, + 976213056.0, + 960257536.0, + 977301248.0, + 985250624.0, + 965203584.0, + 979916032.0, + 979227712.0, + 970150016.0, + 959938688.0, + 956621248.0, + 976153344.0, + 960736512.0, + 973707776.0, + 978420800.0, + 944955648.0, + 960080064.0, + 964519104.0, + 969141440.0, + 957836544.0, + 961142080.0, + 939710912.0, + 975219392.0, + 967561280.0, + 994904640.0, + 961430080.0, + 942571200.0, + 967128832.0, + 973088000.0, + 979930176.0, + 968572416.0, + 946731264.0, + 958634176.0, + 984853568.0, + 960618752.0, + 972831040.0, + 970021824.0, + 948553088.0, + 961491776.0, + 963327232.0, + 959266240.0, + 971938496.0, + 957255488.0, + 968034176.0, + 961661120.0, + 969765376.0, + 966452096.0, + 947101504.0, + 959729536.0, + 969458304.0, + 965900672.0, + 977718144.0, + 963340864.0, + 966987072.0, + 972251008.0, + 974875328.0, + 965427648.0, + 957522048.0, + 942958592.0, + 961911360.0, + 969458368.0, + 977289536.0, + 959535552.0, + 938390848.0, + 958323072.0, + 971501440.0, + 967787136.0, + 970875136.0, + 944067264.0, + 943765568.0, + 980054080.0, + 976730368.0, + 971471872.0, + 953346048.0, + 943427712.0, + 971981120.0, + 963550016.0, + 971155072.0, + 969415488.0, + 939969408.0, + 969691712.0, + 962313216.0, + 973469312.0, + 992090816.0, + 953564992.0, + 948975232.0, + 970424896.0, + 962479360.0, + 960027264.0, + 961837568.0, + 952972416.0, + 975235136.0, + 964317248.0, + 972064640.0, + 975809728.0, + 943748032.0, + 969219904.0, + 965645632.0, + 969604864.0, + 986414080.0, + 957371328.0, + 965120896.0, + 981114048.0, + 966760640.0, + 965194688.0, + 948058880.0, + 932876032.0, + 981514496.0, + 969076928.0, + 980687424.0, + 959755520.0, + 939557376.0, + 955594752.0, + 980484992.0, + 978223040.0, + 969002304.0, + 946351936.0, + 957885632.0, + 979544512.0, + 963545600.0, + 974468032.0, + 961651136.0, + 944623808.0, + 981752960.0, + 989928896.0, + 979737536.0, + 962284864.0, + 945915392.0, + 970411712.0, + 957714944.0, + 966153408.0, + 985703744.0, + 944171200.0, + 965398848.0, + 968913792.0, + 962138112.0, + 953674752.0, + 954368640.0, + 986489088.0, + 964599232.0, + 961119296.0, + 965256832.0, + 945031616.0, + 936002880.0, + 975415296.0, + 974744512.0, + 970484352.0, 
+ 984617088.0, + 954195008.0, + 970331456.0, + 972916992.0, + 956965952.0, + 966292928.0, + 943359680.0, + 959033856.0, + 982058240.0, + 971036480.0, + 978443584.0, + 965332352.0, + 935597504.0, + 971644672.0, + 964545344.0, + 976257856.0, + 976116032.0, + 954636096.0, + 976165376.0, + 977419264.0, + 961709056.0, + 991612800.0, + 956523904.0, + 956840896.0, + 975737472.0, + 985580608.0, + 984906112.0, + 950670720.0, + 929029888.0, + 967870912.0, + 977184128.0, + 961444032.0, + 974476544.0, + 950107200.0, + 987578688.0, + 980018304.0, + 970295040.0, + 966061120.0, + 949025600.0, + 976736448.0, + 963015680.0, + 975354816.0, + 971719040.0, + 939841344.0, + 964463872.0, + 975060864.0, + 968426112.0, + 963818816.0, + 964171328.0, + 954704512.0, + 972341952.0, + 977223040.0, + 964833344.0, + 983089600.0, + 935789568.0, + 963881024.0, + 966608320.0, + 983804992.0, + 970478848.0, + 951524416.0, + 944129984.0, + 988247616.0, + 965969920.0, + 952212288.0, + 957567808.0, + 938833984.0, + 967033344.0, + 969380224.0, + 965773440.0, + 973727296.0, + 940893760.0, + 969796416.0, + 987207744.0, + 979695616.0, + 957643008.0, + 951528768.0, + 979017472.0, + 975387520.0, + 975281408.0, + 968427840.0, + 968806592.0, + 978402368.0, + 980427264.0, + 964074688.0, + 972711808.0, + 970944832.0, + 945103616.0, + 985440256.0, + 978079488.0, + 968653760.0, + 967265792.0, + 949218112.0, + 987740736.0, + 981401856.0, + 961260928.0, + 963837440.0, + 963823872.0, + 964992832.0, + 977090944.0, + 973198528.0, + 971912960.0, + 961262656.0, + 936331456.0, + 966092544.0, + 1000624256.0, + 973620544.0, + 989009600.0, + 956136704.0, + 970453248.0, + 968043584.0, + 968487744.0, + 978424512.0, + 966799872.0, + 955270336.0, + 981133312.0, + 964456192.0, + 985901632.0, + 968218752.0, + 959426880.0, + 961755200.0, + 971472384.0, + 981381120.0, + 974785856.0, + 946603648.0, + 983058880.0, + 972203712.0, + 968703936.0, + 953199040.0, + 946063168.0, + 965680832.0, + 981508864.0, + 974784832.0, + 970561856.0, + 926223488.0, + 956196224.0, + 987872704.0, + 988890496.0, + 966574464.0, + 970932608.0, + 957729024.0, + 979138432.0, + 976908736.0, + 979244032.0, + 979929728.0, + 946818816.0, + 964716864.0, + 967669440.0, + 992563840.0, + 972361984.0, + 957813632.0, + 943059840.0, + 958729216.0, + 984136384.0, + 970941120.0, + 961854144.0, + 963400896.0, + 964438016.0, + 963765824.0, + 981154176.0, + 962837504.0, + 949981184.0, + 964162944.0, + 969636352.0, + 977646976.0, + 973118144.0, + 962051200.0, + 969115712.0, + 967173888.0, + 964661184.0, + 965281792.0, + 938461568.0, + 942789120.0, + 969238976.0, + 969396736.0, + 977326272.0, + 985693440.0, + 943355136.0, + 976669440.0, + 981866048.0, + 978464768.0, + 971240320.0, + 940368192.0, + 958882496.0, + 975565824.0, + 978469248.0, + 956037696.0, + 971318016.0, + 934791808.0, + 961199104.0, + 972597248.0, + 964259392.0, + 966891584.0, + 949945024.0, + 974521152.0, + 966959296.0, + 953346688.0, + 969797376.0, + 944896512.0, + 957087872.0, + 966351232.0, + 984740032.0, + 964399872.0, + 944806080.0, + 948838272.0, + 963391296.0, + 966104512.0, + 992895168.0, + 956093952.0, + 950773824.0, + 975550720.0, + 979162944.0, + 975228032.0, + 952794304.0, + 953541952.0, + 967666560.0, + 977321344.0, + 973576448.0, + 955081024.0, + 937280448.0, + 960970944.0, + 979243840.0, + 970645824.0, + 956387520.0, + 944582272.0, + 961511488.0, + 974060864.0, + 967481408.0, + 979095488.0, + 981448192.0, + 946732864.0, + 979993856.0, + 977129472.0, + 975372224.0, + 971553024.0, + 949612288.0, + 969716864.0, + 
953815808.0, + 977586176.0, + 964361088.0, + 963590720.0, + 958937408.0, + 969643456.0, + 965128768.0, + 966118016.0, + 982338752.0, + 951279104.0, + 955521664.0, + 968892672.0, + 972106112.0, + 964865536.0, + 961278720.0, + 968992064.0, + 971422464.0, + 972100480.0, + 959760704.0, + 982879424.0, + 950610880.0, + 970486528.0, + 970533824.0, + 963341312.0, + 944189376.0, + 940487680.0, + 976971456.0, + 968511808.0, + 967965824.0, + 978763776.0, + 938520832.0, + 976066176.0, + 965320000.0, + 958779136.0, + 974729408.0, + 953506240.0, + 940081920.0, + 966190592.0, + 967302784.0, + 969921024.0, + 966736512.0, + 951392832.0, + 975828992.0, + 979206592.0, + 986264128.0, + 964680448.0, + 939334400.0, + 976793024.0, + 972326912.0, + 970404672.0, + 970494336.0, + 955573440.0, + 945401216.0, + 967255680.0, + 967032384.0, + 979673216.0, + 972223872.0, + 949601344.0, + 963855616.0, + 976013056.0, + 973998656.0, + 984590912.0, + 951088256.0, + 970067328.0, + 956061184.0, + 974937472.0, + 969055040.0, + 944543104.0, + 961078912.0, + 982184000.0, + 968457984.0, + 956830912.0, + 928821760.0, + 966601344.0, + 972727104.0, + 957699712.0, + 956924928.0, + 949783616.0, + 942032512.0, + 986361984.0, + 979171584.0, + 964691328.0, + 976037568.0, + 937390720.0, + 957477952.0, + 974595456.0, + 974311104.0, + 962558336.0, + 966012480.0, + 943301248.0, + 974594048.0, + 983782784.0, + 964934656.0, + 959768384.0, + 952992064.0, + 953711872.0, + 959589312.0, + 982365312.0, + 971797824.0, + 936081664.0, + 967763712.0, + 955761536.0, + 957234944.0, + 972708096.0, + 946432064.0, + 951500736.0, + 969433664.0, + 969855296.0, + 966247488.0, + 954553664.0, + 968611072.0, + 964777024.0, + 975212608.0, + 975459008.0, + 962989568.0, + 951605632.0, + 971357632.0, + 967008960.0, + 961796288.0, + 969693440.0, + 936850176.0, + 972468608.0, + 965346112.0, + 978498688.0, + 973979776.0, + 932054144.0, + 951860608.0, + 975564032.0, + 960246144.0, + 967539584.0, + 988022144.0, + 943540288.0, + 975703936.0, + 978688704.0, + 977150080.0, + 966899904.0, + 942507712.0, + 981041280.0, + 957581568.0, + 984980608.0, + 966805504.0, + 952115136.0, + 965811776.0, + 985910272.0, + 974078272.0, + 983529920.0, + 952556608.0, + 947170176.0, + 972406656.0, + 972955264.0, + 966760768.0, + 980416832.0, + 948150784.0, + 964207616.0, + 947524736.0, + 976332160.0, + 982941376.0, + 950301376.0, + 978155712.0, + 968844992.0, + 950886144.0, + 985023104.0, + 959110144.0, + 943816256.0, + 955002112.0, + 971378176.0, + 988853184.0, + 956716096.0, + 945667648.0, + 962857408.0, + 971720640.0, + 969484480.0, + 978926400.0, + 939906240.0, + 958570688.0, + 977714752.0, + 958491520.0, + 978134272.0, + 954262208.0, + 958182784.0, + 972283648.0, + 982909824.0, + 961628352.0, + 958913984.0, + 948644032.0, + 968260544.0, + 965479232.0, + 997951488.0, + 973870208.0, + 939940352.0, + 966812096.0, + 968759872.0, + 949725248.0, + 977650688.0, + 955403968.0, + 955030080.0, + 976225792.0, + 970213760.0, + 962492416.0, + 958755776.0, + 945295936.0, + 978518528.0, + 965980608.0, + 966358656.0, + 969410496.0, + 940608704.0, + 973240320.0, + 975068800.0, + 951888256.0, + 964066880.0, + 949149888.0, + 977665728.0, + 974602496.0, + 969645312.0, + 977445888.0, + 946845056.0, + 944096192.0, + 961941184.0, + 971781760.0, + 980031744.0, + 971509696.0, + 946439488.0, + 970772800.0, + 975968896.0, + 969260160.0, + 973054144.0, + 941027456.0, + 975760192.0, + 972611840.0, + 976027328.0, + 965119616.0, + 957061056.0, + 931256448.0, + 979264192.0, + 960038336.0, + 965137344.0, + 958527360.0, + 
966014400.0, + 973020352.0, + 964743296.0, + 968654272.0, + 981821632.0, + 955935872.0, + 991025024.0, + 968775744.0, + 973782272.0, + 959377344.0, + 947800384.0, + 949367232.0, + 966707200.0, + 980937088.0, + 960609088.0, + 957851904.0, + 941302592.0, + 975655424.0, + 979904256.0, + 965988800.0, + 986714112.0, + 952366272.0, + 970783104.0, + 970343616.0, + 974974528.0, + 971842752.0, + 941395648.0, + 948387520.0, + 980668736.0, + 980053760.0, + 982500096.0, + 970084736.0, + 936919040.0, + 969876352.0, + 981326784.0, + 992018560.0, + 958539648.0, + 950516480.0, + 956740608.0, + 982094144.0, + 977917248.0, + 968119744.0, + 952073984.0, + 931399680.0, + 966554112.0, + 958850880.0, + 977573952.0, + 964592192.0, + 958312704.0, + 974005888.0, + 950970624.0, + 974338496.0, + 963808896.0, + 954280000.0, + 981481088.0, + 974654976.0, + 966983488.0, + 971694144.0, + 940360576.0, + 965095104.0, + 960203840.0, + 952547008.0, + 966836608.0, + 958368576.0, + 959804416.0, + 972355200.0, + 985891200.0, + 958696128.0, + 936294912.0, + 945463296.0, + 977076032.0, + 988789248.0, + 966621568.0, + 985454784.0, + 938732992.0, + 963043200.0, + 961942912.0, + 989489600.0, + 987013312.0, + 959490944.0, + 961899648.0, + 958968000.0, + 966210816.0, + 981719936.0, + 952090944.0, + 938251968.0, + 971376576.0, + 969824576.0, + 976530240.0, + 971830336.0, + 955762752.0, + 972647168.0, + 965210240.0, + 950826048.0, + 978837824.0, + 958071680.0, + 961483136.0, + 985632192.0, + 962112576.0, + 974645824.0, + 956923328.0, + 948963840.0, + 975927616.0, + 968292352.0, + 962047872.0, + 977941696.0, + 946268288.0, + 976358528.0, + 979349632.0, + 979796608.0, + 975724736.0, + 940562432.0, + 963765888.0, + 965244032.0, + 978698112.0, + 945850816.0, + 941845440.0, + 959131072.0, + 972693952.0, + 970566336.0, + 966508544.0, + 962100224.0, + 937939136.0, + 973749696.0, + 973512704.0, + 981707456.0, + 970136768.0, + 949885696.0, + 962003328.0, + 982789568.0, + 968080960.0, + 969705536.0, + 954171072.0, + 952187136.0, + 985361856.0, + 972913600.0, + 976518272.0, + 959725056.0, + 932516480.0, + 964037696.0, + 967028736.0, + 977857216.0, + 961843648.0, + 955102400.0, + 988763328.0, + 968715968.0, + 970518336.0, + 959374656.0, + 952353472.0, + 948822592.0, + 979556224.0, + 967519488.0, + 972424768.0, + 947987136.0, + 951507968.0, + 968237504.0, + 967390336.0, + 962856448.0, + 980083776.0, + 944050368.0, + 975006848.0, + 974312256.0, + 973574208.0, + 971708544.0, + 958864384.0, + 960295360.0, + 965778560.0, + 970290752.0, + 980613376.0, + 944283776.0, + 945492480.0, + 970518528.0, + 970185088.0, + 970997184.0, + 986612032.0, + 948066816.0, + 955517312.0, + 972393344.0, + 972488640.0, + 985050304.0, + 951690944.0, + 954792960.0, + 972011136.0, + 962667904.0, + 960713792.0, + 943963072.0, + 948743936.0, + 981819456.0, + 971381696.0, + 970545984.0, + 972548288.0, + 944227328.0, + 974196096.0, + 977102336.0, + 963895680.0, + 960720192.0, + 945273216.0, + 969737216.0, + 998076864.0, + 975855808.0, + 963338816.0, + 937053696.0, + 949942336.0, + 968207552.0, + 986284160.0, + 967589184.0, + 966929408.0, + 937815552.0, + 963158336.0, + 985092928.0, + 962796480.0, + 968078016.0, + 947321280.0, + 975432384.0, + 975331264.0, + 975791424.0, + 980182720.0, + 956997120.0, + 970744192.0, + 963803712.0, + 977681152.0, + 981516800.0, + 968661248.0, + 953440768.0, + 963244800.0, + 964300416.0, + 965498240.0, + 959689600.0, + 965970560.0, + 966674048.0, + 968969728.0, + 955515712.0, + 972235712.0, + 939842752.0, + 960277312.0, + 981393088.0, + 968752512.0, + 
957488448.0, + 951367040.0, + 952284032.0, + 969787776.0, + 972364160.0, + 962866624.0, + 932202944.0, + 957318592.0, + 981520000.0, + 959303104.0, + 948332288.0, + 969478848.0, + 949914944.0, + 956194496.0, + 973598976.0, + 969103872.0, + 968650560.0, + 953933632.0, + 961696640.0, + 958652864.0, + 987229120.0, + 969981056.0, + 964830208.0, + 946335104.0, + 980762048.0, + 971935168.0, + 974750656.0, + 965119616.0, + 939432640.0, + 955701888.0, + 967307264.0, + 991853696.0, + 971335552.0, + 924678016.0, + 969423360.0, + 967502400.0, + 963945280.0, + 968606656.0, + 974431104.0, + 953848576.0, + 978189824.0, + 975013440.0, + 956637632.0, + 968968256.0, + 943301056.0, + 979950784.0, + 982469120.0, + 959020224.0, + 973195264.0, + 944738560.0, + 958959424.0, + 979202240.0, + 972960320.0, + 951416512.0, + 941890432.0, + 947712832.0, + 975258496.0, + 988450240.0, + 974012928.0, + 962630592.0, + 921264064.0, + 970375040.0, + 982832832.0, + 976660288.0, + 967197056.0, + 958454976.0, + 1000298880.0, + 954954880.0, + 966162624.0, + 958965696.0, + 958415872.0, + 956820224.0, + 968914240.0, + 962834880.0, + 959190720.0, + 954761728.0, + 947724480.0, + 967121152.0, + 973963712.0, + 966010944.0, + 958401152.0, + 942305472.0, + 973938816.0, + 971260032.0, + 971667776.0, + 953775232.0, + 943344320.0, + 959075200.0, + 980050304.0, + 979433984.0, + 976606272.0, + 942133504.0, + 956704768.0, + 954276992.0, + 981502208.0, + 956537472.0, + 958234560.0, + 952944192.0, + 961186560.0, + 970739008.0, + 964836224.0, + 959376320.0, + 938600640.0, + 980743936.0, + 970924416.0, + 959672512.0, + 972666496.0, + 947624960.0, + 962766400.0, + 961655232.0, + 951784640.0, + 982070592.0, + 964440960.0, + 949561280.0, + 971409536.0, + 963268096.0, + 987873600.0, + 969461504.0, + 942434624.0, + 968452608.0, + 971008256.0, + 960998336.0, + 961911680.0, + 945805824.0, + 934175296.0, + 965408832.0, + 963442240.0, + 972466048.0, + 960792896.0, + 963350976.0, + 979455616.0, + 956931904.0, + 964225856.0, + 974062144.0, + 949420480.0, + 989551488.0, + 994637760.0, + 971231232.0, + 963970560.0, + 948497792.0, + 964947456.0, + 970541440.0, + 974020160.0, + 966556992.0, + 950605248.0, + 966238784.0, + 957847872.0, + 985896640.0, + 976405952.0, + 965389184.0, + 915178176.0, + 963368000.0, + 982967744.0, + 983009664.0, + 962054528.0, + 948861504.0, + 973102208.0, + 965067840.0, + 968189696.0, + 965075712.0, + 956771584.0, + 971409024.0, + 967623680.0, + 954622208.0, + 970002432.0, + 973575488.0, + 956789184.0, + 981327488.0, + 959849216.0, + 957913152.0, + 965475840.0, + 950060352.0, + 978034432.0, + 968152768.0, + 962766656.0, + 969904448.0, + 950783424.0, + 952283264.0, + 970217024.0, + 963142464.0, + 967003904.0, + 959724096.0, + 936029056.0, + 990472512.0, + 977625088.0, + 977825024.0, + 963217600.0, + 950928512.0, + 960868864.0, + 973374272.0, + 976636416.0, + 972201408.0, + 938382144.0, + 961862144.0, + 965315392.0, + 964543424.0, + 978128576.0, + 938131584.0, + 972171200.0, + 976696704.0, + 984454976.0, + 975423936.0, + 958847232.0, + 952034240.0, + 951423680.0, + 963932608.0, + 975787904.0, + 973280000.0, + 944062208.0, + 966852608.0, + 969012800.0, + 964098432.0, + 964232384.0, + 955763712.0, + 962337344.0, + 973103872.0, + 965437632.0, + 976107584.0, + 965253824.0, + 941408832.0, + 971009344.0, + 958048704.0, + 964609664.0, + 970383424.0, + 944223680.0, + 964641088.0, + 975353024.0, + 963216000.0, + 956843584.0, + 949851264.0, + 977999744.0, + 966273856.0, + 975746624.0, + 974540032.0, + 955812736.0, + 954867392.0, + 975837184.0, 
+ 987603008.0, + 968191872.0, + 980909888.0, + 935765056.0, + 968295104.0, + 969191680.0, + 975296576.0, + 984730560.0, + 940931008.0, + 974232704.0, + 964276672.0, + 981304640.0, + 971199104.0, + 943623168.0, + 946810048.0, + 972410880.0, + 980049280.0, + 977307904.0, + 951884608.0, + 955077888.0, + 981102784.0, + 962022400.0, + 957946688.0, + 968624064.0, + 956834432.0, + 986279808.0, + 974450304.0, + 993519808.0, + 963823872.0, + 961741632.0, + 961028608.0, + 984976512.0, + 971022464.0, + 970911040.0, + 956058816.0, + 951333760.0, + 973084160.0, + 973563712.0, + 977726272.0, + 964567424.0, + 930937984.0, + 973832576.0, + 976974720.0, + 978134848.0, + 981408192.0, + 940666496.0, + 948610688.0, + 969757824.0, + 974738176.0, + 968011904.0, + 970674944.0, + 948271616.0, + 980399424.0, + 966324544.0, + 976598784.0, + 975199616.0, + 954019392.0, + 975527360.0, + 968828608.0, + 964216064.0, + 976426624.0, + 957919936.0, + 943160704.0, + 973871488.0, + 970541312.0, + 981382272.0, + 957914240.0, + 948976576.0, + 978926784.0, + 978817344.0, + 967202176.0, + 952486400.0, + 957904256.0, + 977004160.0, + 977582144.0, + 972821696.0, + 958474368.0, + 958560768.0, + 960841408.0, + 988150848.0, + 974835648.0, + 969553728.0, + 967503872.0, + 954326528.0, + 965065792.0, + 970564736.0, + 971599424.0, + 981106752.0, + 938739904.0, + 973612928.0, + 964725120.0, + 973270464.0, + 986174528.0, + 939959168.0, + 974849408.0, + 968068352.0, + 967849984.0, + 988423360.0, + 941041728.0, + 943350400.0, + 969911616.0, + 968380864.0, + 965419712.0, + 946773056.0, + 951969152.0, + 976044864.0, + 968452224.0, + 975423040.0, + 963698944.0, + 944575616.0, + 964251968.0, + 971090752.0, + 962767488.0, + 974284352.0, + 953160128.0, + 982262336.0, + 982164096.0, + 965801280.0, + 961299328.0, + 944725696.0, + 974897088.0, + 991996096.0, + 957498688.0, + 981525632.0, + 967816448.0, + 931301568.0, + 973258304.0, + 968595136.0, + 979546240.0, + 973765888.0, + 954125568.0, + 970055616.0, + 968299968.0, + 985013440.0, + 967392768.0, + 958041664.0, + 961655232.0, + 960015168.0, + 967324736.0, + 981402176.0, + 976802688.0, + 937603328.0, + 967031296.0, + 965052224.0, + 979589888.0, + 981316352.0, + 959215040.0, + 973302080.0, + 967704192.0, + 970518976.0, + 969294208.0, + 949856448.0, + 967728384.0, + 979262848.0, + 972170240.0, + 965048576.0, + 951281984.0, + 954714560.0, + 968276224.0, + 977366592.0, + 976548800.0, + 967489472.0, + 950112960.0, + 970514688.0, + 991727168.0, + 964326656.0, + 987481472.0, + 948382848.0, + 972066240.0, + 971654208.0, + 970150208.0, + 974186688.0, + 956165824.0, + 943899264.0, + 974364352.0, + 960993216.0, + 970082432.0, + 968749184.0, + 952795264.0, + 985093632.0, + 964221248.0, + 976967744.0, + 982484416.0, + 959192768.0, + 983127936.0, + 966610944.0, + 958042240.0, + 980946496.0, + 955983424.0, + 942097024.0, + 964350720.0, + 965628736.0, + 961262528.0, + 955575360.0, + 939499072.0, + 961869440.0, + 967743616.0, + 966309504.0, + 972100800.0, + 950385600.0, + 969897280.0, + 974915520.0, + 968265216.0, + 975927552.0, + 952268096.0, + 966918720.0, + 975372096.0, + 964387328.0, + 957277824.0, + 968436416.0, + 947367360.0, + 972595264.0, + 966886208.0, + 962633024.0, + 964268544.0, + 943111040.0, + 966199104.0, + 979073600.0, + 964781568.0, + 968280000.0, + 945923904.0, + 971825088.0, + 978705536.0, + 979387968.0, + 975173760.0, + 965174336.0, + 963589376.0, + 970487232.0, + 959623360.0, + 966108288.0, + 972779456.0, + 935563840.0, + 970765184.0, + 958642112.0, + 962041536.0, + 968177344.0, + 956190144.0, 
+ 966033792.0, + 964530048.0, + 965372480.0, + 962724864.0, + 949198912.0, + 963558528.0, + 963447680.0, + 964988736.0, + 964933696.0, + 970143552.0, + 937960064.0, + 971137600.0, + 969831808.0, + 979275520.0, + 960125760.0, + 944070784.0, + 959488576.0, + 969027136.0, + 965588352.0, + 967232640.0, + 940622272.0, + 945988096.0, + 972705856.0, + 969395584.0, + 967463616.0, + 951970368.0, + 945919040.0, + 967662336.0, + 971383552.0, + 975806528.0, + 982927680.0, + 952994112.0, + 963969856.0, + 986701120.0, + 952023552.0, + 970077312.0, + 960094208.0, + 961081728.0, + 965083840.0, + 967231424.0, + 977440576.0, + 975297600.0, + 942971648.0, + 972595072.0, + 974553216.0, + 962913024.0, + 969718336.0, + 943192512.0, + 948647040.0, + 965911552.0, + 964147584.0, + 967384384.0, + 951766720.0, + 969970752.0, + 963362752.0, + 980107200.0, + 971437760.0, + 957932608.0, + 946457920.0, + 983375936.0, + 970740672.0, + 973367296.0, + 963288448.0, + 954637760.0, + 972827968.0, + 972902336.0, + 968836224.0, + 961336192.0, + 938383360.0, + 967467904.0, + 967238528.0, + 957343744.0, + 974524160.0, + 943794432.0, + 951146944.0, + 961809664.0, + 976303040.0, + 967136064.0, + 973762688.0, + 949713600.0, + 971735872.0, + 972907328.0, + 972992384.0, + 971164800.0, + 949211648.0, + 981886080.0, + 976059776.0, + 975098944.0, + 961717568.0, + 952480704.0, + 956693376.0, + 968644864.0, + 962700352.0, + 956191232.0, + 990552000.0, + 935804032.0, + 954107200.0, + 959364800.0, + 978269312.0, + 951698240.0, + 951989248.0, + 991284864.0, + 964332736.0, + 975417536.0, + 965645888.0, + 943253184.0, + 962853632.0, + 958807296.0, + 980278400.0, + 958644992.0, + 939119488.0, + 948831360.0, + 974136960.0, + 974169408.0, + 971564800.0, + 959983936.0, + 948426496.0, + 968406144.0, + 973707584.0, + 967865920.0, + 975432704.0, + 943908736.0, + 974013376.0, + 961091712.0, + 967949888.0, + 968758272.0, + 975363392.0, + 944782848.0, + 961383360.0, + 969374464.0, + 975388928.0, + 955702848.0, + 950196032.0, + 974744512.0, + 962855232.0, + 962962368.0, + 953050368.0, + 956594240.0, + 963186624.0, + 965790080.0, + 969557952.0, + 952897600.0, + 961956992.0, + 963387712.0, + 992559680.0, + 957787264.0, + 964560576.0, + 969303808.0, + 932638848.0, + 976011648.0, + 962513856.0, + 975204992.0, + 968566592.0, + 951994240.0, + 965452480.0, + 960548864.0, + 984055104.0, + 980254784.0, + 941545600.0, + 958248192.0, + 957811776.0, + 975603712.0, + 968386944.0, + 959279744.0, + 939403072.0, + 966078144.0, + 959020864.0, + 957134144.0, + 984928448.0, + 952804736.0, + 977573696.0, + 983040384.0, + 960741184.0, + 972496256.0, + 938911552.0, + 960537536.0, + 963278208.0, + 963289664.0, + 970740672.0, + 935307392.0, + 976323200.0, + 961312192.0, + 977152064.0, + 971782592.0, + 964880768.0, + 949039488.0, + 964129600.0, + 969086784.0, + 971316416.0, + 967508544.0, + 960702208.0, + 966329152.0, + 968020160.0, + 979848256.0, + 966748352.0, + 952717504.0, + 951754816.0, + 975666688.0, + 970677696.0, + 965876672.0, + 957349632.0, + 941275392.0, + 966852288.0, + 963880000.0, + 978972352.0, + 952381312.0, + 935715584.0, + 963361664.0, + 969399424.0, + 976406528.0, + 963896832.0, + 945520512.0, + 962600256.0, + 972852608.0, + 973184576.0, + 963019072.0, + 957626880.0, + 949598912.0, + 981199808.0, + 972227392.0, + 976719488.0, + 973338368.0, + 953693504.0, + 956079744.0, + 957734912.0, + 958488512.0, + 977933376.0, + 932571712.0, + 986439296.0, + 967509120.0, + 963144576.0, + 953336448.0, + 956104768.0, + 949976896.0, + 987421504.0, + 969001088.0, + 972957504.0, 
+ 962489664.0, + 945620160.0, + 973000896.0, + 975045696.0, + 971812864.0, + 972073408.0, + 946393280.0, + 970606016.0, + 979429376.0, + 968875072.0, + 975618944.0, + 941368128.0, + 959739200.0, + 975790208.0, + 955453696.0, + 973890816.0, + 985247296.0, + 940293760.0, + 968178432.0, + 979540096.0, + 959783040.0, + 974319488.0, + 949450240.0, + 979878464.0, + 985235968.0, + 978790720.0, + 983719424.0, + 939677952.0, + 970797056.0, + 980414400.0, + 970359040.0, + 970081600.0, + 937915520.0, + 952333376.0, + 979505856.0, + 979478592.0, + 953235200.0, + 970615040.0, + 948029440.0, + 978493888.0, + 990812224.0, + 964144000.0, + 968921664.0, + 939206528.0, + 976269952.0, + 969561536.0, + 961115904.0, + 966461120.0, + 942768384.0, + 963134336.0, + 976011008.0, + 975344768.0, + 976678592.0, + 960278976.0, + 940133760.0, + 977436672.0, + 964483200.0, + 973764352.0, + 966671488.0, + 942376704.0, + 960924672.0, + 971255552.0, + 974823744.0, + 970653376.0, + 949611520.0, + 953117248.0, + 972012736.0, + 964462592.0, + 973082304.0, + 987549376.0, + 941428800.0, + 972785472.0, + 971244032.0, + 973160704.0, + 985143424.0, + 949573376.0, + 992390400.0, + 961834176.0, + 968338432.0, + 951679488.0, + 936266240.0, + 951091648.0, + 962658496.0, + 969425152.0, + 965073664.0, + 944978560.0, + 944183680.0, + 976292096.0, + 972761728.0, + 976144384.0, + 952296832.0, + 950193024.0, + 973788544.0, + 975900224.0, + 978513792.0, + 979278144.0, + 936786432.0, + 968568320.0, + 973700544.0, + 959145664.0, + 967774400.0, + 953044672.0, + 959332352.0, + 956206592.0, + 959445696.0, + 973294592.0, + 973872000.0, + 950893440.0, + 964301440.0, + 964745536.0, + 969885632.0, + 965207296.0, + 954259904.0, + 964745216.0, + 963812352.0, + 964617344.0, + 962164352.0, + 948716864.0, + 970232704.0, + 966398016.0, + 977294784.0, + 965150272.0, + 959745984.0, + 951908544.0, + 966104768.0, + 988442048.0, + 971915456.0, + 961666944.0, + 949015360.0, + 965207296.0, + 972221504.0, + 964808832.0, + 983736640.0, + 955788608.0, + 980358592.0, + 975898368.0, + 969959680.0, + 974199104.0, + 939894784.0, + 955800832.0, + 976698816.0, + 973913600.0, + 981422080.0, + 975105920.0, + 955285696.0, + 966522048.0, + 956449536.0, + 969893760.0, + 976778496.0, + 947510080.0, + 949980224.0, + 962904128.0, + 990148544.0, + 968781760.0, + 948956032.0, + 946621056.0, + 970508672.0, + 973233600.0, + 972127360.0, + 966778752.0, + 958284736.0, + 967196480.0, + 966231552.0, + 973855296.0, + 969750336.0, + 955944256.0, + 980303360.0, + 958554944.0, + 972545728.0, + 970652160.0, + 948179968.0, + 949998720.0, + 970587712.0, + 972276864.0, + 977373632.0, + 949628992.0, + 948334976.0, + 967682368.0, + 970113344.0, + 966447616.0, + 968831360.0, + 954559296.0, + 974449792.0, + 982847808.0, + 983556736.0, + 967126912.0, + 944710848.0, + 964678592.0, + 985468160.0, + 969857344.0, + 989257920.0, + 946398528.0, + 931107136.0, + 965849728.0, + 978189568.0, + 978718144.0, + 985299136.0, + 955497280.0, + 966239808.0, + 960832512.0, + 976461376.0, + 965884544.0, + 948155520.0, + 951423488.0, + 968965184.0, + 975668416.0, + 955821568.0, + 971427904.0, + 945778816.0, + 964547328.0, + 969923200.0, + 975564928.0, + 957127296.0, + 953939712.0, + 971291712.0, + 964763328.0, + 972956608.0, + 948315968.0, + 933072832.0, + 966281088.0, + 978116480.0, + 967044224.0, + 975879104.0, + 956724544.0, + 939582208.0, + 973464128.0, + 963027840.0, + 966226816.0, + 962247488.0, + 961438464.0, + 966564160.0, + 965973440.0, + 971071872.0, + 985012736.0, + 930724672.0, + 962994496.0, + 967571264.0, 
+ 970828480.0, + 989781824.0, + 949894848.0, + 951055552.0, + 985401088.0, + 962364032.0, + 959778368.0, + 961597056.0, + 974075776.0, + 958506752.0, + 968643776.0, + 958013696.0, + 966115648.0, + 937143104.0, + 959942656.0, + 980228864.0, + 970700736.0, + 976956672.0, + 946456576.0, + 963817088.0, + 948654720.0, + 976193536.0, + 983209344.0, + 943088832.0, + 964205696.0, + 986925376.0, + 968215936.0, + 952683840.0, + 959629696.0, + 944938880.0, + 977094208.0, + 968412480.0, + 973843072.0, + 973784768.0, + 921360192.0, + 960347008.0, + 983767360.0, + 974511232.0, + 967499328.0, + 946859392.0, + 945055232.0, + 979709568.0, + 968872960.0, + 970305536.0, + 960998848.0, + 947197440.0, + 987041984.0, + 970712000.0, + 983894784.0, + 969881216.0, + 952739072.0, + 969241920.0, + 970751872.0, + 948162432.0, + 978588288.0, + 958849088.0, + 966012096.0, + 974179648.0, + 965955328.0, + 953478144.0, + 962338816.0, + 948082240.0, + 973504512.0, + 975912512.0, + 970496128.0, + 977114688.0, + 957253440.0, + 972977984.0, + 982692224.0, + 966226368.0, + 952172416.0, + 937258496.0, + 975366272.0, + 980247680.0, + 958719744.0, + 965531712.0, + 961147840.0, + 951220288.0, + 982266176.0, + 965548736.0, + 984989184.0, + 962283520.0, + 937615168.0, + 967855744.0, + 963401728.0, + 969174720.0, + 985252992.0, + 941517568.0, + 961269888.0, + 970950720.0, + 970138304.0, + 976718976.0, + 954686784.0, + 954291712.0, + 961638592.0, + 979856064.0, + 963379200.0, + 961332416.0, + 947062272.0, + 983171648.0, + 965416128.0, + 972068480.0, + 969358208.0, + 933961792.0, + 985517952.0, + 961558464.0, + 976432576.0, + 978010944.0, + 941443072.0, + 956131072.0, + 974381504.0, + 957675776.0, + 972152256.0, + 956615168.0, + 951517888.0, + 973441792.0, + 961947008.0, + 969538432.0, + 973597888.0, + 950416320.0, + 961668992.0, + 969023808.0, + 970656128.0, + 965169472.0, + 928397440.0, + 934467264.0, + 978082048.0, + 963382784.0, + 972485504.0, + 963051008.0, + 948003072.0, + 968812032.0, + 974810816.0, + 971538816.0, + 958721792.0, + 949776512.0, + 958928384.0, + 963862976.0, + 960073280.0, + 972865408.0, + 965913280.0, + 964293248.0, + 965932800.0, + 973660288.0, + 971048640.0, + 970819264.0, + 936653376.0, + 957160256.0, + 964599168.0, + 956811456.0, + 972767936.0, + 946143680.0, + 978819456.0, + 963762816.0, + 964653376.0, + 975832576.0, + 961012736.0, + 965595776.0, + 971409984.0, + 970710464.0, + 967910336.0, + 960150272.0, + 953985728.0, + 986790400.0, + 959003712.0, + 972030336.0, + 953911680.0, + 941837120.0, + 965127936.0, + 974224384.0, + 971219200.0, + 966096960.0, + 939365312.0, + 969099840.0, + 974691008.0, + 973880064.0, + 981528640.0, + 935658304.0, + 950010112.0, + 969443904.0, + 969827200.0, + 969579904.0, + 957485056.0, + 935227840.0, + 954078464.0, + 972510784.0, + 961786688.0, + 980644480.0, + 938357824.0, + 958728256.0, + 979267072.0, + 965789824.0, + 962056320.0, + 920034368.0, + 993872448.0, + 955232768.0, + 959374080.0, + 954846720.0, + 965491328.0, + 962094976.0, + 985822848.0, + 957046912.0, + 970249088.0, + 970162688.0, + 969172928.0, + 964821888.0, + 977317696.0, + 974905728.0, + 972570048.0, + 923725440.0, + 958935488.0, + 972595584.0, + 963867904.0, + 967702208.0, + 967891520.0, + 942485312.0, + 967524736.0, + 956522816.0, + 966104384.0, + 957793920.0, + 944605184.0, + 978150080.0, + 978178240.0, + 983204736.0, + 965906176.0, + 937687552.0, + 972870336.0, + 959842944.0, + 976423680.0, + 962552576.0, + 956921728.0, + 954046784.0, + 965483968.0, + 972903744.0, + 950048384.0, + 962516800.0, + 952921856.0, 
+ 963355712.0, + 963076864.0, + 972000640.0, + 965294272.0, + 933841728.0, + 965369344.0, + 953449152.0, + 980671104.0, + 975236480.0, + 930823104.0, + 967363264.0, + 966518528.0, + 970347328.0, + 956038144.0, + 931960320.0, + 944360448.0, + 970181824.0, + 972669376.0, + 958170624.0, + 950540352.0, + 940246080.0, + 969580416.0, + 963093440.0, + 954739904.0, + 964955392.0, + 953176256.0, + 958955136.0, + 973978368.0, + 968812608.0, + 985562944.0, + 978141504.0, + 969058304.0, + 966713216.0, + 974808064.0, + 984063360.0, + 966990784.0, + 959373376.0, + 960349440.0, + 953334784.0, + 980396672.0, + 967019904.0, + 961928192.0, + 966572032.0, + 962305536.0, + 960780928.0, + 960643776.0, + 959538368.0, + 957590016.0, + 972084736.0, + 974102720.0, + 966522880.0, + 968475584.0, + 948236160.0, + 975949824.0, + 963794688.0, + 963009216.0, + 986218368.0, + 930699264.0, + 976172544.0, + 990139072.0, + 977453248.0, + 962462080.0, + 945796288.0, + 969537856.0, + 977129664.0, + 972228544.0, + 986717696.0, + 936598464.0, + 944995904.0, + 955667328.0, + 973499520.0, + 980912896.0, + 981662400.0, + 936935104.0, + 964624384.0, + 959895936.0, + 986651456.0, + 975640192.0, + 958426624.0, + 975357312.0, + 963550336.0, + 970582272.0, + 974691392.0, + 944117696.0, + 965030272.0, + 967080704.0, + 975631616.0, + 967179392.0, + 982170944.0, + 955264704.0, + 974654400.0, + 969905152.0, + 965275264.0, + 965981440.0, + 940290752.0, + 973531136.0, + 960609792.0, + 972436864.0, + 978824704.0, + 940060864.0, + 968653184.0, + 964429056.0, + 968082752.0, + 969574400.0, + 965763776.0, + 946198080.0, + 975619648.0, + 973022592.0, + 967326272.0, + 959540160.0, + 943441024.0, + 983268864.0, + 967697600.0, + 971282368.0, + 975432256.0, + 934134656.0, + 961381120.0, + 967247296.0, + 972474944.0, + 961314432.0, + 964050496.0, + 946494016.0, + 969322432.0, + 972848896.0, + 976618944.0, + 967442624.0, + 947587456.0, + 966871488.0, + 964618368.0, + 994787968.0, + 988412288.0, + 942145152.0, + 961744832.0, + 970945984.0, + 977523904.0, + 970607616.0, + 952486848.0, + 961963136.0, + 961892416.0, + 959132416.0, + 951528128.0, + 956242368.0, + 948932288.0, + 989051328.0, + 974463232.0, + 966427200.0, + 975378112.0, + 937088704.0, + 968369984.0, + 987338176.0, + 970588864.0, + 968531456.0, + 956638208.0, + 948582400.0, + 985812480.0, + 981196416.0, + 974768896.0, + 946486016.0, + 941977088.0, + 952414848.0, + 977420160.0, + 980446144.0, + 969054144.0, + 949351488.0, + 974843648.0, + 967210176.0, + 958707904.0, + 974507328.0, + 950730368.0, + 973157504.0, + 971576448.0, + 965261056.0, + 973908224.0, + 975159040.0, + 947273024.0, + 971511680.0, + 966220480.0, + 967885504.0, + 968404352.0, + 952753856.0, + 983745600.0, + 957472256.0, + 961332416.0, + 964501824.0, + 943728896.0, + 977682368.0, + 981852992.0, + 963727936.0, + 967334720.0, + 953132480.0, + 978972160.0, + 981037376.0, + 972663104.0, + 970084928.0, + 972737472.0, + 940333888.0, + 987577472.0, + 970059840.0, + 975905920.0, + 961738816.0, + 953195072.0, + 968280128.0, + 978371008.0, + 971405696.0, + 969986112.0, + 936838720.0, + 952641344.0, + 986705088.0, + 966993856.0, + 967387712.0, + 954611840.0, + 958291136.0, + 969723456.0, + 968560064.0, + 968486720.0, + 981047808.0, + 956524800.0, + 963046848.0, + 957060544.0, + 958426624.0, + 985285312.0, + 941419456.0, + 960780032.0, + 967297152.0, + 962075008.0, + 968262272.0, + 959072128.0, + 942765632.0, + 956369216.0, + 959791808.0, + 965952448.0, + 949544320.0, + 948598912.0, + 973556288.0, + 977461312.0, + 963175808.0, + 973002816.0, 
+ 935190592.0, + 977148288.0, + 988324800.0, + 969807616.0, + 957966784.0, + 945861952.0, + 940448192.0, + 969709952.0, + 980650816.0, + 955865408.0, + 960284864.0, + 936297664.0, + 963262272.0, + 961871552.0, + 973965504.0, + 968831552.0, + 936296320.0, + 957131968.0, + 956695488.0, + 959624064.0, + 981766016.0, + 965865792.0, + 955595520.0, + 960115520.0, + 972505408.0, + 969194048.0, + 943397248.0, + 960521088.0, + 974330368.0, + 972667904.0, + 970653632.0, + 980275200.0, + 936819648.0, + 988542400.0, + 963037568.0, + 952548928.0, + 962609600.0, + 952786944.0, + 960264896.0, + 974453312.0, + 957190592.0, + 974812992.0, + 944580416.0, + 958541888.0, + 959315520.0, + 975046336.0, + 963746368.0, + 965262784.0, + 933421888.0, + 960867840.0, + 976219904.0, + 973223488.0, + 967116608.0, + 946622336.0, + 940638784.0, + 971085056.0, + 979334016.0, + 961466304.0, + 943832256.0, + 929239872.0, + 967302144.0, + 964007168.0, + 959526592.0, + 966386176.0, + 946160192.0, + 968565632.0, + 943378688.0, + 960701504.0, + 971588416.0, + 947586240.0, + 958351744.0, + 962951744.0, + 984119232.0, + 961026176.0, + 968020096.0, + 974852416.0, + 960410368.0, + 957064320.0, + 981581120.0, + 957182336.0, + 933307392.0, + 957020608.0, + 951573696.0, + 963787136.0, + 959650688.0, + 941719552.0, + 965030208.0, + 965331392.0, + 965931328.0, + 959375040.0, + 943457600.0, + 970753728.0, + 966362944.0, + 970086592.0, + 971502656.0, + 944190528.0, + 953849664.0, + 968904704.0, + 985095488.0, + 986465472.0, + 966331200.0, + 943385536.0, + 967688000.0, + 973926400.0, + 967664640.0, + 956166784.0, + 938424320.0, + 962817984.0, + 976629888.0, + 963985536.0, + 991779584.0, + 966873152.0, + 936206784.0, + 968224192.0, + 961028928.0, + 989036544.0, + 984309504.0, + 951836032.0, + 965718656.0, + 942684800.0, + 963555584.0, + 966728960.0, + 946504000.0, + 975448000.0, + 964568704.0, + 950212928.0, + 961762880.0, + 947056832.0, + 955735552.0, + 974926080.0, + 975692992.0, + 961978624.0, + 955910016.0, + 941977920.0, + 953971968.0, + 984272128.0, + 970927744.0, + 971754688.0, + 948815744.0, + 964617472.0, + 976331072.0, + 974519808.0, + 989516480.0, + 948103040.0, + 952357952.0, + 963913600.0, + 983301056.0, + 966695616.0, + 973377024.0, + 944717312.0, + 972132480.0, + 951375936.0, + 980717760.0, + 965723392.0, + 958243776.0, + 961657344.0, + 967972480.0, + 975973248.0, + 969575552.0, + 946204480.0, + 984173248.0, + 971643776.0, + 976845696.0, + 971362944.0, + 963377856.0, + 970694720.0, + 966316992.0, + 957935296.0, + 964638912.0, + 971663232.0, + 923816832.0, + 975231680.0, + 978931648.0, + 957507264.0, + 962622976.0, + 938568704.0, + 950523328.0, + 971668352.0, + 984826112.0, + 958353920.0, + 953863168.0, + 959330048.0, + 975530560.0, + 979873536.0, + 961880320.0, + 957192960.0, + 946302208.0, + 979920448.0, + 973217728.0, + 965171520.0, + 968205376.0, + 964973248.0, + 961020608.0, + 975194560.0, + 971147776.0, + 968591104.0, + 952052800.0, + 949152704.0, + 961409664.0, + 976582656.0, + 969878144.0, + 948465600.0, + 953818688.0, + 966417152.0, + 956377024.0, + 970556864.0, + 963652736.0, + 950035008.0, + 982264768.0, + 960511168.0, + 964802112.0, + 966632384.0, + 952291904.0, + 964435200.0, + 976723328.0, + 965133056.0, + 967481408.0, + 931151360.0, + 964692608.0, + 980070784.0, + 962143680.0, + 961062848.0, + 969127744.0, + 952444608.0, + 941764544.0, + 973702464.0, + 975889088.0, + 983844224.0, + 946641472.0, + 961976384.0, + 979876608.0, + 975089792.0, + 971760000.0, + 968782720.0, + 955185856.0, + 962823616.0, + 968077888.0, 
+ 975827072.0, + 949773632.0, + 949869760.0, + 957432640.0, + 971686080.0, + 974256128.0, + 989518400.0, + 951585664.0, + 947193216.0, + 967357760.0, + 961240320.0, + 982374016.0, + 958028160.0, + 958692352.0, + 979599680.0, + 972809408.0, + 961097152.0, + 958948224.0, + 946387072.0, + 964978944.0, + 962835584.0, + 973247488.0, + 974088704.0, + 952958784.0, + 981753984.0, + 975820480.0, + 954763136.0, + 953689664.0, + 952849792.0, + 979234560.0, + 969361472.0, + 980917888.0, + 960989120.0, + 941193984.0, + 966687232.0, + 962609856.0, + 974734976.0, + 988843008.0, + 968050752.0, + 952659328.0, + 971140288.0, + 972805568.0, + 974052864.0, + 976467456.0, + 937378624.0, + 965140672.0, + 978458048.0, + 960431040.0, + 979525888.0, + 957217856.0, + 969703808.0, + 959344192.0, + 954610432.0, + 980951488.0, + 964569024.0, + 959116992.0, + 971099456.0, + 962928704.0, + 969459136.0, + 974187200.0, + 949669440.0, + 964647488.0, + 984592320.0, + 969596352.0, + 973169472.0, + 950625536.0, + 952672192.0, + 976262400.0, + 978434368.0, + 979720896.0, + 952211904.0, + 949790784.0, + 975559040.0, + 978205824.0, + 963229568.0, + 975362240.0, + 953066752.0, + 962996864.0, + 962671872.0, + 975643456.0, + 965725760.0, + 970741312.0, + 970435520.0, + 957665984.0, + 974212352.0, + 975483840.0, + 960092160.0, + 957273088.0, + 957728448.0, + 970785664.0, + 959509248.0, + 977901888.0, + 957934528.0, + 969029248.0, + 987927424.0, + 980561536.0, + 967277376.0, + 925047552.0, + 945694592.0, + 970188608.0, + 975151680.0, + 979169728.0, + 935205440.0, + 968040960.0, + 963594816.0, + 960712256.0, + 976533312.0, + 961861504.0, + 956841984.0, + 984648192.0, + 976726016.0, + 977018112.0, + 983204288.0, + 941420864.0, + 971602048.0, + 965834816.0, + 973837568.0, + 970657984.0, + 947158976.0, + 970141952.0, + 976233792.0, + 986233088.0, + 959099968.0, + 961520640.0, + 946280448.0, + 971910336.0, + 988432832.0, + 968733632.0, + 966379712.0, + 941240512.0, + 964067264.0, + 967122880.0, + 983430720.0, + 973709632.0, + 949628352.0, + 955711552.0, + 960252608.0, + 966449856.0, + 969061120.0, + 967693312.0, + 938528768.0, + 964876608.0, + 961517888.0, + 975615744.0, + 965115968.0, + 943306624.0, + 976712576.0, + 966227968.0, + 984008576.0, + 982578688.0, + 961487104.0, + 968181632.0, + 973395776.0, + 972841984.0, + 965128064.0, + 947619840.0, + 945285760.0, + 980244736.0, + 979232320.0, + 972242496.0, + 968526528.0, + 961141760.0, + 971945344.0, + 961514112.0, + 975681408.0, + 982502848.0, + 971588160.0, + 974645888.0, + 966941120.0, + 973488128.0, + 958184640.0, + 951346176.0, + 958820736.0, + 974529664.0, + 975623424.0, + 988260224.0, + 966642368.0, + 946606656.0, + 987313856.0, + 961603200.0, + 972827072.0, + 993334784.0, + 956973568.0, + 964533632.0, + 972627392.0, + 974839744.0, + 981789248.0, + 948171328.0, + 969399488.0, + 991472064.0, + 960616256.0, + 972474496.0, + 952595456.0, + 925109120.0, + 968372544.0, + 968064832.0, + 975109248.0, + 982653952.0, + 959342464.0, + 983058560.0, + 971739520.0, + 961757056.0, + 975478656.0, + 954294528.0, + 985396096.0, + 984114496.0, + 976023552.0, + 965210560.0, + 956236096.0, + 956499264.0, + 965890816.0, + 972277760.0, + 982332288.0, + 960553856.0, + 934424896.0, + 968267392.0, + 987247808.0, + 975718784.0, + 973757568.0, + 938969664.0, + 965516032.0, + 974022848.0, + 986853888.0, + 980466112.0, + 958550720.0, + 952015936.0, + 969878656.0, + 958279296.0, + 972604992.0, + 975836096.0, + 953564992.0, + 979066496.0, + 952399936.0, + 968564544.0, + 981480448.0, + 958236032.0, + 982074816.0, 
+ 967049856.0, + 962132224.0, + 984581056.0, + 938472320.0, + 951162496.0, + 972205504.0, + 978641408.0, + 964497472.0, + 967210176.0, + 966715200.0, + 978138752.0, + 965499456.0, + 982062464.0, + 967014080.0, + 933283840.0, + 967528448.0, + 972387904.0, + 970224832.0, + 957721792.0, + 936020288.0, + 961665088.0, + 971708928.0, + 976050688.0, + 977412608.0, + 951679104.0, + 950734848.0, + 960669504.0, + 972341184.0, + 976244288.0, + 960160000.0, + 947311552.0, + 970428608.0, + 977004032.0, + 973598336.0, + 965952128.0, + 953631104.0, + 961531072.0, + 974096064.0, + 956493632.0, + 972419008.0, + 949921408.0, + 959389568.0, + 970915840.0, + 960707328.0, + 969883072.0, + 950362496.0, + 944046976.0, + 963459712.0, + 965798208.0, + 972922624.0, + 954916544.0, + 937705152.0, + 972928704.0, + 975403712.0, + 954521728.0, + 980202560.0, + 953754048.0, + 969368832.0, + 968118784.0, + 972525696.0, + 973869248.0, + 954355200.0, + 960220928.0, + 958779008.0, + 976038272.0, + 983960448.0, + 953832512.0, + 932845952.0, + 970799552.0, + 959723712.0, + 948840896.0, + 964909248.0, + 971099904.0, + 984398912.0, + 967586496.0, + 957644096.0, + 975620480.0, + 959153472.0, + 965121344.0, + 960652800.0, + 954977216.0, + 965387968.0, + 975576000.0, + 964114304.0, + 967188480.0, + 964494912.0, + 956100608.0, + 980596480.0, + 934784704.0, + 967844672.0, + 960337792.0, + 984000384.0, + 978473344.0, + 941712128.0, + 940852544.0, + 985462272.0, + 969591488.0, + 954145344.0, + 945101440.0, + 942309120.0, + 967699648.0, + 976427584.0, + 966555648.0, + 971402176.0, + 933462400.0, + 972825600.0, + 967395584.0, + 979034688.0, + 977268480.0, + 974676928.0, + 969167360.0, + 965481088.0, + 951445376.0, + 966911040.0, + 973285312.0, + 976264576.0, + 981931840.0, + 947635904.0, + 976055296.0, + 966089152.0, + 972691712.0, + 963840320.0, + 941326208.0, + 957706688.0, + 969287104.0, + 976068224.0, + 974985856.0, + 950714816.0, + 952715328.0, + 984747904.0, + 948829312.0, + 957722496.0, + 973929664.0, + 975078016.0, + 981288960.0, + 970005376.0, + 938006720.0, + 953810176.0, + 979296768.0, + 983487808.0, + 971574336.0, + 951608896.0, + 959221376.0, + 971856768.0, + 959724992.0, + 976883008.0, + 954026496.0, + 953275904.0, + 978716032.0, + 944000576.0, + 963460480.0, + 965799040.0, + 972922752.0, + 954891840.0, + 937705664.0, + 972915136.0, + 975415296.0, + 954533888.0, + 980177536.0, + 953753536.0, + 969381248.0, + 968142976.0, + 972537600.0, + 973831808.0, + 954343424.0, + 960220224.0, + 958790080.0, + 976038016.0, + 983972096.0, + 953807232.0, + 932920064.0, + 970799232.0, + 959723200.0, + 948864896.0, + 964884480.0, + 971112576.0, + 984398720.0, + 967574912.0, + 957656512.0, + 975620352.0, + 959166272.0, + 965121472.0, + 960652416.0, + 954976960.0, + 965413056.0, + 975551552.0, + 964127872.0, + 967163776.0, + 964531584.0, + 956100864.0, + 980619648.0, + 934836864.0, + 967758784.0, + 960241536.0, + 983953408.0, + 978476288.0, + 941728128.0, + 940770368.0, + 985442304.0, + 969497024.0, + 953989952.0, + 945055552.0, + 942263104.0, + 967777280.0, + 976334592.0, + 966571840.0, + 971235520.0, + 933357312.0, + 972671744.0, + 967401792.0, + 979026880.0, + 977224000.0, + 959510016.0, + 950832384.0, + 967844480.0, + 980699008.0, + 987294720.0, + 965245376.0, + 965616000.0, + 974624832.0, + 967250176.0, + 975755136.0, + 962569152.0, + 948885888.0, + 967623488.0, + 972936064.0, + 971688000.0, + 969572288.0, + 965403712.0, + 976450048.0, + 989531008.0, + 970944256.0, + 972181888.0, + 936046528.0, + 966370304.0, + 972200768.0, + 969336704.0, 
+ 970431936.0, + 965288192.0, + 933166272.0, + 971364160.0, + 978929344.0, + 976331584.0, + 987327296.0, + 946889280.0, + 958214528.0, + 977118720.0, + 965547392.0, + 984612736.0, + 935114432.0, + 955475520.0, + 972495680.0, + 975402432.0, + 988739072.0, + 968162624.0, + 943416128.0, + 970848256.0, + 967395264.0, + 965988672.0, + 962378304.0, + 963967360.0, + 974648768.0, + 966361280.0, + 969268864.0, + 971763968.0, + 950869760.0, + 974286400.0, + 963961600.0, + 968563968.0, + 985711744.0, + 954805696.0, + 939713024.0, + 968065728.0, + 974649024.0, + 971763008.0, + 976928640.0, + 955428352.0, + 972459392.0, + 973543424.0, + 976338688.0, + 987718144.0, + 951488512.0, + 983185536.0, + 990421184.0, + 975663808.0, + 988157376.0, + 934474752.0, + 951868096.0, + 973459264.0, + 986276992.0, + 962858752.0, + 955511168.0, + 954075456.0, + 985214144.0, + 982672000.0, + 961882304.0, + 967703552.0, + 946924160.0, + 959210112.0, + 978227776.0, + 990556352.0, + 984685696.0, + 953637440.0, + 958717504.0, + 969459200.0, + 978187648.0, + 976566016.0, + 957046720.0, + 944725248.0, + 962267136.0, + 953223680.0, + 978039936.0, + 963942272.0, + 948576896.0, + 958290880.0, + 958961728.0, + 972314368.0, + 966842496.0, + 945102400.0, + 977229952.0, + 979577792.0, + 965203776.0, + 968379712.0, + 943055680.0, + 962542976.0, + 975157952.0, + 975780416.0, + 969534784.0, + 953857728.0, + 938889856.0, + 963288384.0, + 974510976.0, + 965829248.0, + 968128256.0, + 935808384.0, + 960364928.0, + 968863872.0, + 980017088.0, + 966261760.0, + 962851008.0, + 950767872.0, + 967333888.0, + 965313216.0, + 972419840.0, + 962101504.0, + 943887040.0, + 969182656.0, + 987024576.0, + 965374016.0, + 959152896.0, + 935401664.0, + 957712768.0, + 962500096.0, + 965676736.0, + 952943680.0, + 947405440.0, + 978652480.0, + 976018432.0, + 976036544.0, + 953411200.0, + 956691520.0, + 950750080.0, + 974362624.0, + 963357504.0, + 973498368.0, + 982044224.0, + 935211136.0, + 980957504.0, + 957125504.0, + 990201792.0, + 983980224.0, + 954271168.0, + 964047040.0, + 972504448.0, + 972838976.0, + 969950016.0, + 936930688.0, + 948201088.0, + 965381120.0, + 973615040.0, + 969120832.0, + 959927104.0, + 932712768.0, + 940376960.0, + 971462336.0, + 958116544.0, + 960537024.0, + 942766848.0, + 971615104.0, + 974988928.0, + 965281088.0, + 968122240.0, + 955333376.0, + 977009664.0, + 975159168.0, + 971173824.0, + 975966208.0, + 968325312.0, + 954160704.0, + 965048576.0, + 970862976.0, + 973944320.0, + 967710656.0, + 960264576.0, + 974721024.0, + 960673984.0, + 967960576.0, + 966799232.0, + 945622080.0, + 961106112.0, + 972691520.0, + 975854912.0, + 967126144.0, + 973556224.0, + 941441152.0, + 969625728.0, + 974827008.0, + 966048192.0, + 961113920.0, + 940970176.0, + 977645248.0, + 957299008.0, + 982105536.0, + 951640832.0, + 944700992.0, + 942565184.0, + 963256512.0, + 982825856.0, + 968952384.0, + 951149504.0, + 945334912.0, + 952500224.0, + 975310848.0, + 978003264.0, + 956148416.0, + 943718720.0, + 982953792.0, + 963869312.0, + 948198016.0, + 967967552.0, + 947703872.0, + 955879552.0, + 972025792.0, + 969435008.0, + 968857984.0, + 923345024.0, + 952490304.0, + 970841408.0, + 964098560.0, + 961509760.0, + 964125952.0, + 948952000.0, + 978255488.0, + 947187904.0, + 951233920.0, + 968771136.0, + 938644800.0, + 964122816.0, + 977037632.0, + 974097152.0, + 973905280.0, + 955970624.0, + 962578944.0, + 962210048.0, + 986755008.0, + 966615552.0, + 975372800.0, + 937327680.0, + 973089984.0, + 980755456.0, + 966728320.0, + 977002112.0, + 941108672.0, + 962906176.0, 
+ 972684928.0, + 987922688.0, + 966439424.0, + 934246528.0, + 952499136.0, + 960191168.0, + 965288896.0, + 960432640.0, + 958084736.0, + 959877888.0, + 988470784.0, + 959201856.0, + 971490432.0, + 954907456.0, + 939653568.0, + 971027136.0, + 966186880.0, + 973723328.0, + 969431488.0, + 948177984.0, + 976689152.0, + 970478656.0, + 971602432.0, + 980754432.0, + 934142080.0, + 952305088.0, + 971836928.0, + 955838016.0, + 974878592.0, + 962123904.0, + 936078720.0, + 980041792.0, + 986885440.0, + 976687104.0, + 967537792.0, + 943618880.0, + 965750144.0, + 958588672.0, + 966946880.0, + 973014656.0, + 947739072.0, + 936636736.0, + 956736576.0, + 961392640.0, + 972654528.0, + 965664384.0, + 947980160.0, + 960048064.0, + 978128128.0, + 978347584.0, + 973416384.0, + 952793536.0, + 967907200.0, + 960362496.0, + 985349440.0, + 975228096.0, + 940725888.0, + 975590848.0, + 970927360.0, + 977941568.0, + 967524352.0, + 947491392.0, + 956494912.0, + 976898176.0, + 968660288.0, + 967410176.0, + 956779456.0, + 947900800.0, + 968135104.0, + 962090944.0, + 953876800.0, + 974625536.0, + 942612928.0, + 965259392.0, + 975841856.0, + 962702400.0, + 970134016.0, + 961016192.0, + 948337728.0, + 970443648.0, + 964928704.0, + 963995136.0, + 951901632.0, + 947745408.0, + 970828800.0, + 964468288.0, + 974607680.0, + 973945920.0, + 935287040.0, + 967770304.0, + 965945280.0, + 968131840.0, + 983505536.0, + 946878976.0, + 957194688.0, + 968776704.0, + 962673536.0, + 979175616.0, + 957675008.0, + 948040256.0, + 974693824.0, + 961418944.0, + 961560896.0, + 975510336.0, + 922718464.0, + 976531200.0, + 970026176.0, + 968694208.0, + 959724480.0, + 953878592.0, + 959786176.0, + 957711360.0, + 970779072.0, + 961660480.0, + 939856128.0, + 927031872.0, + 973463552.0, + 972118144.0, + 961870784.0, + 963176832.0, + 943803904.0, + 964440768.0, + 961923136.0, + 980821056.0, + 956335424.0, + 952625664.0, + 981711872.0, + 961568320.0, + 964838464.0, + 968888448.0, + 965878528.0, + 951641984.0, + 966636160.0, + 977746048.0, + 973747712.0, + 973595008.0, + 941317888.0, + 963372032.0, + 973433216.0, + 967439680.0, + 971444480.0, + 954296192.0, + 964910336.0, + 957482816.0, + 972145536.0, + 981093632.0, + 938479488.0, + 943498048.0, + 970730752.0, + 970503296.0, + 969970048.0, + 958199808.0, + 941458752.0, + 974803712.0, + 969307904.0, + 959485248.0, + 972349696.0, + 937029248.0, + 974571712.0, + 971851840.0, + 963073088.0, + 965308800.0, + 947386304.0, + 950749504.0, + 973501440.0, + 966506496.0, + 965451008.0, + 946476800.0, + 954803776.0, + 960677888.0, + 975553280.0, + 985697472.0, + 967333056.0, + 963199360.0, + 976725312.0, + 967323200.0, + 992654080.0, + 986509056.0, + 943164288.0, + 953127744.0, + 977015040.0, + 965123136.0, + 969399616.0, + 961335488.0, + 945745024.0, + 974935296.0, + 968672640.0, + 973500288.0, + 963175616.0, + 950083776.0, + 988846400.0, + 962997248.0, + 972118976.0, + 983144960.0, + 952239232.0, + 972193728.0, + 965854016.0, + 965680256.0, + 974366720.0, + 953477760.0, + 939308352.0, + 972329984.0, + 981523200.0, + 969490048.0, + 978188160.0, + 938694016.0, + 968842816.0, + 971686016.0, + 970983040.0, + 971740736.0, + 930656576.0, + 984600832.0, + 975402176.0, + 988961728.0, + 965132288.0, + 947274624.0, + 955488128.0, + 973385280.0, + 977471808.0, + 963681600.0, + 960927552.0, + 953023424.0, + 987824832.0, + 965590976.0, + 963980096.0, + 960733760.0, + 949200448.0, + 967588352.0, + 966703104.0, + 969066240.0, + 976370560.0, + 941354688.0, + 966642816.0, + 966047488.0, + 969355904.0, + 990853056.0, + 949508736.0, 
+ 950438720.0, + 981411520.0, + 968631616.0, + 970798592.0, + 964391936.0, + 941445376.0, + 976633344.0, + 976118848.0, + 969960576.0, + 976733184.0, + 945326720.0, + 948413952.0, + 976370624.0, + 979899776.0, + 960155776.0, + 938561792.0, + 950783168.0, + 985888768.0, + 971780864.0, + 949372096.0, + 963471488.0, + 931262656.0, + 958901056.0, + 967674688.0, + 979127360.0, + 986270144.0, + 953936576.0, + 969436672.0, + 974246272.0, + 977548736.0, + 965172032.0, + 936836864.0, + 957858240.0, + 971553152.0, + 959909888.0, + 968876608.0, + 961975744.0, + 946142976.0, + 965700224.0, + 963430208.0, + 968827456.0, + 979429888.0, + 957229312.0, + 978127168.0, + 957243392.0, + 971896576.0, + 960394560.0, + 946881600.0, + 978391808.0, + 977875008.0, + 968016256.0, + 980692352.0, + 957090624.0, + 948157504.0, + 981413248.0, + 986890368.0, + 971801600.0, + 970665024.0, + 943533568.0, + 965002304.0, + 978120960.0, + 968204416.0, + 976235008.0, + 945316992.0, + 958159104.0, + 965148736.0, + 983245248.0, + 973754624.0, + 941476224.0, + 969210304.0, + 970490048.0, + 993738944.0, + 962900480.0, + 960056640.0, + 958914688.0, + 969335552.0, + 970590464.0, + 970878528.0, + 961918208.0, + 953020416.0, + 976659328.0, + 966341248.0, + 951200576.0, + 989766400.0, + 960174400.0, + 951518144.0, + 970880192.0, + 965541056.0, + 972397696.0, + 959394624.0, + 945449536.0, + 978595136.0, + 968110784.0, + 978378688.0, + 970272128.0, + 948860160.0, + 960002688.0, + 977512064.0, + 973280832.0, + 969158464.0, + 948459584.0, + 948905408.0, + 962331392.0, + 971808064.0, + 945258816.0, + 955597504.0, + 949455360.0, + 978680576.0, + 948608704.0, + 967549120.0, + 951794112.0, + 942391424.0, + 972997888.0, + 968783232.0, + 984962624.0, + 969719360.0, + 949466304.0, + 950536576.0, + 962350208.0, + 983682880.0, + 971230080.0, + 954590464.0, + 943698688.0, + 969232832.0, + 983961856.0, + 979398080.0, + 972813440.0, + 958214464.0, + 957435136.0, + 966599616.0, + 976473088.0, + 970263232.0, + 962195200.0, + 972318912.0, + 968081344.0, + 955904960.0, + 967072896.0, + 969866688.0, + 952741824.0, + 966310720.0, + 957955392.0, + 969470720.0, + 944493824.0, + 940694784.0, + 965002752.0, + 970607616.0, + 981817728.0, + 968428480.0, + 953521984.0, + 965388416.0, + 978347328.0, + 966850624.0, + 960573952.0, + 933111680.0, + 964919808.0, + 964675648.0, + 977765696.0, + 981308736.0, + 949135872.0, + 922461440.0, + 975057792.0, + 991951936.0, + 992210432.0, + 967401600.0, + 941656640.0, + 960320832.0, + 960569920.0, + 965271424.0, + 961143872.0, + 956793408.0, + 954331776.0, + 967170240.0, + 966033472.0, + 966194624.0, + 935685696.0, + 958637312.0, + 964073920.0, + 974766976.0, + 960167808.0, + 962641856.0, + 944531328.0, + 956464576.0, + 969817216.0, + 967020800.0, + 964316928.0, + 950040640.0, + 965108288.0, + 970707520.0, + 980185728.0, + 954227968.0, + 936634944.0, + 957711040.0, + 970087360.0, + 962822208.0, + 965236480.0, + 979863808.0, + 942509696.0, + 962691712.0, + 949963776.0, + 968699840.0, + 965042816.0, + 948904512.0, + 962331520.0, + 971807808.0, + 945271168.0, + 955597760.0, + 949480384.0, + 978692800.0, + 948633472.0, + 967562112.0, + 951806016.0, + 942356352.0, + 972985728.0, + 968772288.0, + 984974592.0, + 969719616.0, + 949453056.0, + 950524416.0, + 962338048.0, + 983694528.0, + 971254464.0, + 954589504.0, + 943710464.0, + 969159104.0, + 983937152.0, + 979348992.0, + 972752576.0, + 958227328.0, + 957435456.0, + 966537856.0, + 976399232.0, + 970201984.0, + 962133248.0, + 972320256.0, + 968094912.0, + 955942080.0, + 967048960.0, 
+ 969793600.0, + 952729216.0, + 966249792.0, + 957856000.0, + 969446016.0, + 944469504.0, + 940671232.0, + 965039744.0, + 970583872.0, + 981854400.0, + 968454016.0, + 953595776.0, + 965265216.0, + 978310272.0, + 966863360.0, + 960635648.0, + 933161216.0, + 964981120.0, + 964736704.0, + 977729216.0, + 981369856.0, + 949234688.0, + 922472896.0, + 975021504.0, + 991977024.0, + 992198400.0, + 967413376.0, + 941779264.0, + 960358272.0, + 960520512.0, + 965234304.0, + 961217984.0, + 956818368.0, + 954282368.0, + 967119360.0, + 966021440.0, + 966181120.0, + 935647680.0, + 958686080.0, + 964085824.0, + 974692800.0, + 960190976.0, + 962678208.0, + 944629888.0, + 956341248.0, + 969866048.0, + 966983168.0, + 964305920.0, + 950040960.0, + 965118976.0, + 970683328.0, + 980246336.0, + 954313152.0, + 936660224.0, + 957858176.0, + 970259200.0, + 962982400.0, + 965395968.0, + 979852160.0, + 942571520.0, + 962814848.0, + 950123776.0, + 968750016.0, + 965141760.0, + 948823808.0, + 983623680.0, + 940748288.0, + 963410048.0, + 963626816.0, + 939562048.0, + 970609984.0, + 963134400.0, + 974736704.0, + 966612352.0, + 963030144.0, + 937643136.0, + 961444800.0, + 960444480.0, + 977787712.0, + 971132288.0, + 936175168.0, + 973130176.0, + 975083584.0, + 961572544.0, + 963151104.0, + 932620736.0, + 970335360.0, + 966968384.0, + 954511040.0, + 968624704.0, + 945154176.0, + 952199488.0, + 966007488.0, + 960615488.0, + 964620480.0, + 964969408.0, + 956707776.0, + 968735616.0, + 978274240.0, + 958711488.0, + 959233728.0, + 955382144.0, + 962608128.0, + 977112960.0, + 974712192.0, + 971333376.0, + 941660416.0, + 944241984.0, + 963605952.0, + 965682432.0, + 983975040.0, + 960205312.0, + 959871232.0, + 960268288.0, + 971132416.0, + 978150400.0, + 982862528.0, + 941887168.0, + 961903872.0, + 970491520.0, + 974277312.0, + 962182208.0, + 954811008.0, + 944578496.0, + 970455424.0, + 966991744.0, + 957494528.0, + 964056000.0, + 944423936.0, + 980454208.0, + 974465280.0, + 954423104.0, + 960839872.0, + 937270976.0, + 973537088.0, + 969785024.0, + 970211840.0, + 981995712.0, + 926688512.0, + 973748032.0, + 970980928.0, + 977698432.0, + 961616192.0, + 942736064.0, + 961197184.0, + 968038720.0, + 971284672.0, + 977131584.0, + 974754496.0, + 949533696.0, + 958820480.0, + 969128000.0, + 968374592.0, + 972292992.0, + 947686400.0, + 964750464.0, + 958587776.0, + 967742336.0, + 969104640.0, + 959429760.0, + 961944064.0, + 967365632.0, + 973772096.0, + 978696448.0, + 969170432.0, + 940230528.0, + 963335168.0, + 962181120.0, + 981141568.0, + 978090112.0, + 935244096.0, + 962151232.0, + 969541120.0, + 963164928.0, + 974049536.0, + 964759744.0, + 960492544.0, + 955703296.0, + 980971840.0, + 966296320.0, + 953239168.0, + 945131968.0, + 978993728.0, + 971790528.0, + 957845696.0, + 956807936.0, + 949911232.0, + 971328768.0, + 970873152.0, + 954099072.0, + 952163968.0, + 936398080.0, + 969994880.0, + 981506048.0, + 966612288.0, + 988070656.0, + 956919872.0, + 944482112.0, + 969565056.0, + 977715904.0, + 980382464.0, + 975873344.0, + 947583936.0, + 949577472.0, + 952022016.0, + 978221120.0, + 978280768.0, + 959719360.0, + 958698240.0, + 977777216.0, + 971708736.0, + 968023168.0, + 944388096.0, + 929667264.0, + 971642816.0, + 959842176.0, + 960068416.0, + 977488000.0, + 946279616.0, + 972871424.0, + 965121152.0, + 963813248.0, + 972704512.0, + 948418368.0, + 967054528.0, + 976690496.0, + 957752128.0, + 965221888.0, + 939264320.0, + 949405568.0, + 979472768.0, + 972559104.0, + 961187072.0, + 958784576.0, + 955768896.0, + 976584832.0, + 975012864.0, 
+ 963368064.0, + 961595904.0, + 942477504.0, + 967543744.0, + 987212416.0, + 970426816.0, + 962507008.0, + 932487296.0, + 968146496.0, + 971241984.0, + 963397184.0, + 965990016.0, + 975485760.0, + 959697920.0, + 957662528.0, + 959848512.0, + 964331840.0, + 973422784.0, + 944137856.0, + 959017792.0, + 968962944.0, + 963458624.0, + 965024960.0, + 950269376.0, + 944364608.0, + 976819584.0, + 974035776.0, + 975248256.0, + 951434944.0, + 958625984.0, + 978308032.0, + 968245952.0, + 964074816.0, + 958005696.0, + 944474752.0, + 956913152.0, + 979996544.0, + 963568768.0, + 961635776.0, + 941341568.0, + 977417408.0, + 968409280.0, + 983728768.0, + 959474560.0, + 952618368.0, + 948522176.0, + 972658624.0, + 968114880.0, + 987826176.0, + 979746368.0, + 951888000.0, + 974990528.0, + 970640384.0, + 983833984.0, + 955228416.0, + 938310784.0, + 987369344.0, + 968621056.0, + 982585856.0, + 971603136.0, + 946924800.0, + 960180224.0, + 973838720.0, + 956210240.0, + 977756096.0, + 955286400.0, + 956882368.0, + 975506176.0, + 982850816.0, + 972406336.0, + 955346432.0, + 954768256.0, + 971891264.0, + 976223872.0, + 965384960.0, + 988329536.0, + 940920000.0, + 963516736.0, + 973791744.0, + 961151936.0, + 962630848.0, + 945259840.0, + 962798080.0, + 960549376.0, + 965974080.0, + 976438784.0, + 955598720.0, + 936489600.0, + 981645120.0, + 971192576.0, + 979336256.0, + 979060288.0, + 937376640.0, + 965843264.0, + 961182976.0, + 975227776.0, + 985569344.0, + 925643264.0, + 950198272.0, + 968529856.0, + 963685760.0, + 964228672.0, + 940943680.0, + 964576512.0, + 986008448.0, + 959602368.0, + 973525952.0, + 965438208.0, + 949892032.0, + 973680576.0, + 964967040.0, + 968299904.0, + 969289280.0, + 968079616.0, + 958577408.0, + 965750208.0, + 981167168.0, + 967182912.0, + 955320704.0, + 952202112.0, + 978290560.0, + 967783360.0, + 979566144.0, + 962871104.0, + 946183552.0, + 980836992.0, + 960626880.0, + 972459520.0, + 963098752.0, + 938030592.0, + 963154048.0, + 970648512.0, + 975693952.0, + 969214912.0, + 939156160.0, + 960843904.0, + 983181056.0, + 969683072.0, + 983899968.0, + 957171392.0, + 955291520.0, + 975634176.0, + 950389504.0, + 968456128.0, + 973664448.0, + 955240576.0, + 968927104.0, + 965345600.0, + 974902528.0, + 977416192.0, + 953380032.0, + 946584256.0, + 975541632.0, + 978207232.0, + 966041728.0, + 955186368.0, + 951993344.0, + 969656640.0, + 964069440.0, + 961641024.0, + 973128448.0, + 939283392.0, + 972562176.0, + 965967872.0, + 967518784.0, + 964891712.0, + 950547584.0, + 957620352.0, + 976627584.0, + 966624064.0, + 965923456.0, + 949839616.0, + 961386048.0, + 962042496.0, + 964597056.0, + 992649600.0, + 966484416.0, + 933762560.0, + 980412096.0, + 973889024.0, + 991910848.0, + 962221504.0, + 927516608.0, + 957914688.0, + 1003087936.0, + 969438336.0, + 994572928.0, + 957337152.0, + 945402752.0, + 973264000.0, + 963371072.0, + 970002112.0, + 978065536.0, + 932970944.0, + 977331136.0, + 974472512.0, + 966659840.0, + 980392768.0, + 948684800.0, + 978253760.0, + 964314496.0, + 974387840.0, + 974428800.0, + 960729920.0, + 961564480.0, + 974459776.0, + 971480448.0, + 964652608.0, + 966532032.0, + 954160512.0, + 968842496.0, + 974479040.0, + 955530432.0, + 979164288.0, + 933598720.0, + 969210112.0, + 970310272.0, + 989090368.0, + 976012416.0, + 944329024.0, + 958350016.0, + 966741376.0, + 974725312.0, + 964733760.0, + 950395456.0, + 937944768.0, + 986087296.0, + 967035968.0, + 968190208.0, + 968882560.0, + 942668800.0, + 958466624.0, + 967102208.0, + 968608064.0, + 974031808.0, + 955323136.0, + 
945243136.0, + 966673472.0, + 959799104.0, + 961131328.0, + 950403200.0, + 958410368.0, + 985041920.0, + 962865792.0, + 951850560.0, + 963336960.0, + 955052032.0, + 973814400.0, + 973320128.0, + 970091712.0, + 983395328.0, + 941096832.0, + 970075712.0, + 985897984.0, + 960378240.0, + 968476480.0, + 946361280.0, + 955714624.0, + 961451904.0, + 984933056.0, + 970828992.0, + 962079808.0, + 931361344.0, + 963916352.0, + 968430656.0, + 970390592.0, + 979846656.0, + 943707392.0, + 961262400.0, + 970290496.0, + 971187776.0, + 970230336.0, + 948264832.0, + 953755520.0, + 967838720.0, + 969190720.0, + 973588032.0, + 971746112.0, + 927563648.0, + 975884352.0, + 967900480.0, + 950607296.0, + 968911168.0, + 952115648.0, + 971788736.0, + 967855360.0, + 974516352.0, + 966063552.0, + 951767104.0, + 963103232.0, + 973122304.0, + 959739008.0, + 958534272.0, + 974417088.0, + 954375424.0, + 974756032.0, + 956526208.0, + 971175296.0, + 973135104.0, + 956416576.0, + 960451904.0, + 978049216.0, + 963036864.0, + 983686336.0, + 945734784.0, + 955926016.0, + 976058432.0, + 968833536.0, + 972618816.0, + 927228160.0, + 958656448.0, + 980451072.0, + 968281600.0, + 983305408.0, + 962883328.0, + 936271360.0, + 980970048.0, + 980767040.0, + 978618816.0, + 983502976.0, + 934806784.0, + 966015616.0, + 965425664.0, + 977339008.0, + 978005504.0, + 947828288.0, + 946365760.0, + 967452352.0, + 977266560.0, + 966671936.0, + 977114112.0, + 945662592.0, + 960290304.0, + 975321280.0, + 961174784.0, + 969118016.0, + 941631424.0, + 967631616.0, + 970321856.0, + 960391040.0, + 957362112.0, + 942030016.0, + 968485120.0, + 971643776.0, + 965604032.0, + 959727488.0, + 945985280.0, + 945622848.0, + 972329152.0, + 973611712.0, + 966334720.0, + 949630208.0, + 935228416.0, + 964480704.0, + 964293952.0, + 974332480.0, + 970879616.0, + 935772288.0, + 961582784.0, + 966219520.0, + 962436224.0, + 984202496.0, + 972814784.0, + 954326848.0, + 962301696.0, + 967726976.0, + 977598912.0, + 967686464.0, + 940986560.0, + 960195072.0, + 970812352.0, + 968921536.0, + 960585280.0, + 948979328.0, + 962759872.0, + 965529280.0, + 974816960.0, + 952455744.0, + 957332288.0, + 953608576.0, + 977573312.0, + 965862720.0, + 956113792.0, + 950569664.0, + 941777600.0, + 969468352.0, + 966002432.0, + 958425664.0, + 975031808.0, + 937451584.0, + 964906560.0, + 981260416.0, + 969369152.0, + 972111296.0, + 952362304.0, + 976727168.0, + 964479552.0, + 969750464.0, + 959772672.0, + 944965504.0, + 961007488.0, + 963736832.0, + 979597376.0, + 960763776.0, + 972219584.0, + 942147456.0, + 960588096.0, + 959118592.0, + 975184256.0, + 969104192.0, + 952613120.0, + 971003008.0, + 966003712.0, + 968722688.0, + 981709184.0, + 958637952.0, + 942135808.0, + 969012736.0, + 956066816.0, + 961078848.0, + 970604352.0, + 959763904.0, + 955736000.0, + 962221568.0, + 968104256.0, + 967102464.0, + 945729856.0, + 967452096.0, + 977266816.0, + 966684352.0, + 977138496.0, + 945675136.0, + 960314624.0, + 975333248.0, + 961163392.0, + 969118656.0, + 941668224.0, + 967618752.0, + 970310848.0, + 960390656.0, + 957349952.0, + 942054272.0, + 968522496.0, + 971630912.0, + 965654400.0, + 959715072.0, + 945985536.0, + 945622912.0, + 972304320.0, + 973623872.0, + 966310336.0, + 949592576.0, + 935240704.0, + 964480640.0, + 964294144.0, + 974319744.0, + 970904320.0, + 935772608.0, + 961582656.0, + 966231744.0, + 962412480.0, + 984191040.0, + 972813760.0, + 954352128.0, + 962312960.0, + 967787968.0, + 977586176.0, + 967588288.0, + 940987136.0, + 960217856.0, + 970800640.0, + 968921728.0, + 960646912.0, + 
948992128.0, + 962796928.0, + 965480256.0, + 974755904.0, + 952406272.0, + 957295936.0, + 953620608.0, + 977634496.0, + 965862528.0, + 956126976.0, + 950631168.0, + 941765696.0, + 969492864.0, + 965991168.0, + 958413248.0, + 975006912.0, + 937452224.0, + 964857280.0, + 981273344.0, + 969332608.0, + 972110976.0, + 952312256.0, + 976764032.0, + 964503616.0, + 969714432.0, + 959760512.0, + 944964736.0, + 960970496.0, + 963712000.0, + 979598528.0, + 960813632.0, + 972195008.0, + 942196480.0, + 960526144.0, + 959204864.0, + 975196480.0, + 969104384.0, + 952576256.0, + 971002816.0, + 966052416.0, + 968722944.0, + 981745984.0, + 958625536.0, + 942160448.0, + 969013568.0, + 956042624.0, + 961053696.0, + 970629248.0, + 959739200.0, + 955724736.0, + 962209088.0, + 968142464.0, + 967089280.0, + 945668864.0, + 960898432.0, + 977235008.0, + 969578176.0, + 951888832.0, + 950502208.0, + 968757248.0, + 975886080.0, + 981332416.0, + 964812288.0, + 943024320.0, + 940390656.0, + 973548160.0, + 965943360.0, + 966471936.0, + 959265536.0, + 921419520.0, + 966048448.0, + 972807872.0, + 968119936.0, + 973638208.0, + 950156992.0, + 942198208.0, + 956521728.0, + 957227008.0, + 974578816.0, + 964789376.0, + 947673280.0, + 958552960.0, + 969896832.0, + 973866304.0, + 963319232.0, + 946974656.0, + 970507136.0, + 974300928.0, + 968728256.0, + 967993664.0, + 944390016.0, + 973438848.0, + 966476032.0, + 966619840.0, + 948474624.0, + 949144256.0, + 952625920.0, + 968869184.0, + 966905280.0, + 969443712.0, + 953125312.0, + 950726016.0, + 963289408.0, + 967115584.0, + 959554112.0, + 961955136.0, + 949928832.0, + 962123072.0, + 974075328.0, + 964812864.0, + 968112192.0, + 935624256.0, + 965690432.0, + 975013376.0, + 972230656.0, + 983225600.0, + 950191424.0, + 941864832.0, + 968202112.0, + 959672128.0, + 963905280.0, + 970108288.0, + 938069504.0, + 956557248.0, + 974909952.0, + 970088640.0, + 985589312.0, + 950439488.0, + 971229504.0, + 960636544.0, + 973406400.0, + 963754944.0, + 958400000.0, + 955605056.0, + 980480384.0, + 978698560.0, + 959990272.0, + 998419264.0, + 955564032.0, + 963239104.0, + 962140224.0, + 967289216.0, + 967623488.0, + 939625088.0, + 992438272.0, + 974271680.0, + 959808000.0, + 979177664.0, + 945195200.0, + 970064064.0, + 978481792.0, + 981026304.0, + 979290944.0, + 947546688.0, + 936087040.0, + 969423872.0, + 980170304.0, + 982353344.0, + 967697472.0, + 941079040.0, + 976418240.0, + 974431168.0, + 971272640.0, + 978628032.0, + 947147392.0, + 957343680.0, + 972823040.0, + 973406848.0, + 975070016.0, + 966886848.0, + 936935360.0, + 1005433536.0, + 963226496.0, + 972925376.0, + 984034944.0, + 943340416.0, + 959539456.0, + 958324736.0, + 983017664.0, + 966851392.0, + 948965248.0, + 982532160.0, + 971716800.0, + 966982912.0, + 972968512.0, + 976725824.0, + 945723520.0, + 981040576.0, + 971900864.0, + 972293312.0, + 955699392.0, + 937940608.0, + 978129344.0, + 962060160.0, + 966207296.0, + 974121344.0, + 944960256.0, + 961661312.0, + 985523584.0, + 973240384.0, + 964819072.0, + 939521344.0, + 969320256.0, + 967837952.0, + 970653312.0, + 983024064.0, + 970004416.0, + 937659904.0, + 958299712.0, + 964612224.0, + 963229888.0, + 974880768.0, + 937586176.0, + 977672640.0, + 981671680.0, + 973553152.0, + 962791808.0, + 959164352.0, + 973840640.0, + 985303168.0, + 965766528.0, + 961121280.0, + 973491776.0, + 952712768.0, + 965113856.0, + 964555520.0, + 971201728.0, + 970863488.0, + 950785472.0, + 959686464.0, + 970848704.0, + 975874880.0, + 979675904.0, + 960527168.0, + 959776256.0, + 965007936.0, + 972823616.0, 
+ 972689152.0, + 948126912.0, + 940385024.0, + 987545728.0, + 974689920.0, + 982222080.0, + 983776064.0, + 948529472.0, + 970900096.0, + 991997760.0, + 968460864.0, + 981246912.0, + 948523264.0, + 968977408.0, + 970034368.0, + 980296064.0, + 973424512.0, + 953459648.0, + 955207168.0, + 984964224.0, + 971993728.0, + 966674368.0, + 953027328.0, + 941834752.0, + 973236864.0, + 965828544.0, + 973984000.0, + 981075840.0, + 964756992.0, + 976059200.0, + 963879360.0, + 988287680.0, + 978435072.0, + 945715904.0, + 961802048.0, + 969206336.0, + 977976960.0, + 952105024.0, + 956877824.0, + 956256512.0, + 990695744.0, + 980071360.0, + 953720896.0, + 962829120.0, + 945668032.0, + 972284032.0, + 972888640.0, + 967761792.0, + 980776448.0, + 948615040.0, + 966361792.0, + 982206272.0, + 966370944.0, + 986622464.0, + 948144704.0, + 949329088.0, + 959902784.0, + 970838912.0, + 966989184.0, + 957025216.0, + 942351104.0, + 958215360.0, + 960865856.0, + 983184960.0, + 972305856.0, + 961650560.0, + 944967104.0, + 977176128.0, + 960722368.0, + 973730560.0, + 957799104.0, + 950623808.0, + 984631680.0, + 965180288.0, + 971907776.0, + 959599424.0, + 951507904.0, + 962582528.0, + 971796544.0, + 973951552.0, + 956933184.0, + 951876480.0, + 965066496.0, + 957428736.0, + 945454016.0, + 963840192.0, + 951509568.0, + 948340736.0, + 964039680.0, + 959940032.0, + 961440512.0, + 953579328.0, + 945393536.0, + 977347392.0, + 968001984.0, + 963222592.0, + 981661248.0, + 936674816.0, + 969665088.0, + 974688832.0, + 955779008.0, + 971591680.0, + 939876160.0, + 957993856.0, + 972945152.0, + 981450880.0, + 979301504.0, + 938979392.0, + 938124992.0, + 960974528.0, + 966305216.0, + 956191104.0, + 975439232.0, + 935804096.0, + 957565824.0, + 968625344.0, + 962437568.0, + 977906112.0, + 964598784.0, + 977459584.0, + 982078016.0, + 966283200.0, + 973177920.0, + 954320512.0, + 943455744.0, + 970327808.0, + 971942080.0, + 973416896.0, + 961534848.0, + 950097792.0, + 982887680.0, + 952245760.0, + 957750528.0, + 964709760.0, + 937046016.0, + 977337216.0, + 965727808.0, + 943180864.0, + 960242560.0, + 925050112.0, + 958928576.0, + 969931136.0, + 969109184.0, + 971194816.0, + 962613440.0, + 939920512.0, + 976433472.0, + 971829440.0, + 958282368.0, + 971260416.0, + 949039552.0, + 956771968.0, + 956933952.0, + 980506752.0, + 973143168.0, + 927628224.0, + 974584512.0, + 976833664.0, + 960569856.0, + 988030656.0, + 965211520.0, + 935127616.0, + 976348608.0, + 967932480.0, + 963279040.0, + 975688640.0, + 952014400.0, + 968965312.0, + 961304384.0, + 952195008.0, + 965002880.0, + 941603392.0, + 953584704.0, + 977179904.0, + 976776576.0, + 972946368.0, + 953639360.0, + 946864960.0, + 976625664.0, + 964002432.0, + 973798976.0, + 960317632.0, + 945066496.0, + 988350464.0, + 980451392.0, + 977086400.0, + 963622592.0, + 929048384.0, + 981782464.0, + 967433024.0, + 972647936.0, + 974506560.0, + 945966016.0, + 939222784.0, + 957534976.0, + 987667840.0, + 965267520.0, + 976038848.0, + 937561920.0, + 972810240.0, + 975164800.0, + 972472064.0, + 972491264.0, + 947998336.0, + 966361344.0, + 970009856.0, + 973619008.0, + 981154560.0, + 952933696.0, + 976786560.0, + 947885376.0, + 958564544.0, + 966372736.0, + 966783488.0, + 938809408.0, + 983439936.0, + 967003904.0, + 968498368.0, + 967651008.0, + 952398144.0, + 964309888.0, + 961600256.0, + 966716352.0, + 962245312.0, + 929510080.0, + 961024832.0, + 972022528.0, + 961711488.0, + 966981184.0, + 957040896.0, + 942684160.0, + 974605760.0, + 954346688.0, + 959398464.0, + 962317760.0, + 951121408.0, + 967225728.0, 
+ 974124032.0, + 979479744.0, + 987104128.0, + 940624832.0, + 958289152.0, + 960102720.0, + 980962752.0, + 971064960.0, + 964880256.0, + 948795200.0, + 964321600.0, + 959236608.0, + 988032000.0, + 964875392.0, + 931460544.0, + 970996480.0, + 972416960.0, + 967662656.0, + 954256448.0, + 945120064.0, + 963872000.0, + 964610944.0, + 979560832.0, + 960020160.0, + 950951424.0, + 960822336.0, + 994535232.0, + 958599040.0, + 942380928.0, + 968216000.0, + 947105856.0, + 971760960.0, + 980036736.0, + 963646656.0, + 967657152.0, + 936693440.0, + 963997696.0, + 972429440.0, + 971884224.0, + 956699840.0, + 943542208.0, + 956398720.0, + 982384256.0, + 972313088.0, + 983851520.0, + 955359232.0, + 951103872.0, + 972202688.0, + 986218368.0, + 978935680.0, + 979468096.0, + 934389888.0, + 946535424.0, + 967828992.0, + 951572160.0, + 965640768.0, + 947936128.0, + 967830016.0, + 968956096.0, + 965479936.0, + 969829888.0, + 963991040.0, + 946903872.0, + 971556160.0, + 961360832.0, + 973492480.0, + 967809536.0, + 948909056.0, + 968958976.0, + 981511360.0, + 976309312.0, + 971950272.0, + 945601024.0, + 971416320.0, + 977988608.0, + 958511168.0, + 972856256.0, + 947430848.0, + 960966720.0, + 991648448.0, + 964147264.0, + 952902528.0, + 951459264.0, + 937504256.0, + 972234304.0, + 971107904.0, + 965070272.0, + 961047680.0, + 947676672.0, + 976734656.0, + 979049792.0, + 983569920.0, + 973526592.0, + 938792064.0, + 973177216.0, + 970189824.0, + 984988288.0, + 966742784.0, + 980391424.0, + 946019712.0, + 961028928.0, + 970450240.0, + 960787584.0, + 977265216.0, + 945821120.0, + 956521664.0, + 961719232.0, + 973778304.0, + 964537856.0, + 940492864.0, + 950683200.0, + 955745792.0, + 971825472.0, + 957766528.0, + 939073984.0, + 947324096.0, + 969824128.0, + 973455232.0, + 983569600.0, + 961739712.0, + 938999168.0, + 974623552.0, + 984473728.0, + 949941632.0, + 965813184.0, + 946405376.0, + 968927872.0, + 973865344.0, + 977084224.0, + 964973248.0, + 947135360.0, + 946273472.0, + 972392256.0, + 974191488.0, + 971267776.0, + 972360256.0, + 964770368.0, + 977415488.0, + 984290560.0, + 977601408.0, + 965566272.0, + 954436736.0, + 970806720.0, + 978717184.0, + 982710016.0, + 944809728.0, + 953924480.0, + 974160256.0, + 969196800.0, + 963922560.0, + 966249344.0, + 966987968.0, + 950023616.0, + 974795456.0, + 965070016.0, + 961694848.0, + 981401024.0, + 959930304.0, + 971520768.0, + 980754688.0, + 974664320.0, + 993916992.0, + 937818432.0, + 962183936.0, + 976438720.0, + 963917376.0, + 990203968.0, + 956792064.0, + 943964096.0, + 980457856.0, + 981783616.0, + 954637568.0, + 961753088.0, + 935091328.0, + 965711360.0, + 977455232.0, + 979657920.0, + 970022336.0, + 930166272.0, + 963039936.0, + 972477696.0, + 966914560.0, + 976458048.0, + 967621824.0, + 950980096.0, + 968104384.0, + 970179776.0, + 983982592.0, + 971714880.0, + 956250368.0, + 961398784.0, + 996187264.0, + 983184064.0, + 980320000.0, + 946448128.0, + 963747712.0, + 963375360.0, + 957764736.0, + 971193984.0, + 951312768.0, + 963498624.0, + 980757504.0, + 960145024.0, + 951851264.0, + 975585024.0, + 950430208.0, + 991695616.0, + 977995712.0, + 979880320.0, + 974014528.0, + 948867968.0, + 951865344.0, + 978824000.0, + 983955712.0, + 971592512.0, + 945306560.0, + 965366016.0, + 987411392.0, + 966968768.0, + 978241216.0, + 939017216.0, + 951207360.0, + 959384384.0, + 979550016.0, + 967499968.0, + 968249536.0, + 947116352.0, + 962058560.0, + 986022656.0, + 970979648.0, + 979891520.0, + 958160960.0, + 973625600.0, + 970199936.0, + 936042048.0, + 974542720.0, + 966317376.0, 
+ 967736960.0, + 966451648.0, + 941509312.0, + 946934464.0, + 985022272.0, + 993832640.0, + 963818624.0, + 943571520.0, + 960695104.0, + 964601024.0, + 981035712.0, + 975136896.0, + 963840832.0, + 931345600.0, + 974278464.0, + 977487936.0, + 954886336.0, + 959859008.0, + 949456320.0, + 970041024.0, + 957902336.0, + 967944512.0, + 973971968.0, + 965403520.0, + 970025792.0, + 964872320.0, + 981099712.0, + 980828288.0, + 964662976.0, + 945581120.0, + 967718208.0, + 974442880.0, + 979596928.0, + 950164736.0, + 944028672.0, + 977515072.0, + 958398656.0, + 980205824.0, + 963889408.0, + 949533760.0, + 959894912.0, + 969487680.0, + 966771968.0, + 951593216.0, + 947880576.0, + 939232576.0, + 975483200.0, + 974284544.0, + 980492096.0, + 981782400.0, + 964036672.0, + 969631872.0, + 980470464.0, + 966010624.0, + 986144448.0, + 961163776.0, + 952830144.0, + 965579008.0, + 978729152.0, + 962246720.0, + 958934528.0, + 959206656.0, + 976089920.0, + 977606848.0, + 982533440.0, + 979123648.0, + 959382464.0, + 955497024.0, + 965305408.0, + 954315648.0, + 966078656.0, + 955758144.0, + 970187968.0, + 964871872.0, + 958676800.0, + 964101184.0, + 941854208.0, + 963804224.0, + 988312320.0, + 967540864.0, + 982727936.0, + 950355776.0, + 943842560.0, + 964206144.0, + 967681792.0, + 963985728.0, + 973952960.0, + 946185088.0, + 970556992.0, + 983165184.0, + 977010432.0, + 962128320.0, + 948087296.0, + 955686208.0, + 978376512.0, + 970443776.0, + 988269248.0, + 957204352.0, + 945734592.0, + 975788800.0, + 963080192.0, + 971975808.0, + 964220288.0, + 945264576.0, + 965027840.0, + 987998144.0, + 965919360.0, + 954813696.0, + 952554048.0, + 964279360.0, + 977191296.0, + 972061056.0, + 972829760.0, + 957928576.0, + 970136576.0, + 975593536.0, + 964497088.0, + 966447616.0, + 991256576.0, + 946500160.0, + 969212992.0, + 974068416.0, + 974737024.0, + 974696768.0, + 953009024.0, + 970906496.0, + 977557248.0, + 967094592.0, + 976999296.0, + 958727872.0, + 962367936.0, + 969229184.0, + 978496960.0, + 978030848.0, + 972421888.0, + 943807296.0, + 976963264.0, + 975615744.0, + 967325888.0, + 977507776.0, + 943290176.0, + 975217408.0, + 982035392.0, + 968135040.0, + 977053632.0, + 945691264.0, + 948240960.0, + 962050432.0, + 968998144.0, + 973971456.0, + 952708352.0, + 940449408.0, + 959019968.0, + 969570880.0, + 966268352.0, + 963873344.0, + 941665664.0, + 972860800.0, + 975966528.0, + 975972416.0, + 974561152.0, + 950864512.0, + 958872064.0, + 973215040.0, + 960422528.0, + 965365824.0, + 975283840.0, + 974657280.0, + 975230720.0, + 971587072.0, + 972595392.0, + 957307456.0, + 958130496.0, + 985611072.0, + 962916096.0, + 959985664.0, + 969508096.0, + 953627840.0, + 954611712.0, + 968197440.0, + 982575296.0, + 983971712.0, + 958382656.0, + 957011328.0, + 968621760.0, + 977772928.0, + 998082496.0, + 950702016.0, + 941183360.0, + 978453760.0, + 991169664.0, + 981956224.0, + 975812992.0, + 938254912.0, + 939543296.0, + 971699072.0, + 973283392.0, + 971791808.0, + 947605632.0, + 953044544.0, + 959760256.0, + 967676480.0, + 974007296.0, + 964421184.0, + 951546560.0, + 981677760.0, + 972406784.0, + 970918080.0, + 968455232.0, + 939796992.0, + 973474048.0, + 960469952.0, + 976404352.0, + 966766336.0, + 946603136.0, + 975151360.0, + 971010688.0, + 969470912.0, + 951497216.0, + 947792768.0, + 958404160.0, + 975930752.0, + 975256704.0, + 962240064.0, + 977318272.0, + 930841024.0, + 960398592.0, + 968235712.0, + 967766272.0, + 985691520.0, + 955201024.0, + 960639616.0, + 978853184.0, + 987039808.0, + 978473792.0, + 966890048.0, + 944261120.0, 
+ 963210752.0, + 975936000.0, + 974629632.0, + 970267712.0, + 937977728.0, + 962709760.0, + 981735744.0, + 962920832.0, + 967363200.0, + 952978240.0, + 972963904.0, + 971441536.0, + 971740672.0, + 962539584.0, + 939496000.0, + 977551808.0, + 981093568.0, + 975887936.0, + 972821696.0, + 961747328.0, + 945311552.0, + 967977024.0, + 969105664.0, + 980798848.0, + 966242944.0, + 949797760.0, + 983714816.0, + 970833920.0, + 945755200.0, + 967193728.0, + 961072960.0, + 956049344.0, + 979794496.0, + 979409536.0, + 955583360.0, + 948328320.0, + 945778432.0, + 971047360.0, + 973259072.0, + 972804544.0, + 970561536.0, + 942075648.0, + 957838336.0, + 967917248.0, + 963194752.0, + 968840832.0, + 948597440.0, + 963875776.0, + 971170816.0, + 976631872.0, + 969871552.0, + 946608768.0, + 950977728.0, + 952536384.0, + 966601472.0, + 972600896.0, + 975292352.0, + 959579648.0, + 973327872.0, + 964020992.0, + 955797888.0, + 968677632.0, + 956773120.0, + 965181312.0, + 968129920.0, + 972135936.0, + 951427968.0, + 955557184.0, + 948683008.0, + 981261312.0, + 971077056.0, + 971476992.0, + 950447680.0, + 940191872.0, + 970275584.0, + 983097728.0, + 965519488.0, + 968750784.0, + 938610752.0, + 969587456.0, + 990765376.0, + 966579200.0, + 968761920.0, + 949857408.0, + 948727552.0, + 969548608.0, + 969403200.0, + 990644480.0, + 956706304.0, + 950657024.0, + 966418112.0, + 956336960.0, + 982417664.0, + 965955328.0, + 952908608.0, + 963553920.0, + 972780928.0, + 956650688.0, + 965306048.0, + 941660096.0, + 975323392.0, + 966645952.0, + 969169728.0, + 972649984.0, + 952689984.0, + 941567872.0, + 971130496.0, + 964185920.0, + 967277056.0, + 953614592.0, + 941154880.0, + 975814336.0, + 971055232.0, + 966501440.0, + 966091072.0, + 945412544.0, + 962485952.0, + 976194048.0, + 963347008.0, + 978652800.0, + 955002496.0, + 940073856.0, + 971572480.0, + 954295040.0, + 955446400.0, + 951189824.0, + 950797120.0, + 977538752.0, + 973928576.0, + 956914048.0, + 955816960.0, + 950760320.0, + 964704512.0, + 970046208.0, + 981827008.0, + 986930624.0, + 959809280.0, + 949219008.0, + 971119360.0, + 966263488.0, + 990474432.0, + 960558656.0, + 957388992.0, + 974515968.0, + 973033600.0, + 967214848.0, + 964596992.0, + 957578368.0, + 971036800.0, + 961374784.0, + 961208576.0, + 967172672.0, + 938409344.0, + 974357888.0, + 978312384.0, + 952390400.0, + 969554304.0, + 953537472.0, + 943304960.0, + 987164928.0, + 982131200.0, + 979497856.0, + 957850240.0, + 929631232.0, + 975801408.0, + 980536896.0, + 981911744.0, + 977005312.0, + 937022720.0, + 968465728.0, + 976653760.0, + 980911808.0, + 967166400.0, + 947136960.0, + 958265536.0, + 959514112.0, + 966745216.0, + 958495744.0, + 975898752.0, + 934748992.0, + 956127744.0, + 968496960.0, + 976967936.0, + 975069120.0, + 957240064.0, + 971066816.0, + 957350272.0, + 971126272.0, + 977724992.0, + 947334400.0, + 971159040.0, + 968806464.0, + 975622528.0, + 977878912.0, + 963468544.0, + 944571840.0, + 962560704.0, + 981287808.0, + 979527168.0, + 957787712.0, + 939059136.0, + 968560320.0, + 980471168.0, + 976077120.0, + 972815104.0, + 954501120.0, + 965551424.0, + 976883648.0, + 986746304.0, + 969805632.0, + 962991360.0, + 947270784.0, + 985450368.0, + 964469056.0, + 966434240.0, + 957432448.0, + 942539968.0, + 974965120.0, + 956821568.0, + 965717184.0, + 967280000.0, + 950170176.0, + 959438528.0, + 958206848.0, + 978313664.0, + 971045696.0, + 928319296.0, + 949907840.0, + 970086144.0, + 971957312.0, + 970379520.0, + 977625600.0, + 961762432.0, + 974383168.0, + 950196736.0, + 956589056.0, + 952494016.0, 
+ 952963392.0, + 966447104.0, + 964753792.0, + 977809344.0, + 966891072.0, + 952598016.0, + 972681600.0, + 971991616.0, + 968174080.0, + 985592512.0, + 954238016.0, + 947522816.0, + 974858176.0, + 964008704.0, + 968909888.0, + 961032960.0, + 936363328.0, + 975954624.0, + 981625152.0, + 994467392.0, + 961430912.0, + 934254912.0, + 964986752.0, + 973942400.0, + 967812352.0, + 968865536.0, + 969067456.0, + 943684928.0, + 968590976.0, + 963968000.0, + 971096768.0, + 957842560.0, + 936353728.0, + 975125632.0, + 956620672.0, + 972860480.0, + 969087808.0, + 959302080.0, + 967400512.0, + 959865216.0, + 962037952.0, + 967559040.0, + 957276480.0, + 971644800.0, + 960631424.0, + 962179072.0, + 966372992.0, + 940861824.0, + 947034432.0, + 978493568.0, + 980964032.0, + 973897152.0, + 957368704.0, + 950063808.0, + 962376128.0, + 972362176.0, + 974479680.0, + 964861760.0, + 947381952.0, + 960873984.0, + 986188352.0, + 986653760.0, + 968953664.0, + 961995392.0, + 937458432.0, + 976422848.0, + 987054272.0, + 976164672.0, + 966153088.0, + 940852864.0, + 966988864.0, + 972323200.0, + 984427072.0, + 966263808.0, + 960219712.0, + 962609600.0, + 972372800.0, + 965145600.0, + 983216384.0, + 959191616.0, + 952017408.0, + 984503744.0, + 972381760.0, + 970925888.0, + 983927808.0, + 955741696.0, + 964807360.0, + 962663488.0, + 971010688.0, + 970122432.0, + 940264320.0, + 981067008.0, + 972853248.0, + 951584448.0, + 979899008.0, + 935559552.0, + 952981376.0, + 981274304.0, + 975550528.0, + 974468864.0, + 964181056.0, + 955264064.0, + 971069184.0, + 967755712.0, + 969733312.0, + 988884352.0, + 952290752.0, + 965828288.0, + 974338560.0, + 988345344.0, + 975821120.0, + 938162624.0, + 949911744.0, + 972447872.0, + 978433408.0, + 962590848.0, + 957763136.0, + 939956288.0, + 979724096.0, + 973045824.0, + 977720064.0, + 953820544.0, + 953856128.0, + 973335232.0, + 979608192.0, + 974271872.0, + 980235776.0, + 954629952.0, + 979941952.0, + 976590080.0, + 966590592.0, + 974112512.0, + 964212800.0, + 945699456.0, + 969678976.0, + 955055424.0, + 971074752.0, + 966606144.0, + 952254144.0, + 971029952.0, + 962129024.0, + 962424832.0, + 970693376.0, + 952322688.0, + 958444608.0, + 972927872.0, + 968439296.0, + 966921856.0, + 950682816.0, + 951214208.0, + 973332928.0, + 992733952.0, + 971332224.0, + 976244288.0, + 954827392.0, + 963696832.0, + 970490880.0, + 981176320.0, + 964407872.0, + 934496832.0, + 975446144.0, + 962134144.0, + 976278912.0, + 980446144.0, + 948943552.0, + 962128320.0, + 954484032.0, + 969118272.0, + 956123968.0, + 962984768.0, + 954831104.0, + 954525824.0, + 978705344.0, + 973173888.0, + 977060416.0, + 949226304.0, + 972265280.0, + 979094720.0, + 968731776.0, + 960491584.0, + 949041920.0, + 978376384.0, + 971173888.0, + 954371712.0, + 961983744.0, + 951910720.0, + 952841920.0, + 990631168.0, + 972559168.0, + 959304832.0, + 971809536.0, + 942383168.0, + 965231104.0, + 974464640.0, + 981380224.0, + 958910912.0, + 957755456.0, + 970527680.0, + 976583872.0, + 970140992.0, + 968759104.0, + 965870848.0, + 960037696.0, + 969258816.0, + 954101824.0, + 982122048.0, + 960570496.0, + 945480384.0, + 964073024.0, + 985843840.0, + 973869440.0, + 970753088.0, + 941949824.0, + 958639936.0, + 971054208.0, + 976788544.0, + 981324224.0, + 953916544.0, + 968612352.0, + 971466112.0, + 965251200.0, + 982072064.0, + 964857152.0, + 936473728.0, + 989248384.0, + 975082304.0, + 956401728.0, + 959035776.0, + 959967424.0, + 974521216.0, + 964208128.0, + 974205568.0, + 966699008.0, + 948480000.0, + 957248448.0, + 972548992.0, + 967801280.0, 
+ 959898688.0, + 959591616.0, + 945049280.0, + 976845568.0, + 974118720.0, + 965360896.0, + 970676544.0, + 956050432.0, + 973345088.0, + 971380544.0, + 977565440.0, + 972866368.0, + 946684352.0, + 954313024.0, + 956690304.0, + 967831104.0, + 980876544.0, + 956017472.0, + 951929920.0, + 952476416.0, + 971459456.0, + 965668800.0, + 973300480.0, + 935285760.0, + 965915712.0, + 963629632.0, + 981445312.0, + 974570240.0, + 939342400.0, + 958580288.0, + 975360320.0, + 963977280.0, + 967263616.0, + 950951936.0, + 952195008.0, + 991103360.0, + 966405504.0, + 967564288.0, + 962578432.0, + 950104448.0, + 968568384.0, + 981835264.0, + 968462592.0, + 965158400.0, + 947679296.0, + 976035520.0, + 957253568.0, + 967911040.0, + 956425984.0, + 955563840.0, + 961449024.0, + 969612288.0, + 967868416.0, + 965920512.0, + 956017536.0, + 936955008.0, + 956162496.0, + 958886656.0, + 985937472.0, + 961879680.0, + 927042112.0, + 962634688.0, + 960232192.0, + 970858112.0, + 961795136.0, + 945729600.0, + 964316544.0, + 962578880.0, + 976056064.0, + 968943744.0, + 954059968.0, + 952211520.0, + 965631808.0, + 984753216.0, + 978760896.0, + 993282944.0, + 950888576.0, + 976827968.0, + 972381504.0, + 942402944.0, + 964386496.0, + 929799296.0, + 951978240.0, + 967774784.0, + 976081344.0, + 968537152.0, + 956775168.0, + 957879360.0, + 970892800.0, + 972498240.0, + 967353920.0, + 980255872.0, + 940492160.0, + 980501248.0, + 954292736.0, + 966397696.0, + 963227584.0, + 954642240.0, + 963893568.0, + 974775808.0, + 983215168.0, + 977195136.0, + 951423424.0, + 956400384.0, + 963034880.0, + 974961216.0, + 971533504.0, + 962922752.0, + 950258048.0, + 975637824.0, + 957381376.0, + 969819264.0, + 980625664.0, + 946940992.0, + 960805248.0, + 962985728.0, + 964744704.0, + 969727040.0, + 949888896.0, + 972552320.0, + 960921536.0, + 970367104.0, + 978911296.0, + 947163200.0, + 934725952.0, + 968431424.0, + 967967424.0, + 959508992.0, + 959371648.0, + 964364416.0, + 960264960.0, + 991169664.0, + 971080192.0, + 952040128.0, + 938593728.0, + 963616704.0, + 962915776.0, + 971360640.0, + 982134016.0, + 964101120.0, + 951304960.0, + 950762368.0, + 972821504.0, + 961204928.0, + 988112064.0, + 942493184.0, + 979211392.0, + 957885760.0, + 986932800.0, + 970256576.0, + 939706496.0, + 959901120.0, + 958390080.0, + 964506688.0, + 977971904.0, + 961017216.0, + 957600000.0, + 974556672.0, + 952241536.0, + 966172672.0, + 971275840.0, + 954361856.0, + 973363648.0, + 980531904.0, + 969458432.0, + 966105792.0, + 935105472.0, + 984801216.0, + 969606464.0, + 961833088.0, + 966787008.0, + 928877632.0, + 966106880.0, + 971920768.0, + 972951680.0, + 970035072.0, + 935989184.0, + 967732800.0, + 974979584.0, + 979531072.0, + 976741184.0, + 965059840.0, + 935687872.0, + 960361088.0, + 966069056.0, + 973935680.0, + 965925248.0, + 945275712.0, + 976974144.0, + 964358144.0, + 975030272.0, + 977392256.0, + 963665920.0, + 964441664.0, + 973377024.0, + 966946176.0, + 976391488.0, + 954897472.0, + 956866240.0, + 971802688.0, + 968992256.0, + 955209856.0, + 992473408.0, + 944883584.0, + 970791744.0, + 956528576.0, + 965104256.0, + 968989952.0, + 956756928.0, + 942567424.0, + 977942400.0, + 968843072.0, + 975863936.0, + 962286592.0, + 946168640.0, + 976761472.0, + 973547968.0, + 966428032.0, + 968753280.0, + 941782848.0, + 981568896.0, + 970650112.0, + 962541312.0, + 958340224.0, + 941857984.0, + 953693376.0, + 971379968.0, + 971345344.0, + 982117120.0, + 943655424.0, + 931906176.0, + 967732992.0, + 982273920.0, + 969781568.0, + 970274816.0, + 937666816.0, + 978240704.0, 
+ 962954240.0, + 959127104.0, + 970177088.0, + 957474816.0, + 956872576.0, + 957039936.0, + 967306624.0, + 958317248.0, + 966183808.0, + 964544192.0, + 964224640.0, + 964107264.0, + 964336768.0, + 961554688.0, + 936262784.0, + 983615040.0, + 978130176.0, + 952057216.0, + 956989952.0, + 935590848.0, + 965808384.0, + 967137984.0, + 975077312.0, + 982873664.0, + 941896384.0, + 967565760.0, + 964772160.0, + 964473024.0, + 973621440.0, + 959005760.0, + 952679040.0, + 958533056.0, + 967776576.0, + 973948352.0, + 964684608.0, + 941531456.0, + 967012864.0, + 978867712.0, + 979581120.0, + 967211712.0, + 944920640.0, + 955282240.0, + 986541760.0, + 953864896.0, + 966455168.0, + 953371904.0, + 954932480.0, + 979053888.0, + 963094080.0, + 982322816.0, + 969551296.0, + 951063616.0, + 980398208.0, + 968261440.0, + 975850688.0, + 961723520.0, + 941625920.0, + 966718784.0, + 976810368.0, + 961055040.0, + 949607744.0, + 951856064.0, + 949875648.0, + 968905344.0, + 959880128.0, + 953734528.0, + 969506368.0, + 944838912.0, + 951733312.0, + 982731328.0, + 979609024.0, + 964157632.0, + 939245632.0, + 979957696.0, + 974389504.0, + 979366144.0, + 960522112.0, + 943308160.0, + 964122240.0, + 976184000.0, + 978814336.0, + 964108864.0, + 949786048.0, + 946045504.0, + 969010816.0, + 969111616.0, + 971748352.0, + 980925824.0, + 943806784.0, + 959547520.0, + 968200320.0, + 967044224.0, + 975558272.0, + 954056000.0, + 959260416.0, + 958877120.0, + 972952960.0, + 970241728.0, + 967678400.0, + 932853888.0, + 972036416.0, + 971098624.0, + 959330944.0, + 958193856.0, + 949360192.0, + 992170560.0, + 971812800.0, + 963243648.0, + 964975104.0, + 961604480.0, + 955190720.0, + 981019136.0, + 972152448.0, + 984506624.0, + 971607616.0, + 944088832.0, + 970249344.0, + 979284608.0, + 974471488.0, + 968311872.0, + 940442560.0, + 965120768.0, + 971700928.0, + 976549184.0, + 961977920.0, + 951775168.0, + 962387776.0, + 959103488.0, + 984542784.0, + 966540032.0, + 945429760.0, + 960994688.0, + 975391424.0, + 969736512.0, + 966153408.0, + 969823616.0, + 948580608.0, + 992408512.0, + 971539072.0, + 979703936.0, + 982296128.0, + 956905920.0, + 992412480.0, + 969729088.0, + 962617472.0, + 960805632.0, + 951380928.0, + 956359424.0, + 976190080.0, + 966485312.0, + 971786240.0, + 979065536.0, + 964077952.0, + 974641792.0, + 968888128.0, + 968237696.0, + 963236864.0, + 953285312.0, + 965282176.0, + 981066880.0, + 968741888.0, + 972894400.0, + 942543232.0, + 970599680.0, + 964458624.0, + 985496256.0, + 980776640.0, + 955896832.0, + 962174912.0, + 961911616.0, + 970182528.0, + 966946176.0, + 968785216.0, + 948882816.0, + 965135168.0, + 967639040.0, + 978747776.0, + 986414592.0, + 939405952.0, + 979846208.0, + 970650752.0, + 968850368.0, + 981602240.0, + 961640512.0, + 946454656.0, + 973582016.0, + 964789632.0, + 961473600.0, + 968343040.0, + 949969984.0, + 971928448.0, + 971314880.0, + 959104192.0, + 963365952.0, + 952575168.0, + 965523456.0, + 965695552.0, + 960338624.0, + 962264512.0, + 944583936.0, + 960016064.0, + 977076800.0, + 967023296.0, + 966516672.0, + 962657408.0, + 958591808.0, + 974540544.0, + 979476288.0, + 972672384.0, + 980966272.0, + 944032128.0, + 985064960.0, + 964441984.0, + 972774784.0, + 983797056.0, + 934553472.0, + 955930688.0, + 964994944.0, + 969340992.0, + 968677632.0, + 970268736.0, + 938323648.0, + 971731968.0, + 964198592.0, + 989328320.0, + 971778368.0, + 940107776.0, + 981913536.0, + 985520640.0, + 981864576.0, + 985729984.0, + 947180480.0, + 967223616.0, + 977136576.0, + 971312512.0, + 958620160.0, + 937219776.0, 
+ [... remaining per-step samples of the preceding metric's "values" array (each roughly 9.2e8-9.9e8) elided ...]
+ ]
+ },
+ "mem-allocated-bytes": {
+ "start_step": 0,
+ "end_step": 53183,
+ "step_interval": 5,
+ "values": [
+ 12697244672.0,
+ [... long run of samples constant at 12697245696.0 elided ...]
+ [... long run of samples constant at 12697444352.0 elided; the series continues beyond this excerpt ...]
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 
12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0, + 12697444352.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 538, + "step_interval": 5, + "values": [ + 3.57882, + 3.46053, + 3.44071, + 3.42167, + 3.41557, + 3.41167, + 3.40639, + 3.70469, + 3.40614, + 3.40461, + 3.40418, + 3.40457, + 3.4058, + 3.40552, + 3.40432, + 3.40132, + 3.39974, + 3.3997, + 3.39899, + 3.39892, + 3.40303, + 3.40219, + 3.4023, + 3.40694, + 3.40754, + 3.40621, + 3.40622, + 3.4068, + 3.40662, + 3.40558, + 3.40207, + 3.40601, + 3.40247, + 3.40246, + 3.40214, + 3.39978, + 3.40364, + 3.4028, + 3.41529, + 3.41488, + 3.41506, + 3.41612, + 3.4147, + 3.41362, + 3.41415, + 3.41328, + 3.40772, + 3.40883, + 3.40722, + 3.40638, + 3.40584, + 3.40696, + 3.40764, + 3.40703, + 3.40757, + 3.40934, + 3.40798, + 3.41966, + 3.40136, + 3.4013, + 3.40199, + 3.39865, + 3.39971, + 3.3997, + 3.39925, + 3.3985, + 3.3998, + 3.39822, + 3.39886, + 3.39721, + 7.76452, + 3.40286, + 3.3966, + 3.39748, + 3.39707, + 3.3953, + 3.39593, + 3.39593, + 3.39676, + 3.40901, + 3.40664, + 3.40628, + 3.40597, + 3.40474, + 3.40642, + 3.40886, + 3.47945, + 3.48178, + 3.48155, + 3.48108, + 3.48205, + 3.48135, + 3.48201, + 3.59385, + 3.48346, + 3.48397, + 3.48308, + 3.48148, + 3.48175, + 3.48116, + 3.48024, + 3.4036, + 3.40301, + 3.40493, + 3.40385, + 3.40345, + 3.40351, + 3.40362 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml index bf8879215..b600345a9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml @@ -6,9 +6,7 @@ ENV_VARS: NVTE_BWD_LAYERNORM_SM_MARGIN: 16 NCCL_P2P_NET_CHUNKSIZE: 2097152 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - TEST_TYPE: "release" - MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -17,7 +15,6 @@ MODEL_ARGS: --use-distributed-optimizer: true --overlap-grad-reduce: true --overlap-param-gather: true - # Training args --use-mcore-models: true --sequence-parallel: true @@ -27,10 +24,8 @@ MODEL_ARGS: --global-batch-size: 1152 --train-samples: 19531250 --manual-gc: true - # Transformer Engine args --transformer-impl: transformer_engine - # Data args --data-cache-path: ${DATA_CACHE_PATH} --tokenizer-type: GPTSentencePieceTokenizer @@ -39,12 +34,11 @@ MODEL_ARGS: --split: 99,1,0 --no-mmap-bin-files: true --num-workers: 6 - # Add network size args --apply-layernorm-1p: true --untie-embeddings-and-output-weights: true - --no-position-embedding: true - --use-rotary-position-embeddings: true + --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 0.5 --squared-relu: true --num-layers: 32 @@ -54,13 +48,11 @@ MODEL_ARGS: --num-query-groups: 8 --seq-length: 4096 --max-position-embeddings: 4096 - # Add regularization args --attention-dropout: 0.0 --hidden-dropout: 0.0 --clip-grad: 1.0 --weight-decay: 0.1 - # Add learning rate args --lr-decay-samples: 1949218748 --lr-warmup-samples: 3906252 @@ -71,19 +63,15 @@ MODEL_ARGS: 
--lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 - # Add validation args --eval-iters: 32 --eval-interval: 2000 - # Add checkpointing args --load: ${OUTPUT_PATH}/checkpoints --save: ${OUTPUT_PATH}/checkpoints - --save-interval: 500 - + --save-interval: 5000 # Add initialization args --init-method-std: 0.0134 - # Add logging args --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true @@ -95,6 +83,5 @@ MODEL_ARGS: --tensorboard-dir: ${OUTPUT_PATH}/tensorboard --wandb-project: megatron-core-release-runs --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args - --bf16: true \ No newline at end of file + --bf16: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml index 9453db100..418a7ed8d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml @@ -6,9 +6,7 @@ ENV_VARS: NVTE_BWD_LAYERNORM_SM_MARGIN: 16 NCCL_P2P_NET_CHUNKSIZE: 2097152 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - TEST_TYPE: "release" - MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -17,7 +15,6 @@ MODEL_ARGS: --use-distributed-optimizer: true --overlap-grad-reduce: true --overlap-param-gather: true - # Training args --use-mcore-models: true --sequence-parallel: true @@ -27,10 +24,8 @@ MODEL_ARGS: --global-batch-size: 1152 --train-samples: 4882812 --manual-gc: true - # Transformer Engine args --transformer-impl: transformer_engine - # Data args --data-cache-path: ${DATA_CACHE_PATH} --tokenizer-type: GPTSentencePieceTokenizer @@ -39,12 +34,11 @@ MODEL_ARGS: --split: 99,1,0 --no-mmap-bin-files: true --num-workers: 6 - # Add network size args --apply-layernorm-1p: true --untie-embeddings-and-output-weights: true - --no-position-embedding: true - --use-rotary-position-embeddings: true + --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 0.5 --squared-relu: true --num-layers: 32 @@ -54,13 +48,11 @@ MODEL_ARGS: --num-query-groups: 8 --seq-length: 4096 --max-position-embeddings: 4096 - # Add regularization args --attention-dropout: 0.0 --hidden-dropout: 0.0 --clip-grad: 1.0 --weight-decay: 0.1 - # Add learning rate args --lr-decay-samples: 1949218748 --lr-warmup-samples: 3906252 @@ -71,19 +63,15 @@ MODEL_ARGS: --lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 - # Add validation args --eval-iters: 32 --eval-interval: 2000 - # Add checkpointing args --load: ${OUTPUT_PATH}/checkpoints --save: ${OUTPUT_PATH}/checkpoints - --save-interval: 500 - + --save-interval: 1000 # Add initialization args --init-method-std: 0.0134 - # Add logging args --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true @@ -95,6 +83,5 @@ MODEL_ARGS: --tensorboard-dir: ${OUTPUT_PATH}/tensorboard --wandb-project: megatron-core-release-runs --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args - --bf16: true \ No newline at end of file + --bf16: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json new file mode 100644 index 000000000..ce02aad6c --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.84013, + 10.8726, + 10.85028, + 10.7965, + 10.68165, + 10.60635, + 10.12791, + 10.22204, + 10.13807, + 9.82329 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1715.0, + 1828.0, + 1929.0, + 2000.0, + 1947.0, + 1769.0, + 1649.0, + 2052.0, + 2353.0, + 2301.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 5.42717, + 0.09122, + 0.08825, + 0.08981, + 0.08828, + 0.08996, + 0.08919, + 0.0901, + 0.08957, + 0.08977 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml index 459270a1b..69ad59f08 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -50,4 +49,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml similarity index 75% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/model_config.yaml rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml index ba219d444..da4f2c131 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml @@ -1,9 +1,8 @@ ENV_VARS: - CUDA_DEVICE_MAX_CONNECTIONS: 1 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 + N_REPEATS: 10 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ 
-17,7 +16,7 @@ MODEL_ARGS: --global-batch-size: 32 --seq-length: 1024 --max-position-embeddings: 1024 - --train-iters: 50 + --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 --save: ${CHECKPOINT_PATH} @@ -34,21 +33,20 @@ MODEL_ARGS: --clip-grad: 1.0 --lr-warmup-fraction: .01 --log-interval: 1 - --save-interval: 10000 + --save-interval: 50 --eval-interval: 1000 --eval-iters: 10 - --transformer-impl: local - --tensor-model-parallel-size: 2 - --pipeline-model-parallel-size: 2 - --sequence-parallel: true - --num-experts: 4 - --moe-router-load-balancing-type: sinkhorn - --moe-router-topk: 1 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 1 + --use-torch-fsdp2: true --deterministic-mode: true --no-gradient-accumulation-fusion: true - --ckpt-format: torch - --use-legacy-models: true + --no-async-tensor-model-parallel-allreduce: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: ckpt-resume \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml index dcb80dc00..fd1e7253c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -51,4 +50,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json new file mode 100644 index 000000000..9895a353a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.83373, + 10.86683, + 10.89023, + 10.81051, + 10.68459, + 10.60979, + 10.08992, + 10.21481, + 10.14018, + 9.80603 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1488.0, + 1854.0, + 1854.0, + 1884.0, 
+ 1794.0, + 1784.0, + 1569.0, + 1942.0, + 2263.0, + 2147.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 13.39475, + 0.14158, + 0.14256, + 0.14166, + 0.14243, + 0.14232, + 0.143, + 0.14113, + 0.14164, + 0.14069 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml index d94f5277d..2b9410873 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values_lts.json new file mode 100644 index 000000000..418a8d65d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.83369, 10.86796, 10.8992, 10.86517, 10.85506, 10.82693, 10.6268, 10.61756, 10.53014, 10.24593]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2173.0, 2276.0, 2414.0, 2449.0, 2193.0, 1934.0, 2524.0]}, "iteration_timing_avg": 0.11905411764705882} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml index 9f210d838..d9ed9c760 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values.json 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json new file mode 100644 index 000000000..fa1ca531d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.83377, 10.86686, 10.89018, 10.81039, 10.68443, 10.60957, 10.08966, 10.21453, 10.13998, 9.80584, 9.83013, 9.60653, 9.67621, 9.68788, 9.59862, 9.07653, 9.47156, 9.06787, 9.32985, 9.51568]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1566.0, 1800.0, 1833.0, 1834.0, 1824.0, 1641.0, 1539.0, 1880.0, 2289.0, 2267.0, 2472.0, 2970.0, 3076.0, 3074.0, 3018.0, 2972.0, 3783.0, 2794.0, 2743.0, 3289.0]}, "iteration_timing_avg": 0.12010238805970147} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml index b943bfec0..abb85baa5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json new file mode 100644 index 000000000..4924720d7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.79206, + 10.86691, + 10.89065, + 10.78186, + 10.65978, + 10.58022, + 10.08207, + 10.19156, + 10.13495, + 9.81167 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, 
+ "step_interval": 5, + "values": [ + 1626.0, + 1866.0, + 1959.0, + 1816.0, + 1890.0, + 1654.0, + 1537.0, + 1965.0, + 2436.0, + 2405.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 21.9348, + 0.1633, + 0.16334, + 0.16269, + 0.16133, + 0.16064, + 0.16007, + 0.15926, + 0.1592, + 0.15982 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml index 108cb6b1a..e40b6f61e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml index 1c2a42eaa..a2960f3a3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml index cb0214f26..6beae45b8 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -52,4 +51,4 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json new file mode 100644 index 000000000..3dddf6c91 --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82445, + 10.86393, + 10.85733, + 10.80809, + 10.70951, + 10.63738, + 10.16425, + 10.28201, + 10.19003, + 9.88697 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 12678.0, + 16220.0, + 16626.0, + 16055.0, + 13829.0, + 14904.0, + 12931.0, + 15765.0, + 16771.0, + 17621.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 16.34149, + 0.66962, + 0.66905, + 0.66791, + 0.67695, + 0.66977, + 0.67438, + 0.67368, + 0.6714, + 0.67874 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json similarity index 58% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json index 58284659f..8db9f81b4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json @@ -4,16 +4,16 @@ "end_step": 50, "step_interval": 5, "values": [ - 10.81962, - 10.8674, - 10.8579, - 10.80754, - 10.71119, - 10.63665, - 10.16221, - 10.27928, - 10.18787, - 9.88951 + 10.82445, + 10.86393, + 10.85733, + 10.80809, + 10.70951, + 10.63738, + 10.16425, + 10.28201, + 10.19003, + 9.88697 ] }, "num-zeros": { @@ -21,16 +21,16 @@ "end_step": 50, "step_interval": 5, "values": [ - 12597.0, - 15988.0, - 16507.0, - 15995.0, - 14088.0, - 14994.0, - 12887.0, - 15815.0, - 17049.0, - 17592.0 + 12678.0, + 16220.0, + 16626.0, + 16055.0, + 13829.0, + 14904.0, + 12931.0, + 15765.0, + 16771.0, + 17621.0 ] }, "iteration-time": { diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml index 97d3d8c5f..d50c59d5f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -51,4 +50,4 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml index 
1a1582573..2b01cfa62 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -52,4 +51,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/golden_values.json deleted file mode 100644 index a675a63d5..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/golden_values.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79574, - 10.84041, - 10.81392, - 10.7652, - 10.65759, - 10.56196, - 10.08853, - 10.21342, - 10.11653, - 9.83431 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2977.0, - 3533.0, - 3432.0, - 3418.0, - 3277.0, - 3305.0, - 2851.0, - 3325.0, - 3684.0, - 3712.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 25.64274, - 0.6941, - 0.69152, - 0.69181, - 0.69128, - 0.68614, - 0.68462, - 0.6845, - 0.68711, - 0.68237 - ] - } -} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json new file mode 100644 index 000000000..4172a17a7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.86122, + 10.88647, + 10.87773, + 10.83111, + 10.7165, + 10.60619, + 10.13147, + 10.22767, + 10.15929, + 9.83482 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1694.0, + 2148.0, + 2169.0, + 2103.0, + 1991.0, + 1900.0, + 1707.0, + 2189.0, + 2557.0, + 2606.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.61991, + 0.29135, + 0.28852, + 0.28971, + 0.29221, + 0.28994, + 0.28976, + 0.28887, + 0.28975, + 0.2869 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml 
index 37cc4615a..267a290a5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml index 528b691a2..77c55fac9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml index 4f5e8d93b..d5d441366 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json new file mode 100644 index 000000000..9fe4f01d8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json @@ -0,0 +1,50 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.87346, + 10.89625, + 10.88939, + 10.88681, + 10.8893, + 10.84863, + 10.6962, + 10.63919, + 10.53931, + 10.31119 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 4.95266, + 0.07818, + 0.07961, + 0.07716, + 0.08368, + 0.08327, + 0.08409, + 0.08371, + 0.08372, + 0.08387 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 32, + "step_interval": 5, + "values": [ + 1300.0, + 1287.0, + 1565.0, + 1441.0, + 1419.0, + 1295.0, + 1177.0 + ] + } +} \ No newline at end of file diff 
--git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml index 64d504bf2..7fac1317c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -49,4 +48,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json new file mode 100644 index 000000000..bad34329d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json @@ -0,0 +1,50 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.87346, + 10.89625, + 10.88939, + 10.88681, + 10.88931, + 10.84864, + 10.6962, + 10.63918, + 10.5393, + 10.31119 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 5.32064, + 0.08204, + 0.08233, + 0.08176, + 0.09748, + 0.0966, + 0.09648, + 0.09617, + 0.09604, + 0.09646 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 32, + "step_interval": 5, + "values": [ + 1112.0, + 1124.0, + 1229.0, + 1665.0, + 1269.0, + 1219.0, + 1572.0 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml index 190e5777f..2c05343a1 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json new file mode 100644 index 000000000..6c6d8e79f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.84009, 10.89314, 10.908, 10.87524, 10.86367, 10.83848, 10.64647, 10.62126, 10.53743, 10.24831]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2044.0, 2242.0, 2368.0, 2598.0, 2188.0, 1850.0, 2436.0]}, "iteration_timing_avg": 0.10581941176470588} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml index 99d0ac8f6..2d4f4d2a1 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json new file mode 100644 index 000000000..d4a5cfb78 --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.84009, 10.89314, 10.908, 10.87524, 10.86367, 10.83848, 10.64647, 10.62126, 10.53743, 10.24831, 10.20828, 9.96658, 9.97022, 9.92437, 9.79137, 9.26612, 9.61914, 9.19057, 9.46177, 9.62185]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2044.0, 2242.0, 2368.0, 2598.0, 2188.0, 1850.0, 2436.0, 2732.0, 2678.0, 2452.0, 2879.0, 2572.0, 3456.0, 3237.0, 2990.0, 3067.0, 3173.0]}, "iteration_timing_avg": 0.10533134328358208} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml index 6242b2ebb..05eb509e6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json new file mode 100644 index 000000000..0f5ad40c1 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0]}, "iteration_timing_avg": 0.1367805882352941} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml index 81727e052..4b1288dbe 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -47,4 +46,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: 
regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json new file mode 100644 index 000000000..b9816fbf8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0]}, "iteration_timing_avg": 0.13371323529411766} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml index 525d0f2c9..d55fb7510 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json new file mode 100644 index 000000000..4cf16ef91 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087, 10.19557, 9.94382, 9.95175, 9.90538, 9.79357, 9.25904, 9.61568, 9.19187, 9.46047, 9.6229]}, 
"num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0, 3566.0, 3139.0, 3236.0, 3208.0, 3413.0, 3913.0, 3194.0, 3581.0, 3625.0, 4695.0]}, "iteration_timing_avg": 0.1320626865671642} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml index 516e1dd51..c0aceac27 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json new file mode 100644 index 000000000..302a1524b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79311, 10.85248, 10.87281, 10.83016, 10.82949, 10.78726, 10.565, 10.57088, 10.4836, 10.19521]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [2450.0, 2765.0, 2163.0, 2585.0, 2634.0, 2585.0, 2987.0]}, "iteration_timing_avg": 0.1333435294117647} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml index 10fc8c2f2..c2439f9f3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -49,4 +48,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/golden_values.json deleted file mode 100644 index 114dfb1e2..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.80264, 10.85778, 10.86259, 10.83903, 10.82934, 10.81016, 10.60251, 10.61471, 10.54092, 10.27186]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [8571.0, 7897.0, 7748.0, 9008.0, 9165.0, 8986.0, 9155.0]}, "iteration_timing_avg": 0.3671870588235294} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json new file mode 100644 index 000000000..b807a2e97 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0]}, "iteration_timing_avg": 0.1660379411764706} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml index c547f4797..69dc9edf5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values.json rename to 
tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_dev.json
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json
new file mode 100644
index 000000000..546ccfca5
--- /dev/null
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json
@@ -0,0 +1 @@
+{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.80264, 10.85778, 10.86259, 10.83903, 10.82934, 10.81016, 10.60251, 10.61471, 10.54092, 10.27186, 10.24338, 10.02058, 10.03017, 9.99471, 9.84885, 9.34867, 9.67263, 9.2457, 9.53365, 9.67548]}, "num-zeros": {"start_step": 0, "end_step": 84, "step_interval": 5, "values": [8571.0, 7897.0, 7748.0, 9008.0, 9165.0, 8986.0, 9155.0, 7960.0, 7684.0, 9743.0, 8727.0, 9382.0, 10992.0, 11177.0, 11270.0, 13404.0, 11533.0]}, "iteration_timing_avg": 0.3735462686567164}
\ No newline at end of file
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml
index 72c98e80b..bd324b8ba 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
@@ -52,4 +51,4 @@ MODEL_ARGS:
   --data-cache-path: ${DATA_CACHE_PATH}
   --fp16: true
   --apply-query-key-layer-scaling: true
-TEST_TYPE: ckpt-resume
\ No newline at end of file
+TEST_TYPE: ckpt-resume
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_dev.json
similarity index 100%
rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values.json
rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_dev.json
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json
new file mode 100644
index 000000000..c0a53bdb6
--- /dev/null
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json
@@ -0,0 +1 @@
+{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708, 10.19741, 9.9562, 9.96369, 9.91398, 9.79604, 9.2686, 9.61975, 9.19501, 9.47332, 9.62216]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0, 3656.0, 3275.0, 3203.0, 3297.0, 3364.0, 3789.0, 3277.0, 3660.0, 3733.0, 4815.0]}, "iteration_timing_avg": 0.1628459701492537}
\ No newline at end of file
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml
index 03ddd8a7c..e8723049f 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
@@ -49,4 +48,4 @@ MODEL_ARGS:
   --data-cache-path: ${DATA_CACHE_PATH}
   --fp16: true
   --apply-query-key-layer-scaling: true
-TEST_TYPE: ckpt-resume
\ No newline at end of file
+TEST_TYPE: ckpt-resume
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json
similarity index 100%
rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values.json
rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json
new file mode 100644
index 000000000..18457f230
--- /dev/null
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json
@@ -0,0 +1 @@
+{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0]}, "iteration_timing_avg": 0.23144205882352942}
\ No newline at end of file
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml
index 84128fa78..226809ade 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
@@ -47,4 +46,4 @@ MODEL_ARGS:
   --data-cache-path: ${DATA_CACHE_PATH}
   --fp16: true
   --apply-query-key-layer-scaling: true
-TEST_TYPE: regular
\ No newline at end of file
+TEST_TYPE: regular
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_dev.json
similarity index 100%
rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values.json
rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_dev.json
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json
new file mode 100644
index 000000000..7b39f86c3
--- /dev/null
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json
@@ -0,0 +1 @@
+{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0]}, "iteration_timing_avg": 0.23131970588235293}
\ No newline at end of file
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml
index b664115f2..8746c03a3 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
@@ -48,4 +47,4 @@ MODEL_ARGS:
   --data-cache-path: ${DATA_CACHE_PATH}
   --fp16: true
   --apply-query-key-layer-scaling: true
-TEST_TYPE: regular
\ No newline at end of file
+TEST_TYPE: regular
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_dev.json
similarity index 100%
rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values.json
rename to tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_dev.json
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json
new file mode 100644
index 000000000..47198f9ec
--- /dev/null
+++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json
@@ -0,0 +1 @@
+{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525, 10.21403, 9.9801, 9.96977, 9.93973, 9.81158, 9.28667, 9.63194, 9.19732, 9.48341, 9.62985]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0, 3451.0, 3205.0, 2940.0, 3143.0, 3310.0, 3884.0, 3232.0, 3491.0, 3751.0, 5022.0]}, "iteration_timing_avg": 0.22914074626865674}
\ No newline at end of file
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml index 0ec5d88ad..7d0be9144 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -48,4 +47,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values.json deleted file mode 100644 index 7335b2067..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [18.28053, 0.49505, 0.49249, 0.4863, 0.49126, 0.48294, 0.48297, 0.49211, 0.49244, 0.48476, 0.49685, 0.48221, 0.48444, 0.48262, 0.4868, 0.4822, 0.48935, 0.49261, 0.49648, 0.48319, 0.48763, 0.48829, 0.48803, 0.48167, 0.48323, 0.48629, 0.48421, 0.48466, 0.48642, 0.48171, 0.5845, 0.48341, 0.47926, 0.48909, 0.49939, 0.50358, 0.4812, 0.48449, 0.48356, 0.48264, 0.48384, 0.48252, 0.4847, 0.48316, 0.48125, 0.48107, 0.57559, 0.48254, 0.48595, 0.48176, 0.48343, 0.48901, 0.48231, 0.48126, 0.48705, 0.48449, 0.48313, 0.48504, 0.49265, 0.49529, 0.48979, 0.48846, 0.48904, 0.48991, 0.49197, 0.48869, 0.48889, 0.49026, 0.49051, 0.48812, 0.4895, 0.4888, 0.49274, 0.49157, 0.49398, 0.68596, 0.48574, 0.48994, 0.48496, 0.496, 0.48608, 0.49521, 0.48726, 0.49274, 0.48836, 0.49429, 0.49013, 0.49126, 0.48792, 0.49147, 0.49169, 0.48964, 0.49008, 0.49378, 0.49365, 0.49165, 0.49075, 0.57694, 0.48973, 0.48945, 0.48773, 0.49186, 0.48699, 0.49202, 0.48785, 0.48984, 0.48807, 0.4924, 0.48739, 0.48901, 0.48669, 0.48864, 0.48892, 0.48906, 0.48729, 0.48907, 0.4886, 0.49334, 0.48702, 0.57734, 0.70083, 0.49192, 0.48993, 0.48756, 0.48839, 0.49692, 0.49292, 0.48647, 0.49172, 0.4875, 0.49397, 0.48663, 0.49145, 0.48815, 0.49401, 0.48878, 0.49212, 0.48753, 0.49235, 0.48811, 0.49451, 0.48865, 0.58524, 0.49262, 0.49011, 0.48923, 0.48823, 0.49108, 0.4881, 0.49074, 0.49805, 0.49124, 0.48831, 0.49161, 0.48613, 0.49324, 0.48948, 0.49372, 0.48427, 0.49263, 0.48691, 0.49317, 0.49667, 0.4969, 0.57482, 0.61619, 0.48773, 0.48884, 0.49076, 0.49017, 0.48952, 0.49239, 0.49075, 0.48963, 0.4911, 0.48939, 0.48983, 0.49046, 0.49409, 0.48869, 0.49044, 0.4872, 0.49356, 0.48711, 0.49475, 0.49335, 0.49242, 0.48938, 0.48799, 0.49308, 0.48649, 0.49513, 0.57985, 0.49149, 0.49028, 0.4911, 0.49172, 0.48942, 0.49435, 0.48938, 0.47502, 0.48947, 0.48882, 0.48685, 0.48977, 0.4839, 0.49208, 0.49183, 0.4899, 0.49107, 0.48954, 0.48936, 0.49081, 0.48809, 0.49012, 0.49118, 0.49592, 0.49005, 0.49234, 0.48935, 0.49702, 0.4881, 0.49255, 0.4923, 0.49215, 0.49408, 0.4896, 0.49166, 0.49036, 0.57641, 0.49203, 0.4866, 0.49827, 0.49306, 0.48826, 0.49197, 0.50213, 0.49344, 0.48736, 0.49635, 0.57884, 0.49438, 0.49181, 0.49665, 0.49267, 0.48679, 0.48884, 0.48977, 0.49284, 
0.48791, 0.49204, 0.49178, 0.49595, 0.4931, 0.49191, 0.48826, 0.49306, 0.48701, 0.48992, 0.48579, 0.49069, 0.48562, 0.49508, 0.48592, 0.49748, 0.4852, 0.49001, 0.48851, 0.48928, 0.48685, 0.4898, 0.49343, 0.48889, 0.49276, 0.4874, 0.50472, 0.49085, 0.59958, 0.49141, 0.49279, 0.49191, 0.48975, 0.4895, 0.49082, 0.48927, 0.4914, 0.48634, 0.48671, 0.48679, 0.49495, 0.48847, 0.49036, 0.48784, 0.49319, 0.4893, 0.49337, 0.58198, 0.58629, 0.4953, 0.49089, 0.48763, 0.49392, 0.48743, 0.49484, 0.48893, 0.49356, 0.48948, 0.49182, 0.48987, 0.49043, 0.49529, 0.49039, 0.4921, 0.49072, 0.59678, 0.49229, 0.49187, 0.4928, 0.49741, 0.49468, 0.48644, 0.49313, 0.49332, 0.48749, 0.49394, 0.48779, 0.49346, 0.48849, 0.49244, 0.48985, 0.49183, 0.49358, 0.48865, 0.49267, 0.4914, 0.49166, 0.48871, 0.49327, 0.49077, 0.49024, 0.49629, 0.48853, 0.57947, 0.49147, 0.48886, 0.50383, 0.48817, 0.49188, 0.4873, 0.49974, 0.49014, 0.4908, 0.4922, 0.49589, 0.49266, 0.48782, 0.49383, 0.48872, 0.49176, 0.49069, 0.49264, 0.49042, 0.4914, 0.4912, 0.48803, 0.49078, 0.49007, 0.48811, 0.49406, 0.48945, 0.48976, 0.49052, 0.49238, 0.48839, 0.48749, 0.48884, 0.49154, 0.48706, 0.48761, 0.49108, 0.49077, 0.49131, 0.49425, 0.48822, 0.49246, 0.49172, 0.49273, 0.57851, 0.49276, 0.49599, 0.48901, 0.49655, 0.49128, 0.48808, 0.49162, 0.49012, 0.49189, 0.50308, 0.49552, 0.48646]}, "forward-compute-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [9.21276, 0.28687, 0.28815, 0.2833, 0.28439, 0.27844, 0.27842, 0.28317, 0.28459, 0.28018, 0.29052, 0.27923, 0.27964, 0.27881, 0.28284, 0.27894, 0.2858, 0.28599, 0.29109, 0.28083, 0.28444, 0.28303, 0.2848, 0.27728, 0.28052, 0.2809, 0.27929, 0.2805, 0.28333, 0.27803, 0.3776, 0.27848, 0.27391, 0.28208, 0.29927, 0.30354, 0.28082, 0.28432, 0.28327, 0.28318, 0.28355, 0.28207, 0.28438, 0.28242, 0.28127, 0.28045, 0.37514, 0.2813, 0.28253, 0.28106, 0.28235, 0.28881, 0.28182, 0.28128, 0.28489, 0.28348, 0.2813, 0.28279, 0.29008, 0.29295, 0.28746, 0.2869, 0.28708, 0.28818, 0.28744, 0.28543, 0.28582, 0.28782, 0.28724, 0.28631, 0.28595, 0.28734, 0.2881, 0.28983, 0.2918, 0.48123, 0.28384, 0.28784, 0.28341, 0.28813, 0.28363, 0.29108, 0.2853, 0.28861, 0.28671, 0.29218, 0.28714, 0.29008, 0.28661, 0.29, 0.28895, 0.28724, 0.289, 0.29102, 0.28959, 0.28779, 0.28919, 0.37298, 0.28802, 0.28671, 0.28631, 0.29013, 0.28597, 0.29054, 0.28653, 0.28662, 0.28618, 0.28937, 0.285, 0.28745, 0.28473, 0.2862, 0.28623, 0.28613, 0.28465, 0.28674, 0.2875, 0.2909, 0.28626, 0.37409, 0.49531, 0.29025, 0.28653, 0.28605, 0.284, 0.29546, 0.29024, 0.28506, 0.29074, 0.28487, 0.29199, 0.28427, 0.28721, 0.28569, 0.28978, 0.28671, 0.29019, 0.2858, 0.29107, 0.28549, 0.28872, 0.28587, 0.38328, 0.28744, 0.28899, 0.28716, 0.28682, 0.28652, 0.28709, 0.28668, 0.29569, 0.28914, 0.28688, 0.28981, 0.28508, 0.29181, 0.28828, 0.29083, 0.28368, 0.28892, 0.28472, 0.2903, 0.29275, 0.29136, 0.3738, 0.41333, 0.28566, 0.28691, 0.28887, 0.2879, 0.28701, 0.2905, 0.28746, 0.28816, 0.28899, 0.28753, 0.2884, 0.28928, 0.29105, 0.28699, 0.28797, 0.28497, 0.29203, 0.28489, 0.28827, 0.29119, 0.29128, 0.28793, 0.28557, 0.29143, 0.28602, 0.29322, 0.37776, 0.28815, 0.28911, 0.28768, 0.28978, 0.2868, 0.2925, 0.28589, 0.27191, 0.28653, 0.28666, 0.28333, 0.28729, 0.28057, 0.28965, 0.2861, 0.28679, 0.28928, 0.28452, 0.28737, 0.28913, 0.28511, 0.28745, 0.28832, 0.29349, 0.28729, 0.28924, 0.28804, 0.29076, 0.28598, 0.29056, 0.28869, 0.28825, 0.29164, 0.28711, 0.28995, 0.2878, 0.37312, 0.28833, 0.28482, 0.29549, 0.28742, 0.28591, 0.28649, 0.29968, 0.29157, 0.2854, 
0.29423, 0.37624, 0.29269, 0.28871, 0.29189, 0.28756, 0.28409, 0.28672, 0.28672, 0.29028, 0.28554, 0.29097, 0.28867, 0.29335, 0.29036, 0.28781, 0.28622, 0.28846, 0.28532, 0.28399, 0.28365, 0.28792, 0.28385, 0.29346, 0.28436, 0.29447, 0.28249, 0.28597, 0.28637, 0.28537, 0.28417, 0.28799, 0.28802, 0.28653, 0.29059, 0.28295, 0.30255, 0.28676, 0.39524, 0.28938, 0.28909, 0.28993, 0.28689, 0.2868, 0.28486, 0.2869, 0.28468, 0.28373, 0.28395, 0.28399, 0.29311, 0.28649, 0.28867, 0.2844, 0.29111, 0.28595, 0.29083, 0.37422, 0.38481, 0.2917, 0.28795, 0.28411, 0.29214, 0.28545, 0.29182, 0.28619, 0.29032, 0.28643, 0.28955, 0.287, 0.28693, 0.29048, 0.28673, 0.28964, 0.28608, 0.39417, 0.28909, 0.28926, 0.28892, 0.29626, 0.29035, 0.28418, 0.29096, 0.28911, 0.2861, 0.29247, 0.28616, 0.28914, 0.28625, 0.28976, 0.28808, 0.28866, 0.29068, 0.28692, 0.29086, 0.28868, 0.29004, 0.28595, 0.29148, 0.28842, 0.2886, 0.29171, 0.28773, 0.3764, 0.28898, 0.28636, 0.29892, 0.28549, 0.28973, 0.28465, 0.29697, 0.28725, 0.28663, 0.2894, 0.294, 0.29116, 0.28622, 0.29179, 0.28632, 0.29035, 0.28768, 0.28989, 0.28709, 0.2891, 0.28817, 0.28602, 0.28837, 0.28768, 0.28625, 0.28964, 0.28715, 0.287, 0.28748, 0.29025, 0.28485, 0.28473, 0.2867, 0.28777, 0.28402, 0.28515, 0.28793, 0.28644, 0.2893, 0.28758, 0.28612, 0.28687, 0.29012, 0.2871, 0.37328, 0.28876, 0.29273, 0.28732, 0.29333, 0.28722, 0.28605, 0.2878, 0.28786, 0.28733, 0.29635, 0.29189, 0.28435]}, "backward-compute-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [3.24795, 0.21194, 0.21471, 0.20869, 0.21204, 0.20759, 0.20377, 0.2107, 0.20945, 0.20618, 0.21705, 0.20521, 0.20785, 0.20627, 0.20635, 0.2064, 0.20649, 0.21053, 0.21523, 0.20491, 0.20938, 0.20895, 0.21121, 0.20684, 0.20811, 0.20914, 0.20848, 0.20944, 0.21029, 0.2088, 0.20823, 0.20765, 0.20786, 0.21144, 0.20746, 0.20856, 0.20791, 0.20961, 0.20962, 0.20803, 0.20624, 0.20748, 0.20646, 0.20637, 0.20506, 0.20636, 0.20873, 0.20709, 0.21021, 0.20645, 0.20725, 0.21067, 0.20689, 0.20484, 0.21018, 0.20758, 0.20809, 0.20663, 0.21735, 0.22092, 0.2181, 0.21664, 0.21604, 0.21705, 0.21811, 0.2175, 0.21613, 0.21894, 0.2186, 0.21706, 0.21821, 0.21776, 0.22265, 0.21862, 0.2187, 0.21766, 0.21611, 0.217, 0.21459, 0.22041, 0.21715, 0.2188, 0.21633, 0.21946, 0.21474, 0.21906, 0.21831, 0.21662, 0.21778, 0.21777, 0.21604, 0.21593, 0.21431, 0.21926, 0.2178, 0.21741, 0.21712, 0.22133, 0.2158, 0.21733, 0.21522, 0.21854, 0.21582, 0.21924, 0.21532, 0.21807, 0.216, 0.22003, 0.21598, 0.21559, 0.21655, 0.21799, 0.21734, 0.21749, 0.21785, 0.21759, 0.21855, 0.21936, 0.21602, 0.21592, 0.21786, 0.22091, 0.21874, 0.21753, 0.21923, 0.22306, 0.22024, 0.21591, 0.22007, 0.2187, 0.222, 0.2157, 0.22232, 0.21719, 0.22251, 0.21763, 0.22074, 0.21731, 0.21953, 0.21712, 0.22337, 0.22066, 0.22071, 0.21949, 0.21972, 0.21565, 0.21695, 0.22019, 0.21716, 0.219, 0.22553, 0.21923, 0.21738, 0.2203, 0.21678, 0.22028, 0.21797, 0.22029, 0.21479, 0.22065, 0.21605, 0.22109, 0.22372, 0.22023, 0.2184, 0.21646, 0.21673, 0.21835, 0.21624, 0.21877, 0.21593, 0.21993, 0.21906, 0.21748, 0.21846, 0.21846, 0.21773, 0.21782, 0.22154, 0.21764, 0.2193, 0.2172, 0.21983, 0.21556, 0.22293, 0.22107, 0.22132, 0.21857, 0.21717, 0.22128, 0.21593, 0.22043, 0.22094, 0.22038, 0.21956, 0.21936, 0.21966, 0.21754, 0.22141, 0.21803, 0.21648, 0.21739, 0.21902, 0.21686, 0.21805, 0.21493, 0.22077, 0.22186, 0.21962, 0.22048, 0.22052, 0.21855, 0.21913, 0.21681, 0.21996, 0.22012, 0.22218, 0.22009, 0.21986, 0.21939, 0.22266, 0.2163, 0.21865, 0.22182, 0.2197, 0.22192, 0.21676, 0.22102, 
0.21734, 0.22013, 0.21984, 0.21564, 0.22434, 0.22271, 0.21673, 0.22212, 0.22818, 0.22064, 0.21733, 0.22214, 0.21857, 0.2223, 0.22007, 0.22387, 0.22019, 0.21548, 0.21818, 0.21601, 0.22079, 0.21586, 0.22149, 0.2206, 0.2192, 0.22065, 0.22097, 0.21714, 0.22179, 0.21621, 0.21994, 0.21491, 0.21991, 0.21504, 0.2197, 0.21388, 0.2201, 0.21487, 0.21828, 0.21636, 0.2175, 0.2155, 0.21587, 0.22018, 0.2151, 0.21983, 0.21588, 0.22793, 0.21875, 0.21694, 0.21987, 0.21989, 0.2186, 0.21826, 0.21718, 0.21971, 0.21741, 0.22031, 0.21565, 0.21643, 0.21559, 0.22115, 0.21694, 0.21849, 0.2154, 0.2201, 0.2167, 0.21944, 0.22561, 0.21402, 0.22049, 0.21782, 0.21537, 0.22116, 0.2162, 0.21949, 0.21494, 0.21795, 0.21647, 0.2181, 0.21867, 0.21751, 0.22266, 0.21692, 0.21888, 0.218, 0.22288, 0.21842, 0.21856, 0.21818, 0.22158, 0.22161, 0.21476, 0.21952, 0.21926, 0.21497, 0.21832, 0.21576, 0.21887, 0.2162, 0.21752, 0.21687, 0.21921, 0.22035, 0.21626, 0.22133, 0.21774, 0.22037, 0.21522, 0.22047, 0.21579, 0.21844, 0.22391, 0.21642, 0.21898, 0.21906, 0.21598, 0.22975, 0.21527, 0.21717, 0.21546, 0.22404, 0.21811, 0.21888, 0.2205, 0.22021, 0.22075, 0.21565, 0.21932, 0.21653, 0.21917, 0.21911, 0.22008, 0.21787, 0.21844, 0.21948, 0.21617, 0.21938, 0.21829, 0.21659, 0.2228, 0.21857, 0.21702, 0.21841, 0.21741, 0.21545, 0.21539, 0.21773, 0.21824, 0.21609, 0.21521, 0.21832, 0.21767, 0.21765, 0.21961, 0.21554, 0.21864, 0.21727, 0.21996, 0.21834, 0.21793, 0.22003, 0.21486, 0.22016, 0.21713, 0.21621, 0.21798, 0.21593, 0.21822, 0.22518, 0.21883, 0.21389]}, "batch-generator-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.60577, 0.00374, 0.00393, 0.00334, 0.0036, 0.00342, 0.00344, 0.00397, 0.00331, 0.00323, 0.00356, 0.00332, 0.00341, 0.00356, 0.00347, 0.00308, 0.00337, 0.00327, 0.00342, 0.00359, 0.00317, 0.00312, 0.00326, 0.00315, 0.00321, 0.00318, 0.00314, 0.00309, 0.00313, 0.0031, 0.00327, 0.00314, 0.00303, 0.00338, 0.00311, 0.00306, 0.00302, 0.00321, 0.00306, 0.0032, 0.00305, 0.00309, 0.00302, 0.00328, 0.00297, 0.00295, 0.00322, 0.00301, 0.00307, 0.00325, 0.00287, 0.00312, 0.00289, 0.00302, 0.00308, 0.00307, 0.00308, 0.0035, 0.00327, 0.0032, 0.00318, 0.00312, 0.00322, 0.00336, 0.00333, 0.00345, 0.00311, 0.00326, 0.00307, 0.00318, 0.00309, 0.00331, 0.0031, 0.00327, 0.00333, 0.0033, 0.00321, 0.00328, 0.00317, 0.00325, 0.00309, 0.0033, 0.00326, 0.00323, 0.00321, 0.00319, 0.00318, 0.00329, 0.00315, 0.00331, 0.00368, 0.00361, 0.00377, 0.00374, 0.00383, 0.00345, 0.00348, 0.00347, 0.00339, 0.0035, 0.00312, 0.00344, 0.00325, 0.00318, 0.00318, 0.00323, 0.00328, 0.00331, 0.00329, 0.00318, 0.00327, 0.0032, 0.00317, 0.00314, 0.00313, 0.00316, 0.00327, 0.00348, 0.00319, 0.00309, 0.00338, 0.00315, 0.00347, 0.00335, 0.00315, 0.00314, 0.00339, 0.00316, 0.00323, 0.00311, 0.00331, 0.00317, 0.00311, 0.00316, 0.00317, 0.00314, 0.00323, 0.00319, 0.00311, 0.00328, 0.00326, 0.00315, 0.00319, 0.0035, 0.00303, 0.00311, 0.00331, 0.00334, 0.00314, 0.00323, 0.00345, 0.00325, 0.00319, 0.00322, 0.00331, 0.00339, 0.00342, 0.00343, 0.00335, 0.00349, 0.00338, 0.00342, 0.00327, 0.00325, 0.00331, 0.00327, 0.00328, 0.00325, 0.00321, 0.00326, 0.00324, 0.00346, 0.00329, 0.00347, 0.00325, 0.00327, 0.00322, 0.0032, 0.00311, 0.00307, 0.00322, 0.00303, 0.00312, 0.00323, 0.00329, 0.00312, 0.00323, 0.00323, 0.00307, 0.00315, 0.00324, 0.00314, 0.00308, 0.00308, 0.00313, 0.00322, 0.00318, 0.0032, 0.0032, 0.00322, 0.02747, 0.00304, 0.0031, 0.00322, 0.00309, 0.00303, 0.00319, 0.00304, 0.00319, 0.00315, 0.00305, 0.00324, 0.00328, 0.00297, 0.0033, 0.00302, 
0.00329, 0.00319, 0.00309, 0.00319, 0.00324, 0.00336, 0.00317, 0.00324, 0.00322, 0.00343, 0.00323, 0.00314, 0.00337, 0.00333, 0.00319, 0.00305, 0.00351, 0.00342, 0.00323, 0.00333, 0.00325, 0.00329, 0.00309, 0.00337, 0.00313, 0.00331, 0.00309, 0.00329, 0.00319, 0.00325, 0.00323, 0.00324, 0.00332, 0.0034, 0.0033, 0.00322, 0.00318, 0.00319, 0.00329, 0.00315, 0.00329, 0.00325, 0.00333, 0.00322, 0.00337, 0.00313, 0.00313, 0.00327, 0.00332, 0.00313, 0.00307, 0.00312, 0.00306, 0.00322, 0.00309, 0.0033, 0.00323, 0.00341, 0.00326, 0.0035, 0.00329, 0.00341, 0.00333, 0.00334, 0.00347, 0.00314, 0.00336, 0.00336, 0.00329, 0.0032, 0.00322, 0.00331, 0.00337, 0.00336, 0.00312, 0.00321, 0.00407, 0.00319, 0.00353, 0.00339, 0.00344, 0.00327, 0.00338, 0.00335, 0.00325, 0.00334, 0.00318, 0.00329, 0.00329, 0.00323, 0.00318, 0.00325, 0.00322, 0.00317, 0.00327, 0.00307, 0.00322, 0.00305, 0.00323, 0.00318, 0.00328, 0.00317, 0.00326, 0.00313, 0.00312, 0.00317, 0.00319, 0.00322, 0.00326, 0.00311, 0.00318, 0.00349, 0.00314, 0.00329, 0.00324, 0.00339, 0.0031, 0.00326, 0.00308, 0.00316, 0.0031, 0.0034, 0.00318, 0.00327, 0.00321, 0.00313, 0.00335, 0.00311, 0.00333, 0.00329, 0.0031, 0.00325, 0.00325, 0.00326, 0.0033, 0.00323, 0.00315, 0.00321, 0.00322, 0.003, 0.00355, 0.00301, 0.00302, 0.00319, 0.00323, 0.0032, 0.00321, 0.0031, 0.00344, 0.00317, 0.0033, 0.00322, 0.00317, 0.00318, 0.00314, 0.00328, 0.0033, 0.0033, 0.0031, 0.00321, 0.0033, 0.00315, 0.00323, 0.00342, 0.00315, 0.00321, 0.00324, 0.00312, 0.00341, 0.00323, 0.00333, 0.00335, 0.00334, 0.00324, 0.00319, 0.00335, 0.00319, 0.0032, 0.00317, 0.0033, 0.00322, 0.00334, 0.0034, 0.00306]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [3e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 
1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [4e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 
2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [6.03213, 0.0015, 0.00156, 0.00153, 0.00152, 0.00153, 0.00156, 0.00153, 0.00152, 0.00153, 0.00155, 0.00152, 0.00157, 0.00153, 0.00155, 0.00153, 0.00153, 0.00151, 0.00155, 0.00153, 0.00154, 0.00152, 0.00154, 0.00153, 0.00155, 0.00154, 0.00154, 0.00154, 0.00154, 0.00153, 0.00156, 0.00152, 0.00152, 0.00153, 0.00156, 0.00153, 0.00153, 0.00155, 0.00153, 0.00152, 0.00154, 0.00155, 0.00155, 0.00152, 0.00152, 0.00153, 0.00154, 0.00153, 0.00154, 0.00152, 0.00154, 0.00154, 0.00155, 0.00153, 0.00156, 0.00154, 0.00156, 0.00153, 0.00156, 0.00151, 0.00154, 0.00153, 0.00156, 0.00151, 0.00156, 0.00155, 0.00155, 0.00152, 0.00155, 0.00152, 0.00154, 0.00153, 0.00156, 0.00153, 0.00154, 0.00154, 0.00156, 0.00154, 0.00155, 0.00155, 0.00155, 0.00153, 0.00154, 0.00152, 0.00155, 0.00154, 0.00156, 0.00153, 0.00153, 0.00153, 0.00155, 0.00154, 0.00155, 0.00153, 0.00154, 0.00153, 0.00155, 0.00153, 0.00154, 0.00152, 0.00155, 0.00152, 0.00155, 0.00154, 0.00155, 0.00154, 0.00155, 0.00153, 0.00154, 0.00152, 0.00155, 0.00153, 0.00153, 0.00154, 0.00154, 0.00151, 0.00155, 0.00153, 0.00156, 0.00153, 0.00155, 0.00154, 0.00156, 0.00156, 0.00155, 0.00154, 0.00155, 0.00153, 0.00152, 0.00153, 0.00155, 0.00154, 0.00155, 0.00154, 0.00154, 0.00154, 0.00155, 0.00151, 0.00152, 0.00153, 0.00153, 0.00151, 0.00153, 0.00154, 0.00156, 0.00155, 0.00157, 0.00154, 0.00156, 0.00154, 0.00155, 0.00151, 0.00154, 0.00153, 0.00154, 0.00153, 0.00156, 0.00155, 0.00155, 0.00152, 0.00157, 0.00153, 0.00154, 0.00154, 0.00155, 0.00154, 0.00151, 0.00154, 0.00155, 0.00152, 0.00155, 0.00152, 0.00156, 0.00153, 0.00153, 0.00155, 0.00154, 0.00153, 0.00154, 0.00152, 0.00154, 0.00155, 0.00154, 0.00152, 0.00157, 0.00154, 0.00154, 0.00152, 0.00155, 0.00152, 0.00157, 0.00152, 0.00154, 0.00153, 0.00156, 0.00153, 0.00156, 0.00154, 0.00156, 0.00153, 0.00154, 0.00153, 0.00157, 0.00155, 0.00154, 0.00156, 0.00154, 0.00153, 0.00151, 0.00156, 0.00156, 0.00155, 0.00155, 0.00154, 0.00155, 0.00154, 0.00155, 0.00152, 0.00154, 0.00154, 0.00154, 0.00156, 0.00157, 0.00154, 0.00155, 0.00155, 0.00153, 0.00153, 0.00154, 0.00155, 0.00155, 0.00155, 0.00155, 0.00154, 0.00154, 0.00154, 0.00154, 0.00153, 0.00154, 0.00154, 0.00154, 0.00154, 0.00155, 0.00154, 0.00156, 0.00156, 0.00154, 0.00155, 0.00153, 0.00155, 0.00152, 0.00156, 0.00154, 0.00156, 0.00156, 0.00152, 0.00154, 0.00153, 0.00153, 0.00155, 0.00154, 0.00157, 0.00154, 0.00153, 0.00157, 0.00155, 0.00156, 0.00155, 0.00157, 0.00155, 0.00155, 0.00153, 0.00156, 0.00158, 0.00155, 0.00155, 0.00157, 0.00153, 0.00155, 0.00154, 0.00155, 0.00153, 0.00155, 0.00155, 0.00154, 0.00151, 0.00154, 0.00156, 0.00156, 0.00155, 0.00155, 0.00155, 0.00155, 0.00153, 0.00155, 0.00156, 0.00154, 0.00155, 0.00153, 0.00155, 0.00155, 0.00153, 0.00154, 0.00154, 0.00156, 0.00156, 0.00155, 0.00155, 0.00154, 0.00153, 0.00155, 0.00155, 0.00155, 0.00154, 0.00153, 0.00154, 0.00154, 0.00155, 0.00156, 0.00156, 0.00156, 0.00156, 0.00156, 0.00156, 0.00155, 0.00155, 0.00154, 0.00156, 0.00154, 0.00156, 0.00155, 0.00154, 0.00156, 0.00154, 0.00153, 0.00155, 0.00152, 0.00156, 0.00151, 0.00155, 0.00154, 0.00155, 0.00155, 0.00156, 0.00153, 0.00155, 0.00154, 
0.00156, 0.00154, 0.00154, 0.00154, 0.00155, 0.00155, 0.00155, 0.00153, 0.00155, 0.00154, 0.00154, 0.00155, 0.00156, 0.00153, 0.00153, 0.00154, 0.00155, 0.00153, 0.00154, 0.00155, 0.00154, 0.00154, 0.00155, 0.00155, 0.00155, 0.00153, 0.00155, 0.00154, 0.00157, 0.00156, 0.00153, 0.00157, 0.00157, 0.00156, 0.00157, 0.00154, 0.00155, 0.00157, 0.00155, 0.00155, 0.00153, 0.00153, 0.00152, 0.00154, 0.00155, 0.00155, 0.00154, 0.00153, 0.00155, 0.00154, 0.00155, 0.00155, 0.00155]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00038, 0.00024, 0.00024, 0.00015, 0.00015, 0.00016, 0.00015, 0.00016, 0.00015, 0.00013, 0.00013, 0.00015, 0.00015, 0.00013, 0.00015, 0.00013, 0.00015, 0.00013, 0.00015, 0.00015, 0.00013, 0.00015, 0.00013, 0.00015, 0.00013, 0.00014, 0.00013, 0.00013, 0.00015, 0.00013, 0.00015, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00014, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00016, 0.00013, 0.00013, 0.00013, 0.00015, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00025, 0.00018, 0.00018, 0.00019, 0.00018, 0.0003, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00019, 0.00019, 0.00018, 0.00019, 0.00019, 0.00019, 0.00018, 0.00019, 0.00019, 0.00019, 0.00021, 0.00018, 0.00021, 0.00018, 0.00019, 0.00019, 0.00018, 0.00018, 0.00018, 0.0002, 0.00019, 0.00018, 0.00018, 0.00018, 0.00019, 0.00021, 0.00018, 0.00019, 0.00019, 0.00018, 0.00018, 0.00021, 0.00018, 0.00019, 0.00019, 0.00019, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.0002, 0.00018, 0.00021, 0.00019, 0.00018, 0.00018, 0.0002, 0.00023, 0.00018, 0.00018, 0.0002, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00019, 0.00021, 0.00018, 0.00018, 0.00021, 0.00018, 0.0002, 0.00018, 0.00018, 0.00021, 0.00018, 0.00019, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.0002, 0.00021, 0.00019, 0.00018, 0.00021, 0.00021, 0.00018, 0.00019, 0.00019, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00021, 0.00018, 0.00019, 0.00021, 0.00018, 0.00018, 0.00021, 0.00018, 0.00021, 0.00018, 0.00018, 0.00019, 0.00021, 0.00021, 0.00021, 0.00021, 0.00018, 0.00018, 0.00019, 0.00019, 0.00018, 0.0002, 0.00021, 0.00021, 0.0002, 0.00018, 0.00019, 0.00019, 0.00018, 0.00018, 0.00018, 0.00021, 0.00019, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00021, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.0002, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00021, 0.00018, 0.0002, 0.00018, 0.00018, 0.00018, 0.00021, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00019, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.0002, 0.00018, 0.00018, 0.00019, 0.00018, 0.00019, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00021, 0.00019, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00021, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 
0.00021, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00019, 0.00018, 0.00018, 0.00019, 0.00021, 0.00019, 0.00021, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00018, 0.00018, 0.00021, 0.00018, 0.00018, 0.00021, 0.00019, 0.00019, 0.00019, 0.00021, 0.00023, 0.00018, 0.00021, 0.00019, 0.00018, 0.00021, 0.00019, 0.00019, 0.00019, 0.00019, 0.00018, 0.00019, 0.00019, 0.00018, 0.00019, 0.00018, 0.00019, 0.00018, 0.00022, 0.00021, 0.00018]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.62631, 0.00104, 0.00106, 0.00093, 0.00092, 0.00096, 0.00095, 0.00096, 0.00092, 0.00091, 0.0009, 0.00091, 0.00101, 0.00091, 0.00091, 0.0009, 0.0009, 0.0009, 0.00093, 0.00094, 0.0009, 0.00115, 0.0009, 0.00092, 0.00091, 0.00098, 0.00089, 0.00091, 0.00091, 0.0009, 0.00094, 0.0009, 0.00095, 0.00091, 0.00091, 0.0009, 0.0009, 0.00091, 0.00091, 0.00091, 0.00091, 0.00091, 0.00091, 0.00091, 0.00092, 0.0009, 0.00093, 0.00093, 0.00091, 0.00091, 0.00101, 0.00091, 0.0009, 0.0009, 0.0009, 0.00091, 0.00091, 0.00107, 0.00099, 0.001, 0.00101, 0.001, 0.00179, 0.001, 0.001, 0.00101, 0.0011, 0.00101, 0.001, 0.00101, 0.00101, 0.00101, 0.001, 0.001, 0.00101, 0.00109, 0.00106, 0.001, 0.001, 0.00102, 0.00101, 0.00102, 0.00109, 0.00101, 0.00104, 0.001, 0.00099, 0.00103, 0.00102, 0.001, 0.001, 0.00113, 0.00082, 0.00079, 0.0008, 0.001, 0.00102, 0.00105, 0.001, 0.001, 0.001, 0.00102, 0.00079, 0.00105, 0.00079, 0.00106, 0.0008, 0.00079, 0.00099, 0.00087, 0.00101, 0.0008, 0.00099, 0.00086, 0.00101, 0.00083, 0.00081, 0.001, 0.0008, 0.001, 0.00085, 0.00081, 0.001, 0.00079, 0.001, 0.00101, 0.001, 0.00079, 0.001, 0.00106, 0.001, 0.001, 0.00103, 0.00104, 0.00079, 0.00101, 0.00084, 0.00079, 0.0008, 0.0008, 0.00109, 0.00105, 0.00099, 0.0008, 0.00101, 0.00101, 0.00102, 0.00102, 0.0008, 0.00079, 0.00111, 0.00101, 0.00099, 0.0008, 0.001, 0.00108, 0.00107, 0.00103, 0.00103, 0.00084, 0.00105, 0.001, 0.00101, 0.001, 0.00101, 0.00101, 0.001, 0.00101, 0.00101, 0.00114, 0.00099, 0.0008, 0.00079, 0.00101, 0.001, 0.001, 0.00105, 0.00101, 0.001, 0.00113, 0.00101, 0.001, 0.00106, 0.001, 0.00101, 0.00101, 0.00101, 0.00101, 0.00106, 0.00105, 0.00107, 0.00106, 0.00102, 0.001, 0.00104, 0.00101, 0.00105, 0.001, 0.00104, 0.00105, 0.00104, 0.00103, 0.001, 0.001, 0.001, 0.00109, 0.00101, 0.00104, 0.001, 0.00108, 0.00108, 0.001, 0.00101, 0.001, 0.00103, 0.00106, 0.00102, 0.00106, 0.00102, 0.00099, 0.00101, 0.00105, 0.00104, 0.00101, 0.00105, 0.00102, 0.00103, 0.00102, 0.001, 0.001, 0.00104, 0.001, 0.00101, 0.00101, 0.001, 0.00105, 0.00101, 0.00107, 0.00102, 0.001, 0.00101, 0.00101, 0.00101, 0.00108, 0.00101, 0.001, 0.00106, 0.00101, 0.001, 0.001, 0.00105, 0.00101, 0.00116, 0.00112, 0.00101, 0.001, 0.00103, 0.00101, 0.00103, 0.00101, 0.00105, 0.00103, 0.00102, 0.001, 0.00101, 0.001, 0.00108, 0.00108, 0.00101, 0.00106, 0.00109, 0.00106, 0.00102, 0.00104, 0.001, 0.001, 0.00099, 0.00101, 0.00101, 0.001, 0.001, 0.001, 0.00102, 0.00105, 0.001, 0.00103, 0.00103, 0.001, 0.00101, 0.001, 0.00107, 0.00101, 0.001, 0.001, 0.00102, 0.001, 0.00111, 0.001, 0.00102, 0.00104, 0.00099, 0.001, 0.00101, 0.00101, 0.00105, 0.00101, 0.001, 0.00101, 0.00107, 0.00113, 0.00103, 0.00105, 0.00102, 0.00105, 0.00101, 0.00101, 0.00102, 0.001, 0.00101, 0.00103, 0.001, 0.00102, 0.00108, 0.00103, 0.00103, 0.00101, 0.00104, 0.001, 0.00103, 0.00101, 0.00107, 0.00106, 
0.00099, 0.00103, 0.00102, 0.00101, 0.00102, 0.001, 0.00101, 0.00101, 0.00102, 0.001, 0.00101, 0.0011, 0.00101, 0.001, 0.00101, 0.001, 0.00108, 0.001, 0.0011, 0.00108, 0.00101, 0.001, 0.00102, 0.00102, 0.00101, 0.001, 0.00102, 0.00108, 0.00101, 0.00103, 0.001, 0.00101, 0.00101, 0.001, 0.00109, 0.001, 0.001, 0.00105, 0.00101, 0.00105, 0.001, 0.00102, 0.0011, 0.00103, 0.00103, 0.00102, 0.00106, 0.00104, 0.00104, 0.00107, 0.00101, 0.001, 0.00111, 0.00102, 0.00101, 0.00103, 0.00101, 0.00102, 0.001, 0.00102, 0.00103, 0.00101, 0.00101, 0.0011, 0.001, 0.00105, 0.00106, 0.00101]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00488, 0.00438, 0.00439, 0.00461, 0.00443, 0.0046, 0.00465, 0.00446, 0.00441, 0.00439, 0.00443, 0.0044, 0.00516, 0.00445, 0.0044, 0.0044, 0.00439, 0.0044, 0.0044, 0.00441, 0.00443, 0.00441, 0.00443, 0.00439, 0.00443, 0.0051, 0.0044, 0.00439, 0.00443, 0.00441, 0.0044, 0.00438, 0.00442, 0.00442, 0.00442, 0.00442, 0.00443, 0.0044, 0.00442, 0.00439, 0.0045, 0.00441, 0.00439, 0.00439, 0.0044, 0.00441, 0.00438, 0.00441, 0.00441, 0.0044, 0.00485, 0.00441, 0.00442, 0.00439, 0.0044, 0.00438, 0.00445, 0.00462, 0.00437, 0.00439, 0.0044, 0.00439, 0.0044, 0.00442, 0.00439, 0.00441, 0.00442, 0.00439, 0.00439, 0.00439, 0.00442, 0.0044, 0.00439, 0.00441, 0.00438, 0.00523, 0.00508, 0.00442, 0.00437, 0.00496, 0.00442, 0.00437, 0.00556, 0.00439, 0.00438, 0.00443, 0.00439, 0.0044, 0.00439, 0.00442, 0.00441, 0.0052, 0.00441, 0.00441, 0.00438, 0.00444, 0.00441, 0.0044, 0.00441, 0.00439, 0.00443, 0.00439, 0.00438, 0.00443, 0.0044, 0.00439, 0.00442, 0.00443, 0.00439, 0.00439, 0.00441, 0.00441, 0.0044, 0.00544, 0.00439, 0.0044, 0.0044, 0.00442, 0.00441, 0.00438, 0.00439, 0.00441, 0.00442, 0.00439, 0.00438, 0.00441, 0.00442, 0.0044, 0.0044, 0.00441, 0.00436, 0.0044, 0.00438, 0.00442, 0.00442, 0.00442, 0.00444, 0.00442, 0.00441, 0.0044, 0.00439, 0.00439, 0.00439, 0.00441, 0.00441, 0.00443, 0.00439, 0.00439, 0.00439, 0.00439, 0.00438, 0.0044, 0.00439, 0.00441, 0.00441, 0.00481, 0.00443, 0.0044, 0.0044, 0.00442, 0.0044, 0.00439, 0.0044, 0.00438, 0.00454, 0.0044, 0.00439, 0.0044, 0.00439, 0.0044, 0.0044, 0.00438, 0.00441, 0.00437, 0.00439, 0.0044, 0.00441, 0.00438, 0.00441, 0.00439, 0.00441, 0.00442, 0.0044, 0.00439, 0.00438, 0.00441, 0.00439, 0.00441, 0.0044, 0.0044, 0.0044, 0.00439, 0.0044, 0.00442, 0.00467, 0.00439, 0.0044, 0.0044, 0.00442, 0.00441, 0.00442, 0.0044, 0.00442, 0.00442, 0.00441, 0.00509, 0.00443, 0.0044, 0.00442, 0.00438, 0.00487, 0.00531, 0.00442, 0.00442, 0.00442, 0.00442, 0.00441, 0.00439, 0.00441, 0.0044, 0.00439, 0.0044, 0.00441, 0.00439, 0.00439, 0.0044, 0.0044, 0.00439, 0.00443, 0.00441, 0.00454, 0.00439, 0.00441, 0.0044, 0.00441, 0.00439, 0.00441, 0.00442, 0.0044, 0.00441, 0.00438, 0.0044, 0.00439, 0.0044, 0.0044, 0.00442, 0.0044, 0.0044, 0.0044, 0.00438, 0.0044, 0.0044, 0.0044, 0.0044, 0.0044, 0.00441, 0.00441, 0.0044, 0.00442, 0.0044, 0.00439, 0.00439, 0.00439, 0.00439, 0.00439, 0.0044, 0.00442, 0.00441, 0.00439, 0.00443, 0.00439, 0.0044, 0.0044, 0.00439, 0.0044, 0.0044, 0.00441, 0.0044, 0.00438, 0.00441, 0.00442, 0.0044, 0.00439, 0.00443, 0.00534, 0.00438, 0.00442, 0.0044, 0.0044, 0.00441, 0.00495, 0.00439, 0.00441, 0.00438, 0.00441, 0.00441, 0.0044, 0.00437, 0.00441, 0.00439, 0.0044, 0.00442, 0.0044, 0.00442, 0.00439, 0.00437, 0.00441, 0.0044, 0.00439, 0.0044, 0.00457, 0.00441, 0.00441, 0.00442, 0.00441, 0.00443, 0.00439, 0.00443, 0.00439, 0.00439, 0.00439, 0.00441, 0.00486, 0.00439, 0.00441, 0.00441, 0.00453, 0.0044, 
0.00437, 0.00441, 0.0044, 0.00442, 0.0044, 0.00442, 0.00441, 0.00441, 0.00439, 0.00439, 0.00441, 0.00438, 0.0044, 0.00442, 0.00443, 0.0044, 0.0044, 0.00442, 0.00441, 0.00439, 0.00442, 0.00441, 0.0044, 0.00439, 0.00438, 0.00439, 0.00442, 0.00439, 0.00441, 0.00439, 0.0044, 0.00441, 0.0044, 0.00442, 0.00443, 0.0044, 0.00438, 0.0044, 0.00439, 0.00444, 0.00439, 0.00442, 0.0044, 0.00439, 0.00441, 0.00439, 0.00442, 0.00439, 0.00438, 0.00439, 0.00438, 0.0044, 0.00442, 0.0044, 0.00438, 0.00442, 0.00443, 0.0044, 0.0044, 0.00439, 0.00441, 0.00439, 0.0044, 0.00444, 0.00455, 0.00442, 0.00443, 0.00441, 0.00442, 0.00442, 0.00443, 0.0044]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00313, 0.00096, 0.00097, 0.00093, 0.00094, 0.00094, 0.00094, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00094, 0.00092, 0.00093, 0.00092, 0.00094, 0.00092, 0.00093, 0.00093, 0.00093, 0.00092, 0.00092, 0.00092, 0.00092, 0.00094, 0.00092, 0.00093, 0.00093, 0.00092, 0.00092, 0.00092, 0.00092, 0.00093, 0.00092, 0.00092, 0.00092, 0.00099, 0.00092, 0.00093, 0.00094, 0.00093, 0.00092, 0.00092, 0.00092, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00092, 0.00092, 0.00092, 0.00092, 0.00092, 0.00092, 0.00096, 0.00093, 0.00093, 0.00092, 0.00093, 0.00092, 0.00092, 0.00092, 0.00094, 0.00093, 0.00092, 0.00093, 0.00092, 0.00093, 0.00093, 0.00092, 0.00093, 0.00092, 0.00097, 0.00095, 0.00092, 0.00093, 0.00093, 0.00092, 0.00099, 0.00095, 0.00093, 0.00094, 0.00093, 0.00092, 0.00093, 0.00092, 0.00093, 0.00094, 0.00095, 0.00093, 0.00093, 0.00093, 0.00092, 0.00092, 0.00093, 0.00093, 0.00092, 0.00093, 0.00093, 0.00093, 0.00094, 0.00095, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00092, 0.00093, 0.00092, 0.00092, 0.00093, 0.00092, 0.00094, 0.00093, 0.00093, 0.00092, 0.00092, 0.00093, 0.00092, 0.00093, 0.00093, 0.00093, 0.00092, 0.00092, 0.00093, 0.00093, 0.00092, 0.00092, 0.00094, 0.00093, 0.00092, 0.00093, 0.00094, 0.00094, 0.00092, 0.00093, 0.00092, 0.00093, 0.00092, 0.00093, 0.00092, 0.00093, 0.00093, 0.00092, 0.00092, 0.00093, 0.00092, 0.00093, 0.00094, 0.00093, 0.00092, 0.00094, 0.00093, 0.00093, 0.00092, 0.00093, 0.00095, 0.00093, 0.00092, 0.00092, 0.00093, 0.00094, 0.00092, 0.00092, 0.00093, 0.00093, 0.00093, 0.00092, 0.00094, 0.00094, 0.00092, 0.00094, 0.00092, 0.00093, 0.00093, 0.00092, 0.00093, 0.00092, 0.00093, 0.00092, 0.00092, 0.00094, 0.00093, 0.00093, 0.00092, 0.00092, 0.00093, 0.00092, 0.00092, 0.00093, 0.00094, 0.00093, 0.00094, 0.00093, 0.00093, 0.00093, 0.00093, 0.00095, 0.00092, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00092, 0.00094, 0.00093, 0.00095, 0.00094, 0.00094, 0.00092, 0.00093, 0.00093, 0.00094, 0.00093, 0.00093, 0.00094, 0.00092, 0.00093, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00097, 0.00093, 0.00092, 0.00094, 0.00092, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00093, 0.00092, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00094, 0.00094, 0.00093, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00094, 0.00092, 0.00094, 0.00093, 0.00092, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00092, 0.00092, 0.00095, 0.00093, 0.00093, 0.00093, 0.00093, 0.00092, 0.00094, 0.00093, 0.00092, 0.00094, 0.00093, 0.00093, 0.00093, 0.00092, 0.00094, 0.00093, 0.00093, 0.00092, 0.00093, 0.00094, 0.00093, 0.00093, 0.00092, 0.00093, 
0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00092, 0.00092, 0.00093, 0.00094, 0.00093, 0.00094, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00092, 0.00094, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00092, 0.00092, 0.00092, 0.00092, 0.00093, 0.00093, 0.00093, 0.00093, 0.00092, 0.00093, 0.00093, 0.00094, 0.00094, 0.00093, 0.00093, 0.00093, 0.00094, 0.00092, 0.00093, 0.00093, 0.00094, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00094, 0.00093, 0.00094, 0.00095, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00093, 0.00096, 0.00093, 0.00093, 0.00093, 0.00093, 0.00094, 0.00094, 0.00094]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0012, 0.001, 0.00119, 0.00096, 0.00096, 0.00096, 0.00097, 0.00096, 0.00096, 0.00096, 0.00095, 0.00096, 0.00097, 0.00095, 0.00096, 0.00096, 0.00096, 0.00096, 0.00096, 0.00096, 0.00097, 0.00096, 0.00096, 0.00095, 0.00096, 0.00097, 0.00096, 0.00095, 0.00096, 0.00096, 0.00096, 0.00096, 0.00096, 0.00095, 0.00095, 0.00095, 0.00096, 0.00104, 0.00096, 0.00095, 0.00097, 0.00095, 0.00096, 0.00096, 0.00096, 0.00096, 0.00096, 0.00095, 0.00096, 0.00096, 0.00097, 0.00096, 0.00096, 0.00095, 0.00096, 0.00095, 0.00096, 0.001, 0.00099, 0.00099, 0.00099, 0.00098, 0.00099, 0.00099, 0.00098, 0.00098, 0.00098, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.001, 0.00099, 0.001, 0.00098, 0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.00098, 0.00098, 0.00099, 0.00099, 0.00098, 0.00103, 0.00099, 0.00099, 0.00099, 0.001, 0.001, 0.001, 0.00099, 0.00099, 0.001, 0.00099, 0.00099, 0.00101, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.00103, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.001, 0.00101, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00098, 0.001, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.00098, 0.00099, 0.00099, 0.00099, 0.00099, 0.00098, 0.00098, 0.00099, 0.00098, 0.00098, 0.001, 0.001, 0.001, 0.00099, 0.001, 0.001, 0.00099, 0.00099, 0.00099, 0.00101, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.001, 0.00099, 0.00099, 0.00098, 0.00099, 0.00099, 0.001, 0.00099, 0.00098, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.00102, 0.00099, 0.00099, 0.00098, 0.001, 0.00099, 0.00099, 0.001, 0.00099, 0.00098, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.001, 0.00101, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.00099, 0.00101, 0.00099, 0.00099, 0.00099, 0.00101, 0.00099, 0.00099, 0.00099, 0.001, 0.00098, 0.001, 0.00099, 0.001, 0.00099, 0.00101, 0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.00101, 0.00099, 0.001, 0.00098, 0.00099, 0.00105, 0.00099, 0.00099, 0.00101, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.00102, 0.00098, 0.00098, 0.00099, 0.001, 0.00099, 0.001, 0.001, 0.001, 0.00098, 0.00101, 0.00099, 0.001, 0.00098, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.001, 0.00098, 0.00101, 0.00099, 0.00098, 0.00099, 0.00103, 0.00098, 0.00099, 0.00099, 0.001, 0.00098, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00106, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00101, 0.001, 0.00099, 0.001, 0.001, 0.001, 0.00098, 0.00099, 0.001, 0.00099, 0.00099, 
0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.00098, 0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.00099, 0.001, 0.00101, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.001, 0.00101, 0.00099, 0.00099, 0.00099, 0.001, 0.001, 0.001, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.001, 0.00101, 0.001, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.00098, 0.00099, 0.001, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.00099, 0.001, 0.001, 0.00099, 0.00099, 0.00099, 0.00099, 0.001, 0.00099, 0.001, 0.001, 0.00099, 0.00099, 0.00099, 0.001, 0.001, 0.001, 0.001, 0.00099, 0.00099, 0.001, 0.00101, 0.00099]}, "optimizer-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.63786, 0.00795, 0.00821, 0.00789, 0.00772, 0.00795, 0.00797, 0.00777, 0.00768, 0.00764, 0.00767, 0.00766, 0.0086, 0.00767, 0.00766, 0.00763, 0.00766, 0.00763, 0.00768, 0.0077, 0.00769, 0.0079, 0.00766, 0.00765, 0.00767, 0.00848, 0.00762, 0.00762, 0.0077, 0.00763, 0.0077, 0.0076, 0.00769, 0.00767, 0.00763, 0.00763, 0.00766, 0.0078, 0.00766, 0.00762, 0.00777, 0.00763, 0.00763, 0.00761, 0.00765, 0.00763, 0.00767, 0.00766, 0.00766, 0.00764, 0.00825, 0.00763, 0.00764, 0.00762, 0.00762, 0.00761, 0.00768, 0.00821, 0.00776, 0.00779, 0.00781, 0.00778, 0.00875, 0.00781, 0.00783, 0.00782, 0.00792, 0.00779, 0.00782, 0.00781, 0.00783, 0.00781, 0.0078, 0.00782, 0.0078, 0.00884, 0.00896, 0.00783, 0.00778, 0.00843, 0.00783, 0.00789, 0.00911, 0.0078, 0.00787, 0.00783, 0.00779, 0.00784, 0.00781, 0.00784, 0.00782, 0.00886, 0.00764, 0.00763, 0.00759, 0.00785, 0.00785, 0.0079, 0.00781, 0.0078, 0.00787, 0.00782, 0.00759, 0.00793, 0.00762, 0.00785, 0.00763, 0.00765, 0.00781, 0.00773, 0.00784, 0.00762, 0.0078, 0.00885, 0.00779, 0.00767, 0.00763, 0.00782, 0.00761, 0.0078, 0.00773, 0.00766, 0.00783, 0.00758, 0.00778, 0.00785, 0.00781, 0.00759, 0.00779, 0.00791, 0.00776, 0.0078, 0.00782, 0.0079, 0.00761, 0.00781, 0.00773, 0.0076, 0.00764, 0.0076, 0.0079, 0.00789, 0.00777, 0.00763, 0.00782, 0.00784, 0.00781, 0.00782, 0.00757, 0.0076, 0.00788, 0.0078, 0.00778, 0.00762, 0.0078, 0.00834, 0.00794, 0.00785, 0.00783, 0.00773, 0.0079, 0.0078, 0.00783, 0.0078, 0.00801, 0.00782, 0.0078, 0.0078, 0.00781, 0.00801, 0.00781, 0.00758, 0.0076, 0.00778, 0.00779, 0.0078, 0.00791, 0.00781, 0.00781, 0.00797, 0.00782, 0.00782, 0.0079, 0.0078, 0.00784, 0.00783, 0.00781, 0.00782, 0.00788, 0.0079, 0.00791, 0.0079, 0.00782, 0.00781, 0.00814, 0.0078, 0.00785, 0.00782, 0.00793, 0.00792, 0.008, 0.00785, 0.00786, 0.00784, 0.00782, 0.00866, 0.00784, 0.00789, 0.00784, 0.00787, 0.00839, 0.0088, 0.00783, 0.00783, 0.00785, 0.00793, 0.00785, 0.0079, 0.00785, 0.0078, 0.00782, 0.00791, 0.00786, 0.00781, 0.0079, 0.00782, 0.00783, 0.00783, 0.00783, 0.00782, 0.00798, 0.00781, 0.00795, 0.00782, 0.00782, 0.00791, 0.00782, 0.00789, 0.00781, 0.00782, 0.00779, 0.00782, 0.00781, 0.00795, 0.00784, 0.00781, 0.00787, 0.00782, 0.00781, 0.0078, 0.00791, 0.00784, 0.00796, 0.00798, 0.00782, 0.00782, 0.00785, 0.00784, 0.00818, 0.00781, 0.00787, 0.00783, 0.00781, 0.0078, 0.00782, 0.00781, 0.00794, 0.00793, 0.0078, 0.00794, 0.00789, 0.00786, 0.00784, 0.0079, 0.00782, 0.00783, 0.00781, 0.00784, 0.00779, 0.00782, 0.00783, 0.00781, 0.00781, 0.00789, 0.00881, 0.00824, 0.00789, 0.00781, 0.00781, 0.0078, 0.0085, 0.00783, 0.00782, 0.00779, 0.00783, 0.0078, 0.00797, 0.00779, 0.00784, 0.00789, 0.00782, 0.00783, 0.00779, 0.00782, 
0.00789, 0.00779, 0.00783, 0.00781, 0.00786, 0.00799, 0.00801, 0.0079, 0.00782, 0.00791, 0.00782, 0.00785, 0.00781, 0.00784, 0.00782, 0.00783, 0.00779, 0.00783, 0.0084, 0.00783, 0.00791, 0.00782, 0.00798, 0.00782, 0.0078, 0.00782, 0.00787, 0.00792, 0.0078, 0.00787, 0.00784, 0.00783, 0.00784, 0.00779, 0.00783, 0.00781, 0.00782, 0.00783, 0.00786, 0.00794, 0.00785, 0.00783, 0.00782, 0.00781, 0.00795, 0.00782, 0.00795, 0.00789, 0.00781, 0.00783, 0.00785, 0.00782, 0.00782, 0.0078, 0.00782, 0.00794, 0.00782, 0.00786, 0.00785, 0.00783, 0.0078, 0.00783, 0.0079, 0.00784, 0.00781, 0.00787, 0.00781, 0.0079, 0.00782, 0.00782, 0.00796, 0.00784, 0.00782, 0.00783, 0.00789, 0.00792, 0.00787, 0.00791, 0.00781, 0.00783, 0.00802, 0.00784, 0.00783, 0.00785, 0.00783, 0.00782, 0.00781, 0.00788, 0.00802, 0.00787, 0.00787, 0.00793, 0.00784, 0.00793, 0.00797, 0.00783]}, "learning-rate": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 
9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05]}, "batch-size": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 
128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 
128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0]}, "lm loss": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [10.88345, 10.90291, 10.88739, 10.83435, 10.68106, 10.65239, 10.43882, 10.15796, 9.94566, 9.85031, 9.59624, 9.85805, 9.88827, 9.63311, 9.79091, 9.51415, 9.46112, 9.65226, 9.38851, 9.33535, 9.24597, 9.15002, 9.1791, 9.00048, 9.19456, 9.06645, 9.16089, 9.17249, 9.30644, 8.99568, 8.93903, 9.04853, 9.05134, 8.65891, 8.72191, 8.75857, 8.68509, 8.7367, 8.66155, 8.76648, 8.66383, 8.85312, 8.83506, 8.49989, 8.39023, 8.43268, 8.49362, 8.38495, 8.4346, 8.58278, 8.36836, 8.19768, 8.22999, 8.22623, 8.27021, 7.91926, 8.10177, 7.89448, 8.24737, 8.23304, 8.007, 7.96876, 7.92354, 7.74219, 7.74672, 7.64691, 7.51972, 7.90702, 7.70393, 7.45184, 7.74158, 7.77006, 7.54684, 7.30265, 7.45642, 7.33883, 7.46797, 7.22942, 7.63514, 7.28131, 7.35335, 7.21286, 7.21895, 7.42346, 7.17843, 7.28509, 7.00192, 7.0089, 7.04286, 7.14056, 6.82835, 6.99014, 7.09279, 7.00447, 6.88003, 6.761, 6.99471, 7.0633, 6.70925, 6.5917, 6.73258, 6.74964, 6.73779, 6.74258, 6.66376, 6.41582, 6.64124, 6.62873, 6.45047, 6.63243, 6.75424, 6.61807, 6.73736, 6.70363, 6.63926, 6.51953, 6.61425, 6.42312, 6.67885, 6.26757, 6.26882, 6.32005, 6.41287, 6.37101, 6.46896, 6.31397, 6.36148, 6.25486, 6.22526, 6.42692, 6.35485, 6.35029, 6.19105, 6.18567, 6.26859, 6.415, 6.23334, 6.18337, 6.21035, 6.14535, 6.09626, 6.10387, 6.28772, 6.43606, 6.29503, 6.335, 6.13464, 6.21503, 6.02829, 6.06095, 5.9935, 6.28273, 6.22023, 5.99847, 5.81393, 6.16265, 5.87946, 6.14445, 5.82485, 6.19248, 6.18157, 6.12584, 5.97074, 6.14877, 5.98325, 6.23524, 5.93942, 5.83892, 5.82229, 5.72934, 6.05496, 6.0434, 6.11051, 5.93954, 6.09171, 6.01241, 6.04004, 6.0322, 5.99651, 5.89061, 6.00653, 5.67122, 5.75784, 5.94696, 5.9005, 5.91468, 5.82189, 5.89471, 5.77842, 5.61622, 5.78054, 5.69253, 5.90048, 5.66647, 5.77352, 5.78152, 5.97131, 5.71328, 5.92696, 5.81669, 5.94504, 
5.4175, 5.97213, 5.95642, 5.93165, 5.48932, 5.49949, 5.70719, 5.6873, 5.5725, 5.66702, 5.76913, 5.57229, 5.82826, 5.61559, 5.69173, 5.731, 5.73072, 5.62169, 5.71676, 5.78883, 5.80232, 5.67949, 5.77122, 5.47901, 5.79612, 5.73059, 5.53929, 5.69307, 5.7447, 5.6605, 5.44825, 5.66038, 5.60993, 5.60208, 5.50359, 5.67847, 5.72987, 5.52511, 5.65798, 5.63632, 5.4706, 5.64734, 5.55245, 5.58744, 5.44937, 5.20181, 5.63792, 5.72045, 5.87194, 5.56238, 5.74796, 5.79022, 5.38902, 5.44605, 5.54282, 5.55739, 5.49575, 5.64498, 5.33577, 5.45876, 5.42673, 5.5365, 5.42129, 5.62761, 5.71678, 5.48104, 5.60527, 5.5126, 5.25058, 5.49118, 5.43681, 5.48508, 5.28923, 5.46474, 5.45286, 5.6724, 5.35082, 5.46484, 5.40053, 5.54964, 5.16851, 5.10998, 5.5302, 5.59551, 5.43932, 5.53394, 5.2946, 5.37074, 5.47423, 5.2811, 5.46993, 5.28979, 5.57821, 5.48542, 5.37281, 5.45382, 5.27315, 5.53883, 5.2931, 5.25971, 5.35796, 5.33386, 5.5094, 5.38011, 5.51219, 5.30068, 5.34103, 5.49541, 5.54901, 5.50235, 5.43059, 5.39677, 5.52711, 5.19094, 5.45817, 5.34325, 5.56956, 5.41302, 5.43584, 5.37612, 5.25951, 5.25447, 5.49422, 5.5781, 5.35768, 5.3279, 5.19136, 5.4016, 5.39747, 5.20526, 5.61362, 5.29418, 5.39709, 5.44712, 5.30146, 5.34724, 5.36676, 5.28901, 5.361, 5.45905, 5.27649, 5.47318, 5.21725, 5.22023, 5.35122, 5.28396, 5.21834, 5.10071, 5.23602, 5.43096, 5.33142, 5.33017, 5.66246, 5.3004, 5.30692, 5.39386, 5.13475, 5.06957, 5.3365, 5.37793, 5.21244, 5.29887, 5.36995, 5.34675, 5.15473, 5.24757, 5.27856, 5.16172, 5.08869, 5.37568, 5.11393, 5.55309, 5.15317, 5.32295, 5.06795, 5.13265, 5.17242, 5.01042, 5.01637, 5.20515, 5.17193, 5.18392, 5.30507, 5.25233, 5.31569, 5.14154, 5.24356, 5.12106, 5.31092, 5.36465, 5.24729, 5.09639, 5.1804, 5.29568, 5.10464, 5.27827, 5.10619, 5.10892, 5.03572]}, "lm loss vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [10.88345, 10.90291, 10.88739, 10.83435, 10.68106, 10.65239, 10.43882, 10.15796, 9.94566, 9.85031, 9.59624, 9.85805, 9.88827, 9.63311, 9.79091, 9.51415, 9.46112, 9.65226, 9.38851, 9.33535, 9.24597, 9.15002, 9.1791, 9.00048, 9.19456, 9.06645, 9.16089, 9.17249, 9.30644, 8.99568, 8.93903, 9.04853, 9.05134, 8.65891, 8.72191, 8.75857, 8.68509, 8.7367, 8.66155, 8.76648, 8.66383, 8.85312, 8.83506, 8.49989, 8.39023, 8.43268, 8.49362, 8.38495, 8.4346, 8.58278, 8.36836, 8.19768, 8.22999, 8.22623, 8.27021, 7.91926, 8.10177, 7.89448, 8.24737, 8.23304, 8.007, 7.96876, 7.92354, 7.74219, 7.74672, 7.64691, 7.51972, 7.90702, 7.70393, 7.45184, 7.74158, 7.77006, 7.54684, 7.30265, 7.45642, 7.33883, 7.46797, 7.22942, 7.63514, 7.28131, 7.35335, 7.21286, 7.21895, 7.42346, 7.17843, 7.28509, 7.00192, 7.0089, 7.04286, 7.14056, 6.82835, 6.99014, 7.09279, 7.00447, 6.88003, 6.761, 6.99471, 7.0633, 6.70925, 6.5917, 6.73258, 6.74964, 6.73779, 6.74258, 6.66376, 6.41582, 6.64124, 6.62873, 6.45047, 6.63243, 6.75424, 6.61807, 6.73736, 6.70363, 6.63926, 6.51953, 6.61425, 6.42312, 6.67885, 6.26757, 6.26882, 6.32005, 6.41287, 6.37101, 6.46896, 6.31397, 6.36148, 6.25486, 6.22526, 6.42692, 6.35485, 6.35029, 6.19105, 6.18567, 6.26859, 6.415, 6.23334, 6.18337, 6.21035, 6.14535, 6.09626, 6.10387, 6.28772, 6.43606, 6.29503, 6.335, 6.13464, 6.21503, 6.02829, 6.06095, 5.9935, 6.28273, 6.22023, 5.99847, 5.81393, 6.16265, 5.87946, 6.14445, 5.82485, 6.19248, 6.18157, 6.12584, 5.97074, 6.14877, 5.98325, 6.23524, 5.93942, 5.83892, 5.82229, 5.72934, 6.05496, 6.0434, 6.11051, 5.93954, 6.09171, 6.01241, 6.04004, 6.0322, 5.99651, 5.89061, 6.00653, 5.67122, 5.75784, 5.94696, 5.9005, 5.91468, 5.82189, 5.89471, 5.77842, 
5.61622, 5.78054, 5.69253, 5.90048, 5.66647, 5.77352, 5.78152, 5.97131, 5.71328, 5.92696, 5.81669, 5.94504, 5.4175, 5.97213, 5.95642, 5.93165, 5.48932, 5.49949, 5.70719, 5.6873, 5.5725, 5.66702, 5.76913, 5.57229, 5.82826, 5.61559, 5.69173, 5.731, 5.73072, 5.62169, 5.71676, 5.78883, 5.80232, 5.67949, 5.77122, 5.47901, 5.79612, 5.73059, 5.53929, 5.69307, 5.7447, 5.6605, 5.44825, 5.66038, 5.60993, 5.60208, 5.50359, 5.67847, 5.72987, 5.52511, 5.65798, 5.63632, 5.4706, 5.64734, 5.55245, 5.58744, 5.44937, 5.20181, 5.63792, 5.72045, 5.87194, 5.56238, 5.74796, 5.79022, 5.38902, 5.44605, 5.54282, 5.55739, 5.49575, 5.64498, 5.33577, 5.45876, 5.42673, 5.5365, 5.42129, 5.62761, 5.71678, 5.48104, 5.60527, 5.5126, 5.25058, 5.49118, 5.43681, 5.48508, 5.28923, 5.46474, 5.45286, 5.6724, 5.35082, 5.46484, 5.40053, 5.54964, 5.16851, 5.10998, 5.5302, 5.59551, 5.43932, 5.53394, 5.2946, 5.37074, 5.47423, 5.2811, 5.46993, 5.28979, 5.57821, 5.48542, 5.37281, 5.45382, 5.27315, 5.53883, 5.2931, 5.25971, 5.35796, 5.33386, 5.5094, 5.38011, 5.51219, 5.30068, 5.34103, 5.49541, 5.54901, 5.50235, 5.43059, 5.39677, 5.52711, 5.19094, 5.45817, 5.34325, 5.56956, 5.41302, 5.43584, 5.37612, 5.25951, 5.25447, 5.49422, 5.5781, 5.35768, 5.3279, 5.19136, 5.4016, 5.39747, 5.20526, 5.61362, 5.29418, 5.39709, 5.44712, 5.30146, 5.34724, 5.36676, 5.28901, 5.361, 5.45905, 5.27649, 5.47318, 5.21725, 5.22023, 5.35122, 5.28396, 5.21834, 5.10071, 5.23602, 5.43096, 5.33142, 5.33017, 5.66246, 5.3004, 5.30692, 5.39386, 5.13475, 5.06957, 5.3365, 5.37793, 5.21244, 5.29887, 5.36995, 5.34675, 5.15473, 5.24757, 5.27856, 5.16172, 5.08869, 5.37568, 5.11393, 5.55309, 5.15317, 5.32295, 5.06795, 5.13265, 5.17242, 5.01042, 5.01637, 5.20515, 5.17193, 5.18392, 5.30507, 5.25233, 5.31569, 5.14154, 5.24356, 5.12106, 5.31092, 5.36465, 5.24729, 5.09639, 5.1804, 5.29568, 5.10464, 5.27827, 5.10619, 5.10892, 5.03572]}, "loss-scale": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [12.43997, 12.4994, 12.67738, 12.01981, 11.40989, 9.15396, 6.91154, 7.19653, 6.10097, 4.66447, 4.20211, 2.8807, 2.37647, 2.34175, 2.05101, 2.19366, 2.12083, 1.89191, 2.18481, 2.06821, 2.11865, 2.16674, 2.00167, 2.19993, 1.94652, 2.02914, 1.87967, 1.849, 1.87625, 2.13926, 2.1644, 1.83737, 1.7865, 2.10617, 2.09168, 2.03916, 1.97963, 1.83822, 1.96495, 1.70803, 2.13244, 1.91303, 1.67031, 1.85063, 1.89388, 1.7393, 1.73696, 1.73834, 1.81384, 1.54681, 1.72306, 1.83162, 1.75476, 1.78654, 1.54973, 1.8348, 1.71396, 1.79871, 1.46752, 1.54685, 1.64797, 1.57656, 1.70218, 1.63082, 1.61792, 1.6742, 1.70617, 1.4063, 1.49439, 1.5398, 1.39435, 1.372, 1.63172, 1.45579, 1.3529, 1.50085, 1.31258, 1.33724, 1.14869, 1.28976, 1.19311, 1.38603, 1.20251, 1.31173, 1.10965, 1.18009, 1.42638, 1.54885, 1.1348, 1.01505, 1.06293, 1.23147, 0.95714, 0.89268, 0.94079, 1.27319, 1.18212, 1.01407, 1.03886, 1.50527, 1.02205, 1.09161, 0.91857, 1.10077, 0.94051, 1.19162, 0.99345, 0.96782, 1.0889, 
0.98132, 1.29717, 0.8425, 1.11704, 0.95051, 1.15684, 0.97961, 0.94467, 1.05905, 0.93968, 1.14615, 0.96345, 0.97578, 1.19987, 0.96535, 1.25273, 1.46243, 1.21921, 0.99922, 1.14431, 1.34353, 1.06135, 1.14405, 1.10872, 1.1588, 0.94471, 1.01308, 0.94383, 0.99273, 0.97851, 0.89198, 1.09779, 1.31177, 1.05508, 0.91714, 1.0117, 1.28832, 1.09784, 1.19667, 0.92098, 0.98378, 1.03891, 1.07858, 1.29929, 0.94354, 1.06388, 1.50705, 1.0007, 1.35362, 1.28287, 0.84574, 1.11813, 1.1825, 1.04876, 1.12893, 1.16116, 1.12585, 1.11897, 1.15162, 1.30322, 1.20265, 1.018, 0.99879, 0.90328, 1.21092, 1.0701, 1.06218, 1.10403, 1.0926, 1.05063, 1.07573, 1.20003, 1.25848, 1.34649, 1.12066, 1.50822, 1.14324, 1.4787, 1.1305, 1.14505, 1.16533, 1.14287, 1.24641, 1.38816, 1.42518, 1.1866, 1.45857, 1.17698, 1.2263, 1.01505, 1.21325, 1.36272, 1.305, 1.19874, 1.18217, 1.01807, 1.24602, 1.46217, 1.22746, 1.20492, 1.3465, 1.12878, 1.16877, 1.06974, 1.08696, 1.6092, 1.25397, 1.20201, 1.08861, 1.34872, 1.27688, 1.5104, 1.30437, 1.05297, 1.3032, 1.2672, 1.36045, 1.15533, 1.08165, 1.20493, 1.17126, 1.18099, 1.25764, 1.52555, 1.33265, 1.17044, 1.32121, 1.21081, 1.39328, 1.50488, 1.28381, 1.24675, 1.23603, 1.3193, 1.29405, 1.23259, 1.07163, 1.1052, 1.24045, 1.37927, 1.50839, 1.32285, 1.38782, 1.13484, 1.21127, 2.00278, 1.36691, 1.32213, 1.37434, 1.00254, 1.08214, 1.17335, 1.41525, 1.25392, 1.43316, 1.39572, 1.31067, 1.2846, 1.09515, 1.18724, 1.20128, 1.30643, 1.23357, 1.11402, 1.17568, 1.29277, 1.22678, 1.1362, 1.18826, 1.25873, 1.2814, 1.22295, 1.02105, 1.29626, 1.3106, 1.38573, 1.28368, 1.04758, 1.13079, 1.06747, 1.51913, 1.45844, 1.11656, 1.1972, 1.22395, 1.4347, 1.41031, 1.11466, 1.5639, 1.36293, 1.24572, 1.4447, 1.25296, 1.14388, 1.12495, 1.31276, 1.35398, 1.2105, 1.44264, 1.16726, 1.19041, 1.35889, 1.20903, 1.15845, 1.12041, 1.06639, 1.2833, 1.21736, 1.18244, 1.41925, 1.21164, 1.17543, 1.27955, 1.27399, 1.23019, 1.33022, 1.24584, 1.546, 1.32952, 1.1706, 1.31643, 1.32431, 1.26323, 1.13097, 1.34316, 1.10348, 1.33974, 1.18037, 1.18919, 1.42354, 1.37144, 1.33382, 1.39443, 1.37347, 1.18285, 1.1776, 1.31269, 1.10901, 1.33507, 1.39353, 1.28869, 1.32106, 1.36384, 1.307, 1.2118, 1.20055, 1.076, 1.20907, 1.28103, 1.2481, 1.49609, 1.25261, 1.22933, 1.23135, 1.40382, 1.47949, 1.50263, 1.27893, 1.27615, 1.34666, 1.30354, 1.1997, 1.51644, 1.42165, 1.35804, 1.19426, 1.23401, 1.36501, 1.05637, 1.11768, 1.22237, 1.39349, 1.3636, 1.33587, 1.44787, 1.23775, 1.25341, 1.15189, 1.07392, 1.29463, 1.16475, 1.13311, 1.32307, 1.04489, 1.17108, 1.24996, 1.21235, 1.90656, 1.20192, 1.24416, 1.32035]}, "grad-norm vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [12.43997, 12.4994, 12.67738, 12.01981, 11.40989, 9.15396, 6.91154, 7.19653, 6.10097, 4.66447, 4.20211, 2.8807, 2.37647, 2.34175, 2.05101, 2.19366, 2.12083, 1.89191, 2.18481, 2.06821, 2.11865, 2.16674, 2.00167, 2.19993, 1.94652, 2.02914, 1.87967, 1.849, 1.87625, 2.13926, 2.1644, 1.83737, 1.7865, 2.10617, 2.09168, 2.03916, 1.97963, 1.83822, 1.96495, 1.70803, 2.13244, 1.91303, 1.67031, 1.85063, 1.89388, 1.7393, 1.73696, 1.73834, 1.81384, 1.54681, 1.72306, 1.83162, 1.75476, 1.78654, 1.54973, 1.8348, 1.71396, 1.79871, 1.46752, 1.54685, 1.64797, 1.57656, 1.70218, 1.63082, 1.61792, 1.6742, 1.70617, 1.4063, 1.49439, 1.5398, 1.39435, 1.372, 1.63172, 1.45579, 1.3529, 1.50085, 1.31258, 1.33724, 1.14869, 1.28976, 1.19311, 1.38603, 1.20251, 1.31173, 1.10965, 1.18009, 1.42638, 1.54885, 1.1348, 1.01505, 1.06293, 1.23147, 0.95714, 0.89268, 0.94079, 1.27319, 1.18212, 1.01407, 1.03886, 
1.50527, 1.02205, 1.09161, 0.91857, 1.10077, 0.94051, 1.19162, 0.99345, 0.96782, 1.0889, 0.98132, 1.29717, 0.8425, 1.11704, 0.95051, 1.15684, 0.97961, 0.94467, 1.05905, 0.93968, 1.14615, 0.96345, 0.97578, 1.19987, 0.96535, 1.25273, 1.46243, 1.21921, 0.99922, 1.14431, 1.34353, 1.06135, 1.14405, 1.10872, 1.1588, 0.94471, 1.01308, 0.94383, 0.99273, 0.97851, 0.89198, 1.09779, 1.31177, 1.05508, 0.91714, 1.0117, 1.28832, 1.09784, 1.19667, 0.92098, 0.98378, 1.03891, 1.07858, 1.29929, 0.94354, 1.06388, 1.50705, 1.0007, 1.35362, 1.28287, 0.84574, 1.11813, 1.1825, 1.04876, 1.12893, 1.16116, 1.12585, 1.11897, 1.15162, 1.30322, 1.20265, 1.018, 0.99879, 0.90328, 1.21092, 1.0701, 1.06218, 1.10403, 1.0926, 1.05063, 1.07573, 1.20003, 1.25848, 1.34649, 1.12066, 1.50822, 1.14324, 1.4787, 1.1305, 1.14505, 1.16533, 1.14287, 1.24641, 1.38816, 1.42518, 1.1866, 1.45857, 1.17698, 1.2263, 1.01505, 1.21325, 1.36272, 1.305, 1.19874, 1.18217, 1.01807, 1.24602, 1.46217, 1.22746, 1.20492, 1.3465, 1.12878, 1.16877, 1.06974, 1.08696, 1.6092, 1.25397, 1.20201, 1.08861, 1.34872, 1.27688, 1.5104, 1.30437, 1.05297, 1.3032, 1.2672, 1.36045, 1.15533, 1.08165, 1.20493, 1.17126, 1.18099, 1.25764, 1.52555, 1.33265, 1.17044, 1.32121, 1.21081, 1.39328, 1.50488, 1.28381, 1.24675, 1.23603, 1.3193, 1.29405, 1.23259, 1.07163, 1.1052, 1.24045, 1.37927, 1.50839, 1.32285, 1.38782, 1.13484, 1.21127, 2.00278, 1.36691, 1.32213, 1.37434, 1.00254, 1.08214, 1.17335, 1.41525, 1.25392, 1.43316, 1.39572, 1.31067, 1.2846, 1.09515, 1.18724, 1.20128, 1.30643, 1.23357, 1.11402, 1.17568, 1.29277, 1.22678, 1.1362, 1.18826, 1.25873, 1.2814, 1.22295, 1.02105, 1.29626, 1.3106, 1.38573, 1.28368, 1.04758, 1.13079, 1.06747, 1.51913, 1.45844, 1.11656, 1.1972, 1.22395, 1.4347, 1.41031, 1.11466, 1.5639, 1.36293, 1.24572, 1.4447, 1.25296, 1.14388, 1.12495, 1.31276, 1.35398, 1.2105, 1.44264, 1.16726, 1.19041, 1.35889, 1.20903, 1.15845, 1.12041, 1.06639, 1.2833, 1.21736, 1.18244, 1.41925, 1.21164, 1.17543, 1.27955, 1.27399, 1.23019, 1.33022, 1.24584, 1.546, 1.32952, 1.1706, 1.31643, 1.32431, 1.26323, 1.13097, 1.34316, 1.10348, 1.33974, 1.18037, 1.18919, 1.42354, 1.37144, 1.33382, 1.39443, 1.37347, 1.18285, 1.1776, 1.31269, 1.10901, 1.33507, 1.39353, 1.28869, 1.32106, 1.36384, 1.307, 1.2118, 1.20055, 1.076, 1.20907, 1.28103, 1.2481, 1.49609, 1.25261, 1.22933, 1.23135, 1.40382, 1.47949, 1.50263, 1.27893, 1.27615, 1.34666, 1.30354, 1.1997, 1.51644, 1.42165, 1.35804, 1.19426, 1.23401, 1.36501, 1.05637, 1.11768, 1.22237, 1.39349, 1.3636, 1.33587, 1.44787, 1.23775, 1.25341, 1.15189, 1.07392, 1.29463, 1.16475, 1.13311, 1.32307, 1.04489, 1.17108, 1.24996, 1.21235, 1.90656, 1.20192, 1.24416, 1.32035]}, "num-zeros": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [80.0, 89.0, 102.0, 88.0, 78.0, 115.0, 125.0, 114.0, 129.0, 106.0, 125.0, 179.0, 156.0, 184.0, 179.0, 191.0, 171.0, 216.0, 169.0, 200.0, 171.0, 184.0, 206.0, 173.0, 221.0, 181.0, 188.0, 209.0, 187.0, 188.0, 167.0, 165.0, 180.0, 204.0, 152.0, 155.0, 170.0, 179.0, 177.0, 197.0, 184.0, 162.0, 194.0, 184.0, 171.0, 206.0, 198.0, 200.0, 187.0, 238.0, 208.0, 173.0, 201.0, 145.0, 199.0, 194.0, 185.0, 173.0, 266.0, 238.0, 190.0, 195.0, 182.0, 188.0, 199.0, 262.0, 210.0, 233.0, 216.0, 199.0, 257.0, 213.0, 220.0, 243.0, 218.0, 215.0, 229.0, 219.0, 289.0, 212.0, 280.0, 229.0, 196.0, 274.0, 237.0, 246.0, 170.0, 203.0, 205.0, 236.0, 201.0, 203.0, 256.0, 220.0, 191.0, 173.0, 214.0, 225.0, 183.0, 151.0, 195.0, 174.0, 218.0, 189.0, 159.0, 151.0, 154.0, 154.0, 130.0, 202.0, 162.0, 186.0, 166.0, 187.0, 136.0, 145.0, 
168.0, 100.0, 161.0, 124.0, 138.0, 163.0, 108.0, 167.0, 129.0, 131.0, 141.0, 148.0, 128.0, 124.0, 137.0, 168.0, 133.0, 114.0, 139.0, 123.0, 161.0, 139.0, 133.0, 152.0, 122.0, 111.0, 135.0, 155.0, 158.0, 101.0, 134.0, 164.0, 136.0, 163.0, 110.0, 153.0, 116.0, 132.0, 120.0, 115.0, 108.0, 85.0, 97.0, 169.0, 112.0, 115.0, 134.0, 105.0, 114.0, 156.0, 115.0, 103.0, 125.0, 113.0, 121.0, 138.0, 114.0, 130.0, 122.0, 118.0, 88.0, 106.0, 113.0, 121.0, 134.0, 131.0, 118.0, 130.0, 93.0, 111.0, 114.0, 111.0, 106.0, 95.0, 105.0, 107.0, 107.0, 87.0, 112.0, 90.0, 116.0, 104.0, 135.0, 140.0, 102.0, 104.0, 142.0, 144.0, 121.0, 87.0, 99.0, 136.0, 115.0, 105.0, 126.0, 112.0, 126.0, 125.0, 115.0, 116.0, 121.0, 145.0, 109.0, 111.0, 103.0, 112.0, 129.0, 115.0, 130.0, 97.0, 119.0, 103.0, 116.0, 135.0, 109.0, 115.0, 109.0, 113.0, 119.0, 116.0, 105.0, 107.0, 105.0, 109.0, 113.0, 115.0, 101.0, 114.0, 109.0, 123.0, 111.0, 117.0, 106.0, 92.0, 103.0, 118.0, 116.0, 130.0, 99.0, 107.0, 121.0, 96.0, 124.0, 112.0, 134.0, 104.0, 115.0, 104.0, 113.0, 107.0, 119.0, 124.0, 116.0, 115.0, 123.0, 139.0, 117.0, 118.0, 110.0, 112.0, 124.0, 112.0, 104.0, 98.0, 108.0, 134.0, 108.0, 126.0, 123.0, 118.0, 120.0, 122.0, 141.0, 105.0, 81.0, 122.0, 131.0, 123.0, 122.0, 101.0, 129.0, 88.0, 131.0, 124.0, 110.0, 124.0, 130.0, 141.0, 109.0, 107.0, 95.0, 104.0, 136.0, 123.0, 121.0, 123.0, 111.0, 117.0, 142.0, 120.0, 111.0, 108.0, 86.0, 121.0, 115.0, 111.0, 125.0, 128.0, 93.0, 126.0, 116.0, 124.0, 94.0, 107.0, 107.0, 128.0, 106.0, 110.0, 128.0, 104.0, 105.0, 114.0, 118.0, 117.0, 99.0, 123.0, 108.0, 107.0, 126.0, 119.0, 121.0, 121.0, 107.0, 116.0, 116.0, 116.0, 126.0, 145.0, 132.0, 133.0, 125.0, 100.0, 98.0, 129.0, 118.0, 121.0, 105.0, 107.0, 95.0, 113.0, 106.0, 108.0, 94.0, 121.0, 139.0, 118.0, 101.0, 98.0, 111.0, 117.0, 112.0, 129.0, 113.0, 119.0, 103.0, 123.0, 124.0, 107.0, 121.0, 117.0, 126.0, 123.0, 103.0, 113.0, 131.0, 117.0, 128.0, 123.0, 103.0, 149.0, 113.0, 101.0, 122.0, 110.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [80.0, 89.0, 102.0, 88.0, 78.0, 115.0, 125.0, 114.0, 129.0, 106.0, 125.0, 179.0, 156.0, 184.0, 179.0, 191.0, 171.0, 216.0, 169.0, 200.0, 171.0, 184.0, 206.0, 173.0, 221.0, 181.0, 188.0, 209.0, 187.0, 188.0, 167.0, 165.0, 180.0, 204.0, 152.0, 155.0, 170.0, 179.0, 177.0, 197.0, 184.0, 162.0, 194.0, 184.0, 171.0, 206.0, 198.0, 200.0, 187.0, 238.0, 208.0, 173.0, 201.0, 145.0, 199.0, 194.0, 185.0, 173.0, 266.0, 238.0, 190.0, 195.0, 182.0, 188.0, 199.0, 262.0, 210.0, 233.0, 216.0, 199.0, 257.0, 213.0, 220.0, 243.0, 218.0, 215.0, 229.0, 219.0, 289.0, 212.0, 280.0, 229.0, 196.0, 274.0, 237.0, 246.0, 170.0, 203.0, 205.0, 236.0, 201.0, 203.0, 256.0, 220.0, 191.0, 173.0, 214.0, 225.0, 183.0, 151.0, 195.0, 174.0, 218.0, 189.0, 159.0, 151.0, 154.0, 154.0, 130.0, 202.0, 162.0, 186.0, 166.0, 187.0, 136.0, 145.0, 168.0, 100.0, 161.0, 124.0, 138.0, 163.0, 108.0, 167.0, 129.0, 131.0, 141.0, 148.0, 128.0, 124.0, 137.0, 168.0, 133.0, 114.0, 139.0, 123.0, 161.0, 139.0, 133.0, 152.0, 122.0, 111.0, 135.0, 155.0, 158.0, 101.0, 134.0, 164.0, 136.0, 163.0, 110.0, 153.0, 116.0, 132.0, 120.0, 115.0, 108.0, 85.0, 97.0, 169.0, 112.0, 115.0, 134.0, 105.0, 114.0, 156.0, 115.0, 103.0, 125.0, 113.0, 121.0, 138.0, 114.0, 130.0, 122.0, 118.0, 88.0, 106.0, 113.0, 121.0, 134.0, 131.0, 118.0, 130.0, 93.0, 111.0, 114.0, 111.0, 106.0, 95.0, 105.0, 107.0, 107.0, 87.0, 112.0, 90.0, 116.0, 104.0, 135.0, 140.0, 102.0, 104.0, 142.0, 144.0, 121.0, 87.0, 99.0, 136.0, 115.0, 105.0, 126.0, 112.0, 126.0, 125.0, 115.0, 116.0, 
121.0, 145.0, 109.0, 111.0, 103.0, 112.0, 129.0, 115.0, 130.0, 97.0, 119.0, 103.0, 116.0, 135.0, 109.0, 115.0, 109.0, 113.0, 119.0, 116.0, 105.0, 107.0, 105.0, 109.0, 113.0, 115.0, 101.0, 114.0, 109.0, 123.0, 111.0, 117.0, 106.0, 92.0, 103.0, 118.0, 116.0, 130.0, 99.0, 107.0, 121.0, 96.0, 124.0, 112.0, 134.0, 104.0, 115.0, 104.0, 113.0, 107.0, 119.0, 124.0, 116.0, 115.0, 123.0, 139.0, 117.0, 118.0, 110.0, 112.0, 124.0, 112.0, 104.0, 98.0, 108.0, 134.0, 108.0, 126.0, 123.0, 118.0, 120.0, 122.0, 141.0, 105.0, 81.0, 122.0, 131.0, 123.0, 122.0, 101.0, 129.0, 88.0, 131.0, 124.0, 110.0, 124.0, 130.0, 141.0, 109.0, 107.0, 95.0, 104.0, 136.0, 123.0, 121.0, 123.0, 111.0, 117.0, 142.0, 120.0, 111.0, 108.0, 86.0, 121.0, 115.0, 111.0, 125.0, 128.0, 93.0, 126.0, 116.0, 124.0, 94.0, 107.0, 107.0, 128.0, 106.0, 110.0, 128.0, 104.0, 105.0, 114.0, 118.0, 117.0, 99.0, 123.0, 108.0, 107.0, 126.0, 119.0, 121.0, 121.0, 107.0, 116.0, 116.0, 116.0, 126.0, 145.0, 132.0, 133.0, 125.0, 100.0, 98.0, 129.0, 118.0, 121.0, 105.0, 107.0, 95.0, 113.0, 106.0, 108.0, 94.0, 121.0, 139.0, 118.0, 101.0, 98.0, 111.0, 117.0, 112.0, 129.0, 113.0, 119.0, 103.0, 123.0, 124.0, 107.0, 121.0, 117.0, 126.0, 123.0, 103.0, 113.0, 131.0, 117.0, 128.0, 123.0, 103.0, 149.0, 113.0, 101.0, 122.0, 110.0]}, "params-norm": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [179.95625, 179.95625, 179.95625, 179.95625, 179.95625, 179.95624, 179.95622, 179.95612, 179.95593, 179.95575, 179.95451, 179.95384, 179.95331, 179.95131, 179.95029, 179.94963, 179.94899, 179.94896, 179.94923, 179.94928, 179.94922, 179.94897, 179.94885, 179.9491, 179.94991, 179.951, 179.95213, 179.95309, 179.95415, 179.95551, 179.9574, 179.95952, 179.96179, 179.96399, 179.96649, 179.96965, 179.97318, 179.97679, 179.98051, 179.98468, 179.98955, 179.99477, 180.00044, 180.00658, 180.01337, 180.02075, 180.02858, 180.03702, 180.04625, 180.05624, 180.06699, 180.0782, 180.09018, 180.10277, 180.11606, 180.12999, 180.14421, 180.159, 180.17467, 180.19148, 180.20897, 180.22713, 180.24684, 180.26782, 180.2896, 180.31204, 180.33545, 180.35973, 180.38542, 180.41144, 180.43797, 180.46524, 180.4928, 180.52104, 180.54993, 180.57939, 180.60922, 180.63998, 180.67151, 180.70398, 180.73651, 180.76875, 180.80157, 180.83536, 180.86948, 180.90508, 180.9411, 180.97647, 181.01176, 181.04828, 181.08588, 181.12448, 181.16327, 181.20253, 181.24295, 181.28366, 181.32249, 181.35963, 181.39644, 181.43352, 181.47067, 181.50752, 181.54518, 181.58394, 181.62318, 181.66335, 181.7032, 181.74304, 181.78291, 181.82195, 181.86037, 181.89832, 181.93773, 181.97792, 182.01897, 182.05927, 182.09976, 182.14062, 182.18091, 182.22133, 182.26169, 182.30261, 182.34355, 182.38451, 182.4248, 182.46426, 182.50208, 182.53731, 182.57451, 182.61168, 182.64999, 182.68562, 182.72139, 182.75731, 182.79347, 182.83156, 182.87192, 182.91328, 182.95439, 182.99614, 183.03891, 183.07968, 183.12061, 183.16183, 183.20284, 183.24399, 183.28496, 183.325, 183.3662, 183.40788, 183.45087, 183.49307, 183.53464, 183.57661, 183.61989, 183.66231, 183.70183, 183.7419, 183.78094, 183.81953, 183.86018, 183.90375, 183.94774, 183.9931, 184.03831, 184.08267, 184.12688, 184.16986, 184.21062, 184.25189, 184.29411, 184.3373, 184.38132, 184.42554, 184.46965, 184.51401, 184.55882, 184.60381, 184.64806, 184.69025, 184.73256, 184.7748, 184.817, 184.86073, 184.90417, 184.94685, 184.98766, 185.02675, 185.06696, 185.10852, 185.15274, 185.19722, 185.24055, 185.28352, 185.32553, 185.36723, 185.40932, 185.45212, 185.49559, 185.54068, 185.58374, 
185.62703, 185.6687, 185.71231, 185.75662, 185.80209, 185.84537, 185.88788, 185.93077, 185.97299, 186.01599, 186.05911, 186.10475, 186.15176, 186.19826, 186.24303, 186.28674, 186.33194, 186.377, 186.42128, 186.46397, 186.50703, 186.55083, 186.59554, 186.63943, 186.68254, 186.72632, 186.77109, 186.81587, 186.86107, 186.90485, 186.94669, 186.9883, 187.03162, 187.07474, 187.11856, 187.16187, 187.20621, 187.25069, 187.29416, 187.33778, 187.38162, 187.42618, 187.47089, 187.51416, 187.56001, 187.60674, 187.6539, 187.70016, 187.74496, 187.7905, 187.83824, 187.88522, 187.93312, 187.98019, 188.02357, 188.06801, 188.11484, 188.1615, 188.21011, 188.26111, 188.31125, 188.35876, 188.4053, 188.45084, 188.49641, 188.54265, 188.58983, 188.64067, 188.69183, 188.74222, 188.79266, 188.84273, 188.89304, 188.94508, 188.99475, 189.04398, 189.09485, 189.14598, 189.1965, 189.24777, 189.29964, 189.35378, 189.40587, 189.45831, 189.50987, 189.56148, 189.61368, 189.66797, 189.71982, 189.77005, 189.81833, 189.86722, 189.91873, 189.97101, 190.02145, 190.07199, 190.12384, 190.17366, 190.22346, 190.27402, 190.3253, 190.37793, 190.43097, 190.48424, 190.53532, 190.58551, 190.63808, 190.69084, 190.74536, 190.79968, 190.85349, 190.90894, 190.96626, 191.02402, 191.08208, 191.13948, 191.19746, 191.25615, 191.31114, 191.36597, 191.4203, 191.47542, 191.53027, 191.58527, 191.63684, 191.68701, 191.73514, 191.78677, 191.83801, 191.8905, 191.94266, 191.99596, 192.05061, 192.1071, 192.16386, 192.21751, 192.27289, 192.32852, 192.37949, 192.43187, 192.48483, 192.53804, 192.59248, 192.64667, 192.70181, 192.75798, 192.81502, 192.87016, 192.92496, 192.98015, 193.03481, 193.09019, 193.14693, 193.20465, 193.26526, 193.32504, 193.38451, 193.44281, 193.49977, 193.55804, 193.61533, 193.67177, 193.72891, 193.78667, 193.84259, 193.89799, 193.95425, 194.01086, 194.06876, 194.12726, 194.18596, 194.24385, 194.30168, 194.35782, 194.41516, 194.47411, 194.53342, 194.59587, 194.65793, 194.71797, 194.77441, 194.83284, 194.88989, 194.94766, 195.00539, 195.06413, 195.12605, 195.19096, 195.25722, 195.32449, 195.39157, 195.45724, 195.52281, 195.58981, 195.65671, 195.7216, 195.78194, 195.84415, 195.90858]}, "params-norm vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [179.95625, 179.95625, 179.95625, 179.95625, 179.95625, 179.95624, 179.95622, 179.95612, 179.95593, 179.95575, 179.95451, 179.95384, 179.95331, 179.95131, 179.95029, 179.94963, 179.94899, 179.94896, 179.94923, 179.94928, 179.94922, 179.94897, 179.94885, 179.9491, 179.94991, 179.951, 179.95213, 179.95309, 179.95415, 179.95551, 179.9574, 179.95952, 179.96179, 179.96399, 179.96649, 179.96965, 179.97318, 179.97679, 179.98051, 179.98468, 179.98955, 179.99477, 180.00044, 180.00658, 180.01337, 180.02075, 180.02858, 180.03702, 180.04625, 180.05624, 180.06699, 180.0782, 180.09018, 180.10277, 180.11606, 180.12999, 180.14421, 180.159, 180.17467, 180.19148, 180.20897, 180.22713, 180.24684, 180.26782, 180.2896, 180.31204, 180.33545, 180.35973, 180.38542, 180.41144, 180.43797, 180.46524, 180.4928, 180.52104, 180.54993, 180.57939, 180.60922, 180.63998, 180.67151, 180.70398, 180.73651, 180.76875, 180.80157, 180.83536, 180.86948, 180.90508, 180.9411, 180.97647, 181.01176, 181.04828, 181.08588, 181.12448, 181.16327, 181.20253, 181.24295, 181.28366, 181.32249, 181.35963, 181.39644, 181.43352, 181.47067, 181.50752, 181.54518, 181.58394, 181.62318, 181.66335, 181.7032, 181.74304, 181.78291, 181.82195, 181.86037, 181.89832, 181.93773, 181.97792, 182.01897, 182.05927, 182.09976, 
182.14062, 182.18091, 182.22133, 182.26169, 182.30261, 182.34355, 182.38451, 182.4248, 182.46426, 182.50208, 182.53731, 182.57451, 182.61168, 182.64999, 182.68562, 182.72139, 182.75731, 182.79347, 182.83156, 182.87192, 182.91328, 182.95439, 182.99614, 183.03891, 183.07968, 183.12061, 183.16183, 183.20284, 183.24399, 183.28496, 183.325, 183.3662, 183.40788, 183.45087, 183.49307, 183.53464, 183.57661, 183.61989, 183.66231, 183.70183, 183.7419, 183.78094, 183.81953, 183.86018, 183.90375, 183.94774, 183.9931, 184.03831, 184.08267, 184.12688, 184.16986, 184.21062, 184.25189, 184.29411, 184.3373, 184.38132, 184.42554, 184.46965, 184.51401, 184.55882, 184.60381, 184.64806, 184.69025, 184.73256, 184.7748, 184.817, 184.86073, 184.90417, 184.94685, 184.98766, 185.02675, 185.06696, 185.10852, 185.15274, 185.19722, 185.24055, 185.28352, 185.32553, 185.36723, 185.40932, 185.45212, 185.49559, 185.54068, 185.58374, 185.62703, 185.6687, 185.71231, 185.75662, 185.80209, 185.84537, 185.88788, 185.93077, 185.97299, 186.01599, 186.05911, 186.10475, 186.15176, 186.19826, 186.24303, 186.28674, 186.33194, 186.377, 186.42128, 186.46397, 186.50703, 186.55083, 186.59554, 186.63943, 186.68254, 186.72632, 186.77109, 186.81587, 186.86107, 186.90485, 186.94669, 186.9883, 187.03162, 187.07474, 187.11856, 187.16187, 187.20621, 187.25069, 187.29416, 187.33778, 187.38162, 187.42618, 187.47089, 187.51416, 187.56001, 187.60674, 187.6539, 187.70016, 187.74496, 187.7905, 187.83824, 187.88522, 187.93312, 187.98019, 188.02357, 188.06801, 188.11484, 188.1615, 188.21011, 188.26111, 188.31125, 188.35876, 188.4053, 188.45084, 188.49641, 188.54265, 188.58983, 188.64067, 188.69183, 188.74222, 188.79266, 188.84273, 188.89304, 188.94508, 188.99475, 189.04398, 189.09485, 189.14598, 189.1965, 189.24777, 189.29964, 189.35378, 189.40587, 189.45831, 189.50987, 189.56148, 189.61368, 189.66797, 189.71982, 189.77005, 189.81833, 189.86722, 189.91873, 189.97101, 190.02145, 190.07199, 190.12384, 190.17366, 190.22346, 190.27402, 190.3253, 190.37793, 190.43097, 190.48424, 190.53532, 190.58551, 190.63808, 190.69084, 190.74536, 190.79968, 190.85349, 190.90894, 190.96626, 191.02402, 191.08208, 191.13948, 191.19746, 191.25615, 191.31114, 191.36597, 191.4203, 191.47542, 191.53027, 191.58527, 191.63684, 191.68701, 191.73514, 191.78677, 191.83801, 191.8905, 191.94266, 191.99596, 192.05061, 192.1071, 192.16386, 192.21751, 192.27289, 192.32852, 192.37949, 192.43187, 192.48483, 192.53804, 192.59248, 192.64667, 192.70181, 192.75798, 192.81502, 192.87016, 192.92496, 192.98015, 193.03481, 193.09019, 193.14693, 193.20465, 193.26526, 193.32504, 193.38451, 193.44281, 193.49977, 193.55804, 193.61533, 193.67177, 193.72891, 193.78667, 193.84259, 193.89799, 193.95425, 194.01086, 194.06876, 194.12726, 194.18596, 194.24385, 194.30168, 194.35782, 194.41516, 194.47411, 194.53342, 194.59587, 194.65793, 194.71797, 194.77441, 194.83284, 194.88989, 194.94766, 195.00539, 195.06413, 195.12605, 195.19096, 195.25722, 195.32449, 195.39157, 195.45724, 195.52281, 195.58981, 195.65671, 195.7216, 195.78194, 195.84415, 195.90858]}, "iteration-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [18.92793, 0.51136, 0.50959, 0.5023, 0.50706, 0.49889, 0.49918, 0.50787, 0.50805, 0.50023, 0.51244, 0.49782, 0.5011, 0.49829, 0.50242, 0.49765, 0.50512, 0.50815, 0.51211, 0.49886, 0.50327, 0.50436, 0.50354, 0.4972, 0.49868, 0.50277, 0.49981, 0.50008, 0.50203, 0.49718, 0.60026, 0.49876, 0.49477, 0.5046, 0.51537, 0.5196, 0.49706, 0.49993, 0.49908, 0.49804, 0.4994, 0.49794, 
0.50015, 0.49859, 0.49669, 0.49649, 0.59124, 0.49837, 0.50138, 0.49717, 0.49966, 0.50461, 0.4977, 0.49673, 0.5025, 0.49998, 0.49865, 0.50151, 0.50846, 0.51111, 0.50552, 0.50429, 0.50589, 0.50627, 0.50795, 0.505, 0.50478, 0.50608, 0.5063, 0.50392, 0.50528, 0.50464, 0.50852, 0.50732, 0.50975, 0.70338, 0.50322, 0.50607, 0.5008, 0.51264, 0.50202, 0.51117, 0.50466, 0.50856, 0.50482, 0.5101, 0.50604, 0.50708, 0.50371, 0.50732, 0.50754, 0.50725, 0.50576, 0.50944, 0.50954, 0.50758, 0.50654, 0.5929, 0.50552, 0.50521, 0.50353, 0.50768, 0.50269, 0.50818, 0.50339, 0.50584, 0.50369, 0.50801, 0.50311, 0.50501, 0.50259, 0.50478, 0.50477, 0.50612, 0.50304, 0.5048, 0.50419, 0.50917, 0.50259, 0.59305, 0.71675, 0.50782, 0.50595, 0.50366, 0.50416, 0.5131, 0.50874, 0.50202, 0.5075, 0.50344, 0.50969, 0.50236, 0.50738, 0.5042, 0.50968, 0.50453, 0.50797, 0.50316, 0.50801, 0.50385, 0.51048, 0.50461, 0.60109, 0.50835, 0.50599, 0.50503, 0.50405, 0.50686, 0.50365, 0.50633, 0.51394, 0.507, 0.50416, 0.5072, 0.50187, 0.50987, 0.50554, 0.50964, 0.49997, 0.5086, 0.50287, 0.50901, 0.51253, 0.51268, 0.59174, 0.63218, 0.50352, 0.50458, 0.50663, 0.50624, 0.50529, 0.50834, 0.50628, 0.50536, 0.50697, 0.50514, 0.5058, 0.5064, 0.51003, 0.50482, 0.50622, 0.50306, 0.50955, 0.50288, 0.51052, 0.50915, 0.50819, 0.50518, 0.50395, 0.50908, 0.50261, 0.5111, 0.59558, 0.50726, 0.50659, 0.50692, 0.50765, 0.50516, 0.51034, 0.50537, 0.49111, 0.50535, 0.50465, 0.50275, 0.50558, 0.5014, 0.5079, 0.5078, 0.50568, 0.5069, 0.50614, 0.50631, 0.5066, 0.50398, 0.50618, 0.50721, 0.51171, 0.50602, 0.50818, 0.50511, 0.51286, 0.50398, 0.50849, 0.50801, 0.50817, 0.50985, 0.50547, 0.50729, 0.50608, 0.59229, 0.50801, 0.50242, 0.51408, 0.50883, 0.5042, 0.508, 0.51821, 0.50964, 0.50309, 0.51214, 0.59459, 0.51016, 0.50757, 0.51259, 0.50854, 0.50258, 0.50468, 0.50579, 0.50859, 0.50372, 0.50798, 0.50757, 0.51184, 0.50914, 0.50776, 0.50432, 0.50917, 0.50287, 0.50616, 0.50167, 0.5065, 0.50145, 0.51091, 0.50163, 0.51326, 0.50092, 0.50601, 0.50447, 0.50502, 0.50274, 0.50572, 0.50976, 0.5047, 0.50868, 0.50316, 0.52048, 0.50699, 0.61568, 0.50722, 0.5088, 0.50773, 0.50579, 0.50532, 0.50689, 0.50615, 0.50762, 0.5023, 0.50258, 0.50262, 0.51065, 0.50567, 0.50633, 0.50361, 0.50893, 0.50511, 0.50936, 0.59793, 0.60202, 0.51102, 0.50683, 0.50341, 0.50975, 0.50313, 0.51068, 0.50494, 0.5094, 0.50552, 0.5077, 0.50574, 0.50655, 0.51164, 0.50641, 0.50789, 0.50671, 0.61258, 0.50815, 0.50767, 0.50856, 0.51335, 0.5105, 0.50233, 0.50903, 0.50975, 0.50328, 0.50987, 0.50357, 0.50951, 0.50423, 0.50818, 0.50563, 0.50771, 0.50968, 0.50443, 0.50847, 0.50717, 0.50752, 0.50453, 0.50914, 0.50657, 0.50601, 0.51204, 0.50439, 0.59526, 0.50772, 0.50461, 0.51966, 0.50388, 0.50764, 0.50335, 0.51566, 0.50622, 0.50664, 0.50857, 0.51175, 0.50837, 0.50352, 0.50963, 0.50442, 0.50747, 0.50672, 0.50844, 0.50629, 0.50717, 0.5071, 0.50387, 0.5066, 0.50594, 0.50388, 0.50981, 0.50538, 0.5055, 0.50641, 0.50813, 0.50422, 0.50345, 0.50462, 0.50731, 0.50278, 0.50356, 0.50701, 0.5066, 0.5073, 0.51, 0.50394, 0.50873, 0.50751, 0.50848, 0.59448, 0.50862, 0.5117, 0.50484, 0.51229, 0.50735, 0.50392, 0.50744, 0.50609, 0.50765, 0.51917, 0.51153, 0.50229]}, "lm loss validation": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [5.68727]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [5.68727]}, "lm loss validation ppl": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [295.08755]}, "lm loss validation ppl vs samples": {"start_step": 0, 
"end_step": 4, "step_interval": 5, "values": [295.08755]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json new file mode 100644 index 000000000..c759ae475 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.8833, + 10.90244, + 10.88662, + 10.83318, + 10.6762, + 10.64934, + 10.43397, + 10.15132, + 9.93913, + 9.84134, + 9.5886, + 9.85452, + 9.88457, + 9.62953, + 9.78805, + 9.51138, + 9.45839, + 9.64923, + 9.38614, + 9.33215, + 9.24219, + 9.14557, + 9.17566, + 8.99559, + 9.18951, + 9.06004, + 9.15559, + 9.16505, + 9.29785, + 8.9846, + 8.92921, + 9.04387, + 9.04308, + 8.65511, + 8.71722, + 8.75347, + 8.68373, + 8.73448, + 8.65881, + 8.76509, + 8.66102, + 8.85001, + 8.83242, + 8.49967, + 8.3894, + 8.43185, + 8.49362, + 8.38492, + 8.43303, + 8.58006, + 8.36747, + 8.19262, + 8.22634, + 8.22256, + 8.26796, + 7.91388, + 8.09614, + 7.89146, + 8.2469, + 8.23091, + 8.00558, + 7.96607, + 7.91878, + 7.74064, + 7.74043, + 7.64353, + 7.51615, + 7.90743, + 7.69899, + 7.45239, + 7.74097, + 7.76829, + 7.54181, + 7.29901, + 7.45239, + 7.33607, + 7.46255, + 7.22408, + 7.63701, + 7.27971, + 7.35197, + 7.21312, + 7.21651, + 7.42255, + 7.17701, + 7.28049, + 7.00057, + 7.00362, + 7.0382, + 7.13584, + 6.82274, + 6.98508, + 7.08808, + 7.00046, + 6.87376, + 6.75595, + 6.99172, + 7.05761, + 6.70449, + 6.5819, + 6.72818, + 6.74414, + 6.73568, + 6.74025, + 6.65976, + 6.4086, + 6.64092, + 6.621, + 6.44769, + 6.63067, + 6.74419, + 6.61028, + 6.72574, + 6.69594, + 6.62546, + 6.50829, + 6.60018, + 6.40775, + 6.66564, + 6.25029, + 6.2517, + 6.30277, + 6.39006, + 6.34934, + 6.45014, + 6.29146, + 6.34189, + 6.23672, + 6.20135, + 6.39859, + 6.32501, + 6.32243, + 6.16493, + 6.15827, + 6.23907, + 6.38353, + 6.19887, + 6.14407, + 6.17562, + 6.10888, + 6.05387, + 6.06583, + 6.25304, + 6.40434, + 6.25162, + 6.29199, + 6.09114, + 6.17247, + 5.99466, + 6.02134, + 5.95061, + 6.23865, + 6.17959, + 5.95837, + 5.77693, + 6.11779, + 5.84072, + 6.09813, + 5.78476, + 6.15517, + 6.14253, + 6.08389, + 5.92776, + 6.11285, + 5.94312, + 6.19361, + 5.89575, + 5.79177, + 5.77658, + 5.68463, + 6.01517, + 5.99439, + 6.06379, + 5.88864, + 6.03938, + 5.96752, + 5.99173, + 5.98642, + 5.94693, + 5.83816, + 5.95021, + 5.61696, + 5.69931, + 5.88617, + 5.8418, + 5.85952, + 5.76089, + 5.83643, + 5.72472, + 5.55795, + 5.72279, + 5.62456, + 5.83384, + 5.60371, + 5.70964, + 5.71305, + 5.90077, + 5.64296, + 5.84721, + 5.73799, + 5.87065, + 5.32845, + 5.89503, + 5.87432, + 5.85262, + 5.4122, + 5.40753, + 5.6225, + 5.59374, + 5.48037, + 5.56952, + 5.67164, + 5.474, + 5.74128, + 5.50855, + 5.59254, + 5.62042, + 5.6173, + 5.50903, + 5.61307, + 5.6694, + 5.68176, + 5.58253, + 5.66074, + 5.37239, + 5.67835, + 5.62699, + 5.41742, + 5.58719, + 5.62981, + 5.55162, + 5.33784, + 5.53833, + 5.48177, + 5.48342, + 5.37902, + 5.55461, + 5.60113, + 5.38725, + 5.52265, + 5.48637, + 5.32902, + 5.50379, + 5.40804, + 5.44024, + 5.31412, + 5.06315, + 5.47637, + 5.56625, + 5.71066, + 5.41144, + 5.59641, + 5.6328, + 5.23123, + 5.27182, + 5.39253, + 5.39442, + 5.32567, + 5.49583, + 5.18092, + 5.2993, + 
5.24857, + 5.37717, + 5.25715, + 5.44127, + 5.53765, + 5.3134, + 5.43978, + 5.33655, + 5.07222, + 5.31412, + 5.25439, + 5.30253, + 5.10951, + 5.27338, + 5.26801, + 5.47298, + 5.15965, + 5.26921, + 5.20696, + 5.35595, + 4.98275, + 4.91391, + 5.32139, + 5.38782, + 5.22672, + 5.31644, + 5.10423, + 5.15896, + 5.26163, + 5.06463, + 5.26136, + 5.07195, + 5.33749, + 5.24642, + 5.14987, + 5.23852, + 5.03778, + 5.31313, + 5.04992, + 5.02354, + 5.14081, + 5.10984, + 5.26921, + 5.14803, + 5.27454, + 5.09393, + 5.09412, + 5.24833, + 5.31694, + 5.25175, + 5.18843, + 5.14133, + 5.28374, + 4.94582, + 5.20544, + 5.08881, + 5.30053, + 5.17192, + 5.18279, + 5.11003, + 4.98355, + 4.99209, + 5.21882, + 5.30942, + 5.09283, + 5.05041, + 4.91204, + 5.11771, + 5.1167, + 4.92322, + 5.33275, + 5.01952, + 5.10011, + 5.15937, + 5.00254, + 5.05909, + 5.06306, + 4.98904, + 5.07423, + 5.15838, + 4.97483, + 5.17683, + 4.92747, + 4.91596, + 5.06215, + 4.99131, + 4.90548, + 4.76895, + 4.93875, + 5.1077, + 5.01313, + 5.01358, + 5.32429, + 4.95302, + 4.99177, + 5.03879, + 4.79987, + 4.73503, + 4.9917, + 5.03536, + 4.87166, + 4.9475, + 5.03845, + 5.01972, + 4.80886, + 4.88618, + 4.89985, + 4.82715, + 4.74128, + 5.00393, + 4.74546, + 5.20303, + 4.77871, + 4.98658, + 4.73073, + 4.78023, + 4.81501, + 4.64456, + 4.65279, + 4.83952, + 4.80146, + 4.79663, + 4.91833, + 4.87809, + 4.91911, + 4.76246, + 4.87827, + 4.72709, + 4.90772, + 4.95311, + 4.86859, + 4.70331, + 4.77605, + 4.89682, + 4.70384, + 4.8551, + 4.68524, + 4.68185, + 4.64443 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 86.0, + 97.0, + 77.0, + 63.0, + 77.0, + 73.0, + 117.0, + 81.0, + 106.0, + 103.0, + 117.0, + 150.0, + 140.0, + 168.0, + 169.0, + 177.0, + 200.0, + 196.0, + 202.0, + 184.0, + 166.0, + 177.0, + 199.0, + 168.0, + 193.0, + 149.0, + 175.0, + 178.0, + 154.0, + 158.0, + 159.0, + 148.0, + 142.0, + 183.0, + 168.0, + 167.0, + 171.0, + 215.0, + 165.0, + 183.0, + 195.0, + 168.0, + 143.0, + 185.0, + 201.0, + 162.0, + 190.0, + 207.0, + 174.0, + 224.0, + 217.0, + 159.0, + 191.0, + 169.0, + 196.0, + 212.0, + 174.0, + 143.0, + 219.0, + 232.0, + 180.0, + 220.0, + 234.0, + 169.0, + 214.0, + 259.0, + 218.0, + 212.0, + 232.0, + 207.0, + 251.0, + 250.0, + 161.0, + 235.0, + 207.0, + 186.0, + 261.0, + 191.0, + 267.0, + 228.0, + 253.0, + 229.0, + 221.0, + 235.0, + 216.0, + 201.0, + 207.0, + 215.0, + 210.0, + 223.0, + 178.0, + 229.0, + 241.0, + 206.0, + 211.0, + 157.0, + 218.0, + 221.0, + 199.0, + 158.0, + 167.0, + 178.0, + 168.0, + 188.0, + 165.0, + 158.0, + 158.0, + 158.0, + 137.0, + 193.0, + 185.0, + 148.0, + 165.0, + 158.0, + 174.0, + 137.0, + 167.0, + 119.0, + 185.0, + 167.0, + 162.0, + 123.0, + 145.0, + 161.0, + 113.0, + 131.0, + 94.0, + 139.0, + 133.0, + 137.0, + 170.0, + 126.0, + 144.0, + 127.0, + 120.0, + 127.0, + 152.0, + 137.0, + 133.0, + 134.0, + 162.0, + 137.0, + 95.0, + 150.0, + 133.0, + 144.0, + 147.0, + 141.0, + 136.0, + 125.0, + 103.0, + 115.0, + 97.0, + 111.0, + 111.0, + 89.0, + 110.0, + 117.0, + 107.0, + 127.0, + 110.0, + 116.0, + 116.0, + 136.0, + 103.0, + 99.0, + 111.0, + 124.0, + 105.0, + 109.0, + 103.0, + 118.0, + 109.0, + 95.0, + 118.0, + 144.0, + 93.0, + 108.0, + 100.0, + 121.0, + 108.0, + 96.0, + 106.0, + 144.0, + 125.0, + 122.0, + 93.0, + 114.0, + 101.0, + 127.0, + 107.0, + 126.0, + 102.0, + 100.0, + 98.0, + 112.0, + 103.0, + 116.0, + 134.0, + 94.0, + 126.0, + 118.0, + 118.0, + 100.0, + 123.0, + 106.0, + 105.0, + 83.0, + 111.0, + 102.0, + 108.0, + 110.0, + 100.0, + 115.0, + 103.0, + 98.0, + 107.0, 
+ 102.0, + 99.0, + 106.0, + 130.0, + 126.0, + 127.0, + 90.0, + 98.0, + 90.0, + 117.0, + 119.0, + 100.0, + 96.0, + 121.0, + 101.0, + 99.0, + 111.0, + 105.0, + 91.0, + 103.0, + 94.0, + 110.0, + 90.0, + 110.0, + 109.0, + 95.0, + 98.0, + 100.0, + 109.0, + 98.0, + 128.0, + 109.0, + 99.0, + 103.0, + 99.0, + 114.0, + 98.0, + 110.0, + 85.0, + 97.0, + 142.0, + 90.0, + 117.0, + 83.0, + 107.0, + 104.0, + 102.0, + 105.0, + 99.0, + 104.0, + 88.0, + 101.0, + 107.0, + 108.0, + 99.0, + 104.0, + 108.0, + 105.0, + 97.0, + 101.0, + 108.0, + 110.0, + 114.0, + 116.0, + 100.0, + 108.0, + 111.0, + 134.0, + 97.0, + 109.0, + 106.0, + 114.0, + 85.0, + 117.0, + 114.0, + 103.0, + 123.0, + 95.0, + 88.0, + 89.0, + 101.0, + 120.0, + 116.0, + 127.0, + 98.0, + 130.0, + 118.0, + 103.0, + 120.0, + 93.0, + 101.0, + 125.0, + 102.0, + 110.0, + 119.0, + 101.0, + 88.0, + 127.0, + 103.0, + 120.0, + 121.0, + 112.0, + 136.0, + 126.0, + 101.0, + 111.0, + 114.0, + 103.0, + 105.0, + 109.0, + 116.0, + 111.0, + 108.0, + 109.0, + 105.0, + 117.0, + 95.0, + 112.0, + 116.0, + 118.0, + 121.0, + 109.0, + 107.0, + 97.0, + 101.0, + 110.0, + 96.0, + 88.0, + 130.0, + 104.0, + 116.0, + 141.0, + 110.0, + 126.0, + 111.0, + 120.0, + 115.0, + 132.0, + 101.0, + 132.0, + 103.0, + 87.0, + 123.0, + 101.0, + 96.0, + 101.0, + 113.0, + 107.0, + 121.0, + 116.0, + 113.0, + 95.0, + 99.0, + 104.0, + 112.0, + 90.0, + 108.0, + 103.0, + 117.0, + 106.0, + 114.0, + 126.0, + 113.0, + 90.0, + 114.0, + 113.0, + 140.0, + 112.0, + 115.0, + 125.0, + 122.0, + 122.0, + 121.0, + 108.0, + 123.0, + 98.0, + 122.0, + 112.0, + 114.0, + 136.0, + 135.0, + 124.0, + 127.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 9.33072, + 0.37969, + 0.3867, + 0.39046, + 0.71873, + 0.38256, + 0.37315, + 0.37524, + 0.36944, + 0.37312, + 0.37427, + 0.37609, + 0.37691, + 0.37378, + 0.3748, + 0.37171, + 0.37454, + 0.37374, + 0.36874, + 0.3752, + 0.3711, + 0.37096, + 0.37248, + 0.36855, + 0.37987, + 0.38237, + 0.37301, + 0.37064, + 0.37284, + 0.37218, + 0.36973, + 0.36736, + 0.36966, + 0.37499, + 0.37066, + 0.37764, + 0.37572, + 0.37094, + 0.37367, + 0.37253, + 0.37593, + 0.37116, + 0.3711, + 0.37778, + 0.37155, + 0.37085, + 0.36952, + 0.37508, + 0.37548, + 0.38095, + 0.37291, + 0.37154, + 0.37099, + 0.36927, + 0.3727, + 0.37748, + 0.37423, + 0.38161, + 0.37206, + 0.37582, + 0.3751, + 0.37521, + 0.37579, + 0.3843, + 0.38471, + 0.39343, + 0.38245, + 0.37202, + 0.37512, + 0.37457, + 0.3767, + 0.3809, + 0.37685, + 0.37794, + 0.37766, + 0.37182, + 0.37032, + 0.36853, + 0.37837, + 0.38023, + 0.37444, + 0.37133, + 0.37618, + 0.37766, + 0.37506, + 0.37632, + 0.3801, + 0.37886, + 0.37663, + 0.36943, + 0.36983, + 0.3715, + 0.36856, + 0.36971, + 0.37105, + 0.36821, + 0.36936, + 0.37346, + 0.41784, + 0.37673, + 0.37144, + 0.37071, + 0.37031, + 0.37298, + 0.37588, + 0.3756, + 0.37347, + 0.38242, + 0.37911, + 0.54764, + 0.37973, + 0.38156, + 0.39236, + 0.37822, + 0.3697, + 0.37285, + 0.38125, + 0.38209, + 0.37865, + 0.38072, + 0.38122, + 0.37986, + 0.38034, + 0.37981, + 0.38328, + 0.37807, + 0.38055, + 0.3832, + 0.36995, + 0.38206, + 0.38372, + 0.38567, + 0.3812, + 0.38005, + 0.38254, + 0.38244, + 0.38168, + 0.38118, + 0.38283, + 0.38472, + 0.3835, + 0.38063, + 0.38557, + 0.3843, + 0.38091, + 0.38202, + 0.38245, + 0.38516, + 0.37498, + 0.3723, + 0.37436, + 0.37103, + 0.3695, + 0.37203, + 0.37519, + 0.54118, + 0.37475, + 0.37358, + 0.37411, + 0.37405, + 0.37456, + 0.3745, + 0.37136, + 0.37621, + 0.37202, + 0.373, + 0.37397, + 0.37221, + 0.37845, + 
0.37294, + 0.37833, + 0.37992, + 0.37911, + 0.37803, + 0.37925, + 0.37985, + 0.3727, + 0.37901, + 0.37373, + 0.37542, + 0.37778, + 0.37402, + 0.37537, + 0.37345, + 0.37323, + 0.3796, + 0.37226, + 0.37563, + 0.37458, + 0.37784, + 0.37195, + 0.37503, + 0.3753, + 0.54991, + 0.3707, + 0.37072, + 0.36734, + 0.37155, + 0.37337, + 0.37254, + 0.37077, + 0.37423, + 0.37483, + 0.37004, + 0.37069, + 0.37081, + 0.37165, + 0.37034, + 0.37015, + 0.37095, + 0.37197, + 0.37337, + 0.40008, + 0.37329, + 0.37851, + 0.374, + 0.37858, + 0.37453, + 0.37638, + 0.37597, + 0.37286, + 0.38096, + 0.37707, + 0.37106, + 0.37352, + 0.37279, + 0.37524, + 0.37497, + 0.41076, + 0.36917, + 0.37087, + 0.37171, + 0.37311, + 0.37307, + 0.36955, + 0.36813, + 0.36729, + 0.38713, + 0.37491, + 0.37489, + 0.37253, + 0.37112, + 0.37728, + 0.36993, + 0.37452, + 0.37127, + 0.37009, + 0.37711, + 0.37699, + 0.37589, + 0.37554, + 0.37267, + 0.3819, + 0.37774, + 0.37236, + 0.3769, + 0.37198, + 0.37151, + 0.36707, + 0.37125, + 0.37855, + 0.37806, + 0.37014, + 0.37031, + 0.37164, + 0.37899, + 0.37467, + 0.37348, + 0.38182, + 0.37435, + 0.3806, + 0.37719, + 0.37638, + 0.37477, + 0.37237, + 0.37865, + 0.3711, + 0.37491, + 0.37158, + 0.37482, + 0.3744, + 0.37558, + 0.37408, + 0.3765, + 0.37491, + 0.37773, + 0.37945, + 0.37283, + 0.37409, + 0.57331, + 0.37267, + 0.37515, + 0.37876, + 0.37131, + 0.36998, + 0.36831, + 0.37689, + 0.37104, + 0.37796, + 0.3776, + 0.37889, + 0.3789, + 0.38167, + 0.37888, + 0.37782, + 0.38072, + 0.37906, + 0.39179, + 0.37362, + 0.37514, + 0.37884, + 0.3718, + 0.3732, + 0.37328, + 0.37193, + 0.37268, + 0.37438, + 0.37533, + 0.37737, + 0.3799, + 0.37824, + 0.37318, + 0.37348, + 0.38644, + 0.37317, + 0.37552, + 0.37349, + 0.37952, + 0.37279, + 0.37525, + 0.37729, + 0.37658, + 0.38175, + 0.37911, + 0.38285, + 0.37703, + 0.37386, + 0.37333, + 0.37254, + 0.38348, + 0.38624, + 0.38767, + 0.37729, + 0.37494, + 0.3748, + 0.37604, + 0.37341, + 0.37345, + 0.37398, + 0.37676, + 0.37484, + 0.37314, + 0.37221, + 0.37146, + 0.37354, + 0.37185, + 0.37237, + 0.37319, + 0.37544, + 0.37588, + 0.37402, + 0.38246, + 0.377, + 0.3754, + 0.37227, + 0.38037, + 0.38689, + 0.38215, + 0.38483, + 0.38456, + 0.38612, + 0.37346, + 0.37238, + 0.3736, + 0.37485, + 0.3753, + 0.37849, + 0.38602, + 0.38352, + 0.38006, + 0.38036, + 0.38583, + 0.38083, + 0.37255, + 0.37355, + 0.37625, + 0.40762, + 0.37445, + 0.37449, + 0.37462, + 0.37751, + 0.38402, + 0.3824, + 0.37623, + 0.37718, + 0.38762, + 0.37136, + 0.37556, + 0.37615, + 0.37207 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json new file mode 100644 index 000000000..18ec1c2a1 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.88323, + 10.90276, + 10.88694, + 10.83322, + 10.67715, + 10.64953, + 10.43427, + 10.15183, + 9.93935, + 9.84176, + 9.5891, + 9.85451, + 9.88462, + 9.6297, + 9.78821, + 9.51159, + 9.45846, + 9.64933, + 9.3862, + 9.3321, + 9.24228, + 9.14561, + 9.17558, + 8.99543, + 9.18928, + 9.05999, + 9.15558, + 9.16512, + 9.29813, + 8.98492, + 8.92943, + 9.04419, + 9.04322, + 8.65521, + 8.71738, + 8.75365, + 
8.68379, + 8.73429, + 8.65884, + 8.76517, + 8.66123, + 8.85001, + 8.83236, + 8.4994, + 8.38904, + 8.43166, + 8.49319, + 8.38452, + 8.43286, + 8.57956, + 8.36712, + 8.19207, + 8.22579, + 8.22194, + 8.26717, + 7.91302, + 8.0955, + 7.89089, + 8.24619, + 8.23017, + 8.00469, + 7.96542, + 7.91804, + 7.73978, + 7.73961, + 7.64245, + 7.51511, + 7.90632, + 7.69783, + 7.45086, + 7.73945, + 7.76671, + 7.54095, + 7.29791, + 7.45173, + 7.33462, + 7.4612, + 7.22294, + 7.63514, + 7.27784, + 7.35079, + 7.21176, + 7.21704, + 7.42198, + 7.1767, + 7.28254, + 7.00176, + 7.0057, + 7.04106, + 7.14049, + 6.82528, + 6.98673, + 7.08928, + 7.00172, + 6.87462, + 6.75859, + 6.99286, + 7.05962, + 6.70626, + 6.58385, + 6.72973, + 6.74483, + 6.73638, + 6.74114, + 6.66099, + 6.40952, + 6.64131, + 6.62122, + 6.44763, + 6.63054, + 6.74432, + 6.60975, + 6.72503, + 6.69474, + 6.6247, + 6.50691, + 6.59911, + 6.4064, + 6.66409, + 6.24856, + 6.2516, + 6.3016, + 6.38875, + 6.34796, + 6.44852, + 6.28545, + 6.33925, + 6.23596, + 6.20233, + 6.39825, + 6.32525, + 6.32413, + 6.16984, + 6.16253, + 6.24375, + 6.3879, + 6.20637, + 6.15552, + 6.18702, + 6.12144, + 6.06949, + 6.07869, + 6.26293, + 6.41494, + 6.26452, + 6.30693, + 6.10587, + 6.18713, + 6.01158, + 6.03875, + 5.96545, + 6.25534, + 6.19897, + 5.97346, + 5.79144, + 6.13388, + 5.85851, + 6.11375, + 5.79987, + 6.16878, + 6.15254, + 6.09497, + 5.93885, + 6.1206, + 5.94963, + 6.20011, + 5.901, + 5.79876, + 5.78176, + 5.6937, + 6.02012, + 6.00074, + 6.06782, + 5.89184, + 6.04281, + 5.97078, + 5.99763, + 5.98979, + 5.94805, + 5.84122, + 5.95124, + 5.61843, + 5.70225, + 5.8906, + 5.84333, + 5.8628, + 5.76133, + 5.83588, + 5.72872, + 5.56229, + 5.72027, + 5.62406, + 5.83386, + 5.60151, + 5.71159, + 5.71751, + 5.89971, + 5.64532, + 5.85138, + 5.73855, + 5.87273, + 5.33013, + 5.8957, + 5.8746, + 5.85218, + 5.41494, + 5.41026, + 5.62571, + 5.59371, + 5.48334, + 5.57165, + 5.67238, + 5.4744, + 5.74362, + 5.51126, + 5.59605, + 5.62107, + 5.61572, + 5.50856, + 5.60876, + 5.67058, + 5.68967, + 5.58943, + 5.65884, + 5.37283, + 5.68049, + 5.62588, + 5.42149, + 5.58882, + 5.6294, + 5.55294, + 5.33966, + 5.53728, + 5.48414, + 5.48307, + 5.37506, + 5.55721, + 5.60131, + 5.38633, + 5.53162, + 5.48787, + 5.33174, + 5.50407, + 5.4065, + 5.44014, + 5.31531, + 5.06354, + 5.47634, + 5.5663, + 5.70998, + 5.41495, + 5.59526, + 5.6328, + 5.2319, + 5.2739, + 5.39497, + 5.39608, + 5.32487, + 5.49737, + 5.18209, + 5.29492, + 5.24643, + 5.37552, + 5.25606, + 5.44308, + 5.53741, + 5.31228, + 5.44067, + 5.33998, + 5.07194, + 5.31518, + 5.24712, + 5.30351, + 5.10936, + 5.27335, + 5.26643, + 5.46934, + 5.15835, + 5.2678, + 5.20457, + 5.35651, + 4.9827, + 4.91355, + 5.31913, + 5.38813, + 5.22706, + 5.31863, + 5.09862, + 5.15647, + 5.25815, + 5.06521, + 5.26139, + 5.07559, + 5.34225, + 5.2435, + 5.14354, + 5.23796, + 5.03841, + 5.31227, + 5.05047, + 5.02308, + 5.14022, + 5.10954, + 5.27005, + 5.14834, + 5.2764, + 5.09643, + 5.09616, + 5.24991, + 5.31987, + 5.25189, + 5.18613, + 5.14096, + 5.28633, + 4.94797, + 5.20474, + 5.08641, + 5.3005, + 5.17427, + 5.18273, + 5.10837, + 4.98264, + 4.99144, + 5.22303, + 5.30945, + 5.09288, + 5.0515, + 4.9141, + 5.12157, + 5.11768, + 4.92193, + 5.33538, + 5.01865, + 5.09977, + 5.15945, + 5.00134, + 5.062, + 5.06352, + 4.98951, + 5.07403, + 5.15561, + 4.97364, + 5.17698, + 4.92401, + 4.91763, + 5.06561, + 4.98934, + 4.90514, + 4.77142, + 4.93751, + 5.10748, + 5.01115, + 5.01315, + 5.32269, + 4.95385, + 4.98933, + 5.03967, + 4.80287, + 4.73643, + 4.99208, + 5.03327, + 4.86668, + 
4.9473, + 5.03761, + 5.01854, + 4.81126, + 4.88589, + 4.89708, + 4.82611, + 4.73767, + 5.00493, + 4.74564, + 5.20177, + 4.77793, + 4.98531, + 4.72962, + 4.77857, + 4.81505, + 4.64522, + 4.64996, + 4.83534, + 4.80065, + 4.79383, + 4.91643, + 4.87724, + 4.9168, + 4.7603, + 4.87501, + 4.72665, + 4.90429, + 4.95354, + 4.86716, + 4.70097, + 4.77165, + 4.89297, + 4.70177, + 4.85355, + 4.68265, + 4.68029, + 4.64235 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 77.0, + 69.0, + 83.0, + 75.0, + 87.0, + 65.0, + 107.0, + 100.0, + 110.0, + 118.0, + 128.0, + 140.0, + 140.0, + 162.0, + 158.0, + 163.0, + 148.0, + 189.0, + 182.0, + 184.0, + 191.0, + 164.0, + 191.0, + 164.0, + 211.0, + 159.0, + 188.0, + 172.0, + 153.0, + 168.0, + 138.0, + 173.0, + 164.0, + 177.0, + 160.0, + 145.0, + 170.0, + 214.0, + 177.0, + 204.0, + 172.0, + 193.0, + 183.0, + 202.0, + 179.0, + 168.0, + 190.0, + 212.0, + 194.0, + 198.0, + 193.0, + 149.0, + 204.0, + 143.0, + 158.0, + 203.0, + 173.0, + 140.0, + 230.0, + 258.0, + 215.0, + 193.0, + 220.0, + 189.0, + 186.0, + 282.0, + 204.0, + 168.0, + 197.0, + 185.0, + 249.0, + 253.0, + 197.0, + 222.0, + 213.0, + 190.0, + 240.0, + 197.0, + 291.0, + 232.0, + 198.0, + 294.0, + 223.0, + 233.0, + 193.0, + 212.0, + 198.0, + 232.0, + 226.0, + 219.0, + 227.0, + 226.0, + 240.0, + 208.0, + 186.0, + 151.0, + 200.0, + 222.0, + 199.0, + 187.0, + 193.0, + 200.0, + 158.0, + 181.0, + 167.0, + 144.0, + 177.0, + 172.0, + 156.0, + 209.0, + 196.0, + 153.0, + 160.0, + 178.0, + 164.0, + 152.0, + 154.0, + 130.0, + 182.0, + 142.0, + 158.0, + 145.0, + 157.0, + 155.0, + 140.0, + 161.0, + 141.0, + 139.0, + 112.0, + 117.0, + 146.0, + 132.0, + 123.0, + 121.0, + 152.0, + 140.0, + 145.0, + 86.0, + 111.0, + 122.0, + 94.0, + 130.0, + 133.0, + 140.0, + 154.0, + 134.0, + 113.0, + 112.0, + 127.0, + 130.0, + 104.0, + 111.0, + 102.0, + 110.0, + 143.0, + 106.0, + 94.0, + 81.0, + 83.0, + 101.0, + 119.0, + 108.0, + 133.0, + 151.0, + 119.0, + 96.0, + 105.0, + 124.0, + 137.0, + 104.0, + 103.0, + 98.0, + 97.0, + 92.0, + 120.0, + 116.0, + 115.0, + 139.0, + 118.0, + 86.0, + 120.0, + 109.0, + 121.0, + 120.0, + 92.0, + 125.0, + 121.0, + 110.0, + 74.0, + 92.0, + 107.0, + 115.0, + 116.0, + 105.0, + 83.0, + 95.0, + 112.0, + 95.0, + 110.0, + 118.0, + 97.0, + 97.0, + 112.0, + 107.0, + 118.0, + 104.0, + 114.0, + 109.0, + 118.0, + 105.0, + 125.0, + 87.0, + 102.0, + 109.0, + 110.0, + 99.0, + 90.0, + 129.0, + 123.0, + 109.0, + 117.0, + 74.0, + 90.0, + 121.0, + 92.0, + 106.0, + 96.0, + 138.0, + 104.0, + 123.0, + 101.0, + 104.0, + 105.0, + 102.0, + 99.0, + 119.0, + 101.0, + 101.0, + 102.0, + 84.0, + 97.0, + 89.0, + 104.0, + 98.0, + 92.0, + 103.0, + 106.0, + 118.0, + 113.0, + 122.0, + 121.0, + 115.0, + 119.0, + 118.0, + 103.0, + 106.0, + 113.0, + 118.0, + 115.0, + 112.0, + 115.0, + 91.0, + 107.0, + 90.0, + 95.0, + 106.0, + 91.0, + 104.0, + 106.0, + 116.0, + 82.0, + 111.0, + 104.0, + 130.0, + 112.0, + 105.0, + 93.0, + 107.0, + 98.0, + 105.0, + 86.0, + 98.0, + 105.0, + 119.0, + 112.0, + 106.0, + 116.0, + 104.0, + 124.0, + 104.0, + 114.0, + 102.0, + 98.0, + 98.0, + 107.0, + 118.0, + 107.0, + 98.0, + 102.0, + 111.0, + 126.0, + 97.0, + 118.0, + 126.0, + 112.0, + 91.0, + 93.0, + 108.0, + 124.0, + 119.0, + 98.0, + 147.0, + 96.0, + 119.0, + 109.0, + 112.0, + 119.0, + 96.0, + 105.0, + 96.0, + 122.0, + 100.0, + 107.0, + 110.0, + 121.0, + 82.0, + 105.0, + 108.0, + 98.0, + 100.0, + 111.0, + 99.0, + 121.0, + 89.0, + 129.0, + 102.0, + 92.0, + 119.0, + 106.0, + 110.0, + 116.0, + 109.0, + 100.0, + 125.0, + 
88.0, + 101.0, + 104.0, + 88.0, + 109.0, + 111.0, + 99.0, + 113.0, + 111.0, + 136.0, + 111.0, + 113.0, + 135.0, + 95.0, + 94.0, + 110.0, + 121.0, + 123.0, + 134.0, + 132.0, + 118.0, + 112.0, + 98.0, + 116.0, + 100.0, + 95.0, + 103.0, + 111.0, + 100.0, + 111.0, + 112.0, + 127.0, + 108.0, + 108.0, + 104.0, + 120.0, + 123.0, + 124.0, + 133.0, + 116.0, + 130.0, + 119.0, + 115.0, + 135.0, + 119.0, + 109.0, + 114.0, + 97.0, + 120.0, + 122.0, + 107.0, + 151.0, + 131.0, + 130.0, + 133.0, + 116.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 23.49073, + 0.45673, + 0.49857, + 0.45742, + 0.45417, + 0.45498, + 0.45169, + 0.44995, + 0.44985, + 0.46253, + 0.44641, + 0.45172, + 0.44994, + 0.44786, + 0.44991, + 0.46752, + 0.44937, + 0.44931, + 0.45455, + 0.45638, + 0.44949, + 0.44578, + 0.45615, + 0.45432, + 0.45019, + 0.45684, + 0.45146, + 0.45053, + 0.44516, + 0.44513, + 0.44748, + 0.44806, + 0.70306, + 0.44525, + 0.45604, + 0.45039, + 0.44938, + 0.44478, + 0.45854, + 0.44939, + 0.4453, + 0.4508, + 0.44723, + 0.44863, + 0.4456, + 0.44644, + 0.45712, + 0.45015, + 0.44577, + 0.44529, + 0.44891, + 0.45444, + 0.45302, + 0.44825, + 0.44762, + 0.45019, + 0.44869, + 0.57727, + 0.4499, + 0.45275, + 0.46154, + 0.44858, + 0.44579, + 0.45551, + 0.45026, + 0.44368, + 0.44584, + 0.44692, + 0.44436, + 0.44468, + 0.46316, + 0.44645, + 0.44314, + 0.4448, + 0.4471, + 0.45064, + 0.44559, + 0.44749, + 0.45139, + 0.4535, + 0.58646, + 0.44962, + 0.44927, + 0.46076, + 0.44914, + 0.4463, + 0.44803, + 0.45468, + 0.44878, + 0.45252, + 0.45032, + 0.45193, + 0.44895, + 0.44717, + 0.45458, + 0.45081, + 0.44639, + 0.45649, + 0.44958, + 0.44661, + 0.44544, + 0.45127, + 0.45634, + 0.44936, + 0.44802, + 0.45893, + 0.70259, + 0.58713, + 0.4441, + 0.44774, + 0.44927, + 0.45009, + 0.45029, + 0.44752, + 0.45399, + 0.44921, + 0.45252, + 0.44728, + 0.45779, + 0.45171, + 0.44784, + 0.45047, + 0.44749, + 0.45711, + 0.45055, + 0.44951, + 0.4473, + 0.44734, + 0.58434, + 0.45093, + 0.44969, + 0.56992, + 0.44965, + 0.45071, + 0.44913, + 0.44756, + 0.44547, + 0.44971, + 0.45838, + 0.4574, + 0.45394, + 0.45483, + 0.4512, + 0.44954, + 0.4479, + 0.44758, + 0.44853, + 0.45108, + 0.44804, + 0.44791, + 0.44831, + 0.45494, + 0.44761, + 0.44412, + 0.44433, + 0.44519, + 0.45125, + 0.447, + 0.4492, + 0.44787, + 0.44944, + 0.44622, + 0.4476, + 0.4447, + 0.45124, + 0.44854, + 0.44716, + 0.44676, + 0.44755, + 0.4655, + 0.4487, + 0.44985, + 0.44982, + 0.44694, + 0.44611, + 0.44694, + 0.44286, + 0.44458, + 0.44491, + 0.45147, + 0.44613, + 0.5801, + 0.45263, + 0.44887, + 0.44979, + 0.44625, + 0.45051, + 0.44896, + 0.4423, + 0.4475, + 0.44896, + 0.45016, + 0.45298, + 0.44594, + 0.44685, + 0.45698, + 0.44779, + 0.44749, + 0.44739, + 0.45153, + 0.57538, + 0.44826, + 0.45017, + 0.44753, + 0.44927, + 0.44831, + 0.44866, + 0.44895, + 0.44796, + 0.45036, + 0.44825, + 0.4478, + 0.44693, + 0.45241, + 0.44821, + 0.44687, + 0.44895, + 0.45248, + 0.45022, + 0.44649, + 0.4508, + 0.45026, + 0.4497, + 0.45016, + 0.44784, + 0.44722, + 0.45425, + 0.44892, + 0.45033, + 0.45322, + 0.45187, + 0.44969, + 0.45852, + 0.45233, + 0.45326, + 0.44695, + 0.44901, + 0.44797, + 0.45123, + 0.44468, + 0.44681, + 0.45333, + 0.44879, + 0.44331, + 0.44989, + 0.45159, + 0.44991, + 0.44774, + 0.44604, + 0.58441, + 0.44958, + 0.44496, + 0.44421, + 0.44393, + 0.44478, + 0.44417, + 0.44427, + 0.44729, + 0.4465, + 0.45195, + 0.44517, + 0.44747, + 0.4465, + 0.44691, + 0.44759, + 0.44365, + 0.44855, + 0.44391, + 0.44652, + 0.44474, + 0.45265, + 
0.44285, + 0.44348, + 0.46714, + 0.44438, + 0.44968, + 0.58646, + 0.4456, + 0.57565, + 0.4451, + 0.44392, + 0.44762, + 0.44584, + 0.44731, + 0.44368, + 0.44143, + 0.44348, + 0.44286, + 0.44866, + 0.44303, + 0.4467, + 0.44242, + 0.44594, + 0.44457, + 0.44212, + 0.45173, + 0.45314, + 0.4537, + 0.45345, + 0.44645, + 0.44564, + 0.44791, + 0.44538, + 0.56436, + 0.4463, + 0.44361, + 0.44583, + 0.4472, + 0.44565, + 0.44765, + 0.44352, + 0.44439, + 0.45014, + 0.45393, + 0.44761, + 0.44365, + 0.44194, + 0.44055, + 0.44391, + 0.44516, + 0.43991, + 0.43973, + 0.44667, + 0.59303, + 0.44362, + 0.44564, + 0.4467, + 0.45244, + 0.84618, + 0.44873, + 0.44536, + 0.446, + 0.4484, + 0.45038, + 0.44833, + 0.45815, + 0.44989, + 0.45457, + 0.45252, + 0.45002, + 0.45094, + 0.44968, + 0.45105, + 0.44441, + 0.4415, + 0.44859, + 0.43942, + 0.44673, + 0.60446, + 0.44265, + 0.44754, + 0.45059, + 0.4443, + 0.57371, + 0.45333, + 0.44117, + 0.44025, + 0.44493, + 0.44453, + 0.44295, + 0.44557, + 0.4392, + 0.44354, + 0.45185, + 0.44735, + 0.4481, + 0.45094, + 0.44791, + 0.45131, + 0.44821, + 0.44249, + 0.44289, + 0.44532, + 0.58138, + 0.44778, + 0.44834, + 0.44647, + 0.44908, + 0.71286, + 0.44635, + 0.44907, + 0.44524, + 0.44548, + 0.44391, + 0.44473, + 0.4419, + 0.44386, + 0.44348, + 0.44854, + 0.44606, + 0.4454, + 0.44354, + 0.44676, + 0.44494, + 0.44387, + 0.44867, + 0.44496, + 0.44666, + 0.44531, + 0.44669 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values_dev.json new file mode 100644 index 000000000..c7f6bc858 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values_dev.json @@ -0,0 +1,1220 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.89393, + 10.90229, + 10.90382, + 10.89922, + 10.90215, + 10.87439, + 10.80338, + 10.63346, + 10.44036, + 10.2933, + 10.02712, + 10.16747, + 10.13781, + 9.86191, + 9.97684, + 9.67806, + 9.59836, + 9.7815, + 9.50325, + 9.44529, + 9.35262, + 9.25422, + 9.27971, + 9.09386, + 9.28651, + 9.15722, + 9.24673, + 9.26197, + 9.39815, + 9.08902, + 9.03506, + 9.14524, + 9.15344, + 8.76086, + 8.82546, + 8.85801, + 8.78594, + 8.83766, + 8.76271, + 8.8693, + 8.76505, + 8.95513, + 8.94138, + 8.60415, + 8.49526, + 8.5414, + 8.6052, + 8.49377, + 8.54563, + 8.69588, + 8.4793, + 8.31046, + 8.3419, + 8.3376, + 8.38481, + 8.03115, + 8.21697, + 8.01004, + 8.36596, + 8.3517, + 8.12379, + 8.08902, + 8.03892, + 7.85883, + 7.86204, + 7.76178, + 7.63785, + 8.03256, + 7.82491, + 7.57768, + 7.87018, + 7.89664, + 7.66577, + 7.41891, + 7.57946, + 7.45949, + 7.58407, + 7.3365, + 7.75477, + 7.39311, + 7.46005, + 7.326, + 7.3226, + 7.53323, + 7.28431, + 7.39059, + 7.10454, + 7.10309, + 7.135, + 7.23329, + 6.91494, + 7.07307, + 7.1732, + 7.08149, + 6.95567, + 6.83555, + 7.07147, + 7.13599, + 6.77635, + 6.65371, + 6.79924, + 6.81095, + 6.80156, + 6.80623, + 6.72479, + 6.46997, + 6.70288, + 6.67891, + 6.50415, + 6.69017, + 6.80201, + 6.66743, + 6.78224, + 6.74909, + 6.68039, + 6.55852, + 6.65127, + 6.45883, + 6.71595, + 6.30029, + 6.29946, + 6.35125, + 6.43625, + 6.39727, + 6.50048, + 6.33651, + 6.38488, + 6.28047, + 6.24359, + 6.44009, + 6.36825, + 6.36402, + 6.2045, + 6.19664, + 6.27933, + 6.42468, + 6.24025, + 6.18585, + 6.21348, + 6.14842, + 6.09617, + 6.1035, + 
6.28976, + 6.44192, + 6.28932, + 6.33177, + 6.12937, + 6.2119, + 6.03064, + 6.05658, + 5.98505, + 6.27562, + 6.21999, + 5.99254, + 5.81222, + 6.1522, + 5.87811, + 6.13276, + 5.81621, + 6.18981, + 6.17418, + 6.11405, + 5.95877, + 6.13943, + 5.96879, + 6.22137, + 5.92302, + 5.81813, + 5.80612, + 5.71127, + 6.04011, + 6.02026, + 6.09059, + 5.91133, + 6.0647, + 5.9908, + 6.01775, + 6.01088, + 5.97305, + 5.86247, + 5.97385, + 5.63832, + 5.72202, + 5.91221, + 5.86536, + 5.88217, + 5.78585, + 5.85599, + 5.74904, + 5.58238, + 5.74505, + 5.64738, + 5.8552, + 5.62673, + 5.73069, + 5.73403, + 5.92154, + 5.66651, + 5.86965, + 5.76023, + 5.89258, + 5.35098, + 5.9205, + 5.89567, + 5.87366, + 5.43348, + 5.42769, + 5.64532, + 5.61424, + 5.50172, + 5.5911, + 5.69239, + 5.49278, + 5.76306, + 5.53002, + 5.61324, + 5.64004, + 5.63451, + 5.52873, + 5.63026, + 5.68897, + 5.69849, + 5.60119, + 5.67641, + 5.3926, + 5.69571, + 5.64274, + 5.43772, + 5.59953, + 5.64251, + 5.56535, + 5.35493, + 5.55145, + 5.49555, + 5.49469, + 5.38646, + 5.5675, + 5.61485, + 5.39936, + 5.53506, + 5.49708, + 5.34111, + 5.51556, + 5.42086, + 5.4521, + 5.32709, + 5.07441, + 5.48669, + 5.57797, + 5.72108, + 5.42477, + 5.60744, + 5.64535, + 5.24322, + 5.28211, + 5.40464, + 5.40345, + 5.33686, + 5.51041, + 5.19531, + 5.30946, + 5.26092, + 5.38482, + 5.26778, + 5.45655, + 5.54658, + 5.32255, + 5.44786, + 5.34468, + 5.0817, + 5.3265, + 5.26443, + 5.31477, + 5.1223, + 5.28586, + 5.27616, + 5.48205, + 5.16778, + 5.27791, + 5.21918, + 5.37082, + 4.99576, + 4.92396, + 5.33114, + 5.40116, + 5.23548, + 5.32971, + 5.1098, + 5.16761, + 5.27075, + 5.07658, + 5.27525, + 5.09175, + 5.35657, + 5.25632, + 5.16135, + 5.24941, + 5.05151, + 5.32323, + 5.06328, + 5.03807, + 5.15012, + 5.12121, + 5.2805, + 5.1623, + 5.28751, + 5.10857, + 5.107, + 5.26185, + 5.33273, + 5.26325, + 5.19866, + 5.15283, + 5.29684, + 4.9578, + 5.21696, + 5.09944, + 5.30924, + 5.18412, + 5.19534, + 5.12112, + 4.99133, + 5.00084, + 5.23319, + 5.32054, + 5.10638, + 5.06456, + 4.92573, + 5.13168, + 5.12607, + 4.93273, + 5.3413, + 5.03043, + 5.10934, + 5.16974, + 5.01126, + 5.07104, + 5.07587, + 5.0034, + 5.08619, + 5.1671, + 4.98476, + 5.18902, + 4.93793, + 4.92414, + 5.07774, + 4.99851, + 4.91554, + 4.78269, + 4.95064, + 5.12237, + 5.02596, + 5.02298, + 5.33707, + 4.96446, + 4.99962, + 5.05063, + 4.81016, + 4.74605, + 5.00281, + 5.04573, + 4.88142, + 4.95871, + 5.04942, + 5.02997, + 4.81942, + 4.89951, + 4.91098, + 4.83717, + 4.74869, + 5.01582, + 4.75783, + 5.21702, + 4.79022, + 4.99791, + 4.74194, + 4.7912, + 4.82664, + 4.65524, + 4.6621, + 4.85014, + 4.81175, + 4.80742, + 4.93171, + 4.88928, + 4.92931, + 4.77459, + 4.8876, + 4.73984, + 4.91676, + 4.96546, + 4.87897, + 4.71224, + 4.78675, + 4.90579, + 4.71528, + 4.86716, + 4.69307, + 4.69138, + 4.65331 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 9.25578, + 0.3326, + 0.33822, + 0.32857, + 0.3426, + 0.34934, + 0.34164, + 0.34303, + 0.34646, + 0.3405, + 0.34386, + 0.35065, + 0.33857, + 0.33893, + 0.35587, + 0.34445, + 0.3386, + 0.34381, + 0.3394, + 0.34322, + 0.33866, + 0.34045, + 0.34327, + 0.34138, + 0.34855, + 0.34967, + 0.34407, + 0.34762, + 0.35319, + 0.33655, + 0.33613, + 0.33455, + 0.33412, + 0.34143, + 0.33898, + 0.33485, + 0.3759, + 0.34214, + 0.33791, + 0.33356, + 0.33752, + 0.334, + 0.33322, + 0.33482, + 0.33655, + 0.33394, + 0.33331, + 0.3351, + 0.3314, + 0.33591, + 0.33346, + 0.33519, + 0.33236, + 0.33088, + 0.33279, + 0.3329, + 0.3359, + 0.33962, + 0.33166, + 
0.3389, + 0.33537, + 0.33003, + 0.33507, + 0.33086, + 0.33492, + 0.3322, + 0.33134, + 0.33302, + 0.3341, + 0.33216, + 0.33239, + 0.33318, + 0.33361, + 0.33237, + 0.33266, + 0.33698, + 0.33954, + 0.33607, + 0.33264, + 0.33248, + 0.33964, + 0.33521, + 0.33566, + 0.33367, + 0.33504, + 0.33451, + 0.33413, + 0.33504, + 0.33696, + 0.3376, + 0.33765, + 0.33646, + 0.3365, + 0.33915, + 0.33487, + 0.33518, + 0.33513, + 0.33649, + 0.33811, + 0.33604, + 0.33597, + 0.33456, + 0.33512, + 0.33801, + 0.33645, + 0.337, + 0.3365, + 0.33969, + 0.34136, + 0.33618, + 0.3333, + 0.33291, + 0.33287, + 0.51594, + 0.34363, + 0.33638, + 0.33456, + 0.33793, + 0.33855, + 0.3359, + 0.33867, + 0.33647, + 0.3352, + 0.33624, + 0.33617, + 0.51401, + 0.33827, + 0.33714, + 0.33569, + 0.33609, + 0.334, + 0.33524, + 0.33575, + 0.33371, + 0.33439, + 0.34352, + 0.33393, + 0.33376, + 0.33687, + 0.3341, + 0.33377, + 0.33715, + 0.33643, + 0.33704, + 0.34004, + 0.33701, + 0.34317, + 0.34338, + 0.33355, + 0.34018, + 0.33372, + 0.33971, + 0.33659, + 0.33682, + 0.34053, + 0.34117, + 0.33512, + 0.33493, + 0.3356, + 0.33062, + 0.33407, + 0.33178, + 0.33299, + 0.33624, + 0.33672, + 0.33162, + 0.33801, + 0.50818, + 0.33122, + 0.33524, + 0.33395, + 0.33144, + 0.33808, + 0.33398, + 0.33057, + 0.33247, + 0.33608, + 0.33554, + 0.33546, + 0.33375, + 0.3376, + 0.34091, + 0.3369, + 0.33926, + 0.33962, + 0.33152, + 0.327, + 0.32552, + 0.32939, + 0.32366, + 0.32998, + 0.32721, + 0.3246, + 0.32935, + 0.32592, + 0.3266, + 0.33091, + 0.3258, + 0.32938, + 0.32694, + 0.33356, + 0.3274, + 0.32466, + 0.33347, + 0.3323, + 0.33117, + 0.32588, + 0.32403, + 0.32795, + 0.32369, + 0.32203, + 0.32301, + 0.32286, + 0.32055, + 0.3398, + 0.32238, + 0.33633, + 0.3256, + 0.33198, + 0.50333, + 0.33007, + 0.33025, + 0.3307, + 0.32366, + 0.3305, + 0.33215, + 0.32605, + 0.70345, + 0.33425, + 0.33421, + 0.32842, + 0.33332, + 0.33075, + 0.32626, + 0.32712, + 0.32341, + 0.32308, + 0.32473, + 0.32353, + 0.32932, + 0.33035, + 0.32401, + 0.33502, + 0.33327, + 0.33395, + 0.32981, + 0.32419, + 0.32325, + 0.33309, + 0.32184, + 0.33265, + 0.32364, + 0.3237, + 0.33155, + 0.32372, + 0.32382, + 0.32291, + 0.32388, + 0.32158, + 0.32223, + 0.32498, + 0.3253, + 0.33429, + 0.32815, + 0.32815, + 0.32262, + 0.32595, + 0.33413, + 0.33488, + 0.32392, + 0.32413, + 0.32569, + 0.49049, + 0.3248, + 0.33109, + 0.32587, + 0.32642, + 0.32518, + 0.32592, + 0.32421, + 0.71015, + 0.33488, + 0.33222, + 0.33776, + 0.33626, + 0.33446, + 0.33173, + 0.33291, + 0.33359, + 0.3356, + 0.32588, + 0.32604, + 0.32374, + 0.32432, + 0.32517, + 0.32336, + 0.32242, + 0.32382, + 0.32447, + 0.32621, + 0.32442, + 0.33073, + 0.32577, + 0.32967, + 0.32407, + 0.32569, + 0.32784, + 0.3461, + 0.32392, + 0.32392, + 0.32443, + 0.32222, + 0.32412, + 0.32365, + 0.32223, + 0.3256, + 0.32161, + 0.32484, + 0.32165, + 0.32169, + 0.32734, + 0.32352, + 0.32425, + 0.32547, + 0.3233, + 0.32457, + 0.32423, + 0.32358, + 0.32516, + 0.32609, + 0.32614, + 0.32573, + 0.32359, + 0.50412, + 0.32385, + 0.3249, + 0.33249, + 0.34813, + 0.33455, + 0.33984, + 0.33686, + 0.33544, + 0.32686, + 0.32733, + 0.32357, + 0.33073, + 0.32781, + 0.32687, + 0.32707, + 0.3227, + 0.32312, + 0.32367, + 0.32418, + 0.32795, + 0.32217, + 0.32661, + 0.32769, + 0.32438, + 0.32866, + 0.32324, + 0.32266, + 0.32478, + 0.32267, + 0.3259, + 0.32629, + 0.32532, + 0.33247, + 0.33203, + 0.32868, + 0.32809, + 0.32677, + 0.32893, + 0.32629, + 0.32723, + 0.32658, + 0.32474, + 0.33155, + 0.33378, + 0.3288, + 0.33409, + 0.32907, + 0.32732, + 0.32661, + 0.32706, + 0.51517, + 
0.51886, + 0.32875, + 0.32613, + 0.32755, + 0.32594, + 0.32591, + 0.3275, + 0.32658, + 0.32598, + 0.32571, + 0.33078, + 0.32567, + 0.33064, + 0.32718, + 0.32881 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 1983, + "step_interval": 5, + "values": [ + 951.0, + 1294.0, + 1060.0, + 971.0, + 901.0, + 1117.0, + 1146.0, + 1481.0, + 1450.0, + 1359.0, + 1524.0, + 1946.0, + 2172.0, + 1538.0, + 2168.0, + 1978.0, + 1941.0, + 2017.0, + 2514.0, + 1951.0, + 2211.0, + 2190.0, + 2499.0, + 3109.0, + 2431.0, + 2741.0, + 2536.0, + 2192.0, + 2064.0, + 2948.0, + 2423.0, + 3485.0, + 2438.0, + 2456.0, + 2498.0, + 3614.0, + 2079.0, + 2299.0, + 2218.0, + 2691.0, + 3765.0, + 2801.0, + 2213.0, + 2801.0, + 2673.0, + 2229.0, + 2614.0, + 2534.0, + 2395.0, + 3023.0, + 3073.0, + 2519.0, + 2574.0, + 2151.0, + 2685.0, + 3348.0, + 2764.0, + 2698.0, + 2394.0, + 3505.0, + 2414.0, + 2978.0, + 2468.0, + 2605.0, + 2317.0, + 3165.0, + 2865.0, + 2919.0, + 2342.0, + 2556.0, + 2184.0, + 2857.0, + 2932.0, + 2812.0, + 3367.0, + 2539.0, + 2770.0, + 2638.0, + 3112.0, + 2799.0, + 2681.0, + 2540.0, + 3130.0, + 2387.0, + 2738.0, + 2862.0, + 2676.0, + 2320.0, + 2382.0, + 2816.0, + 2529.0, + 3200.0, + 2496.0, + 2423.0, + 2581.0, + 2432.0, + 2336.0, + 1902.0, + 2306.0, + 2607.0, + 2764.0, + 2214.0, + 2000.0, + 2180.0, + 1834.0, + 2352.0, + 2325.0, + 2334.0, + 2259.0, + 2077.0, + 2207.0, + 2478.0, + 2327.0, + 2507.0, + 2306.0, + 2729.0, + 2650.0, + 2051.0, + 2485.0, + 1970.0, + 2732.0, + 2407.0, + 2140.0, + 2130.0, + 2047.0, + 2243.0, + 1970.0, + 2569.0, + 2417.0, + 2222.0, + 2205.0, + 2295.0, + 2373.0, + 2311.0, + 1908.0, + 2299.0, + 2581.0, + 2254.0, + 2282.0, + 1506.0, + 2124.0, + 2356.0, + 2072.0, + 2489.0, + 2119.0, + 1906.0, + 2289.0, + 1838.0, + 2039.0, + 2864.0, + 2402.0, + 2108.0, + 1676.0, + 1774.0, + 2390.0, + 1925.0, + 2184.0, + 1979.0, + 2190.0, + 2016.0, + 1830.0, + 2377.0, + 1660.0, + 2153.0, + 2079.0, + 1918.0, + 2331.0, + 2555.0, + 1930.0, + 1627.0, + 1710.0, + 1702.0, + 1998.0, + 2075.0, + 1579.0, + 1644.0, + 1901.0, + 2428.0, + 2111.0, + 2256.0, + 2057.0, + 2184.0, + 2241.0, + 2111.0, + 2126.0, + 2146.0, + 1818.0, + 2432.0, + 1563.0, + 1864.0, + 1830.0, + 1783.0, + 1874.0, + 1963.0, + 1715.0, + 2022.0, + 2143.0, + 2015.0, + 1604.0, + 2044.0, + 1998.0, + 2159.0, + 2247.0, + 2858.0, + 2284.0, + 2138.0, + 2515.0, + 2295.0, + 2514.0, + 1794.0, + 2096.0, + 2257.0, + 2612.0, + 2054.0, + 2084.0, + 2161.0, + 2071.0, + 1911.0, + 1998.0, + 2301.0, + 2014.0, + 2010.0, + 1940.0, + 2338.0, + 2206.0, + 2436.0, + 2084.0, + 2300.0, + 1838.0, + 2266.0, + 2007.0, + 2320.0, + 1960.0, + 2174.0, + 2067.0, + 1904.0, + 2017.0, + 1784.0, + 1804.0, + 2096.0, + 2006.0, + 2020.0, + 1881.0, + 2441.0, + 2440.0, + 2196.0, + 1856.0, + 2861.0, + 2097.0, + 2002.0, + 1886.0, + 1765.0, + 2257.0, + 2195.0, + 1946.0, + 1758.0, + 2432.0, + 1695.0, + 2473.0, + 1924.0, + 1741.0, + 1858.0, + 2479.0, + 2441.0, + 2083.0, + 2289.0, + 2251.0, + 1860.0, + 1983.0, + 1939.0, + 2148.0, + 2379.0, + 2339.0, + 2165.0, + 2381.0, + 2161.0, + 1997.0, + 1732.0, + 1901.0, + 1990.0, + 2229.0, + 2281.0, + 2032.0, + 2062.0, + 2072.0, + 2291.0, + 2069.0, + 1668.0, + 1720.0, + 2157.0, + 2187.0, + 2037.0, + 2461.0, + 2170.0, + 2121.0, + 2135.0, + 1806.0, + 2596.0, + 2088.0, + 2654.0, + 1959.0, + 1994.0, + 1881.0, + 1998.0, + 2453.0, + 1943.0, + 2221.0, + 2296.0, + 1837.0, + 1837.0, + 2352.0, + 2099.0, + 2125.0, + 2191.0, + 2173.0, + 1981.0, + 2218.0, + 1957.0, + 2445.0, + 2377.0, + 2214.0, + 2626.0, + 2131.0, + 2373.0, + 2530.0, + 2365.0, + 2106.0, + 1956.0, + 2205.0, + 
2115.0, + 2344.0, + 2587.0, + 2484.0, + 2203.0, + 2093.0, + 2128.0, + 2109.0, + 2625.0, + 2027.0, + 2489.0, + 2424.0, + 2757.0, + 2901.0, + 2295.0, + 2267.0, + 2149.0, + 2081.0, + 2612.0, + 2195.0, + 2530.0, + 1823.0, + 2341.0, + 2129.0, + 2062.0, + 2221.0, + 2154.0, + 2172.0, + 2180.0, + 2068.0, + 2300.0, + 2189.0, + 2218.0, + 2369.0, + 2760.0, + 2058.0, + 2860.0, + 2391.0, + 2134.0, + 2120.0, + 2116.0, + 2631.0, + 2251.0, + 2356.0, + 2021.0, + 2205.0, + 2097.0, + 2232.0, + 2222.0, + 2815.0, + 2508.0, + 2266.0, + 2552.0, + 2356.0, + 2411.0, + 2664.0, + 2454.0, + 2733.0, + 2655.0, + 2190.0, + 2416.0, + 2274.0, + 2189.0, + 2105.0, + 2436.0, + 2280.0, + 2628.0, + 2479.0 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values.json deleted file mode 100644 index 6a88c3a85..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [18.14133, 0.62524, 0.62888, 0.61879, 0.62017, 0.62262, 0.61644, 0.95648, 0.62134, 0.62122, 0.62167, 0.61736, 0.62014, 0.61878, 0.61905, 0.62285, 0.62143, 0.62527, 0.6188, 0.61821, 0.62092, 0.6146, 0.62538, 0.62186, 0.62062, 0.61709, 0.61586, 0.62211, 0.62113, 0.62256, 0.91616, 0.62172, 0.62082, 0.61854, 0.61851, 0.61865, 0.61838, 0.62057, 0.62054, 0.62121, 0.62279, 0.61565, 0.61588, 0.61809, 0.6285, 0.62159, 0.619, 0.62096, 0.6161, 0.61341, 0.61939, 0.61863, 0.61901, 0.69973, 0.62205, 0.6203, 0.62205, 0.61913, 0.61593, 0.61268, 0.62209, 0.62242, 0.62178, 0.61463, 0.61723, 0.61562, 0.62222, 0.61147, 0.61537, 0.61793, 0.61712, 0.61962, 0.62226, 0.73426, 0.61519, 0.61809, 0.62057, 0.72077, 0.62008, 0.6196, 0.61771, 0.61875, 0.61628, 0.61618, 0.61608, 0.61962, 0.61838, 0.61834, 0.61866, 0.62047, 0.61852, 0.61278, 0.61478, 0.61796, 0.61939, 0.61855, 0.61816, 0.61585, 0.72525, 0.61589, 0.71497, 0.61452, 0.61899, 0.61647, 0.61769, 0.61448, 0.6133, 0.6161, 0.61341, 0.61318, 0.61661, 0.61966, 0.61316, 0.61487, 0.61573, 0.61347, 0.61386, 0.61593, 0.61745, 0.6185, 0.61792, 0.61356, 0.61533, 0.61644, 0.70276, 0.61398, 0.6159, 0.61832, 0.61774, 0.61711, 0.61411, 0.61533, 0.62272, 0.61709, 0.61557, 0.61705, 0.61893, 0.6177, 0.61888, 0.62207, 0.6181, 0.61501, 0.61758, 0.61994, 0.62402, 0.61667, 0.61599, 0.62131, 0.62011, 0.73481, 0.61752, 0.6206, 0.61654, 0.62124, 0.61775, 0.61832, 0.62597, 0.61901, 0.6153, 0.61393, 0.62147, 0.62628, 0.62091, 0.61689, 0.61436, 0.61683, 0.61743, 0.62116, 0.62033, 0.71198, 0.71973, 0.62179, 0.61968, 0.62104, 0.73504, 0.61833, 0.62098, 0.61898, 0.62766, 0.61917, 0.61475, 0.61706, 0.62025, 0.62046, 0.62146, 0.61796, 0.61756, 0.61818, 0.61889, 0.61869, 0.61959, 0.61761, 0.79997, 0.71316, 0.7092, 
0.61693, 0.61553, 0.61793, 0.62191, 0.61846, 0.60521, 0.63066, 0.62491, 0.6225, 0.62102, 0.62456, 0.6247, 0.6269, 0.62537, 0.62411, 0.6231, 0.62397, 0.61873, 0.61766, 0.72647, 0.61878, 0.70741, 0.62227, 0.71605, 0.62022, 0.61781, 0.62597, 0.62427, 0.73275, 0.61764, 0.62069, 0.61913, 0.61957, 0.62075, 0.61693, 0.62163, 0.62496, 0.62065, 0.61855, 0.62534, 0.62563, 0.63027, 0.62765, 0.62046, 0.62782, 0.6225, 0.62116, 0.71019, 0.62081, 0.62867, 0.61875, 0.61378, 0.61727, 0.6238, 0.62162, 0.62088, 0.61962, 0.62082, 0.62352, 0.62164, 0.62001, 0.62139, 0.62, 0.62818, 0.6266, 0.63112, 0.62627, 0.62702, 0.62774, 0.62831, 0.62063, 0.71258, 0.62584, 0.63033, 0.62439, 0.62649, 0.61461, 0.6209, 0.61667, 0.62067, 0.61793, 0.61954, 0.61977, 0.622, 0.6288, 0.62767, 0.62589, 0.62912, 0.62368, 0.61631, 0.73714, 0.6313, 0.61624, 0.61414, 0.62482, 0.6265, 0.62661, 0.62057, 0.62063, 0.62436, 0.62886, 0.62643, 0.62055, 0.61891, 0.62228, 0.62509, 0.62152, 0.62371, 0.62145, 0.61596, 0.62278, 0.62635, 0.63114, 0.72659, 0.72093, 0.62818, 0.62831, 0.61965, 0.62825, 0.62531, 0.6239, 0.6269, 0.6223, 0.62369, 0.62215, 0.62376, 0.62336, 0.62681, 0.62299, 0.62046, 0.61497, 0.61616, 0.61762, 0.62291, 0.61731, 0.61644, 0.61524, 0.61842, 0.62286, 0.61327, 0.61596, 0.6185, 0.61983, 0.62272, 0.61746, 0.6207, 0.6179, 0.61849, 0.62196, 0.62408, 0.62953, 0.62672, 0.62606, 0.61511, 0.61549, 0.6159, 0.62334, 0.62662, 0.75567, 0.62523, 0.62516, 0.62916, 0.62575, 0.62292, 0.62685, 0.62432, 0.62244, 0.61921, 0.61816, 0.61641, 0.61968, 0.62202, 0.6208, 0.6193, 0.61995, 0.62245, 0.61844, 0.61724, 0.61904, 0.61874, 0.62205, 0.6161, 0.61772, 0.70649, 0.62431, 0.61921, 0.62093, 0.61887, 0.62189, 0.62184, 0.62081, 0.62021, 0.62093, 0.62086, 0.62164, 0.6235, 0.61872, 0.62062, 0.61908, 0.62491, 0.62732, 0.62504, 0.61899, 0.62006, 0.6215]}, "forward-compute-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [9.27215, 0.36134, 0.36093, 0.35232, 0.35362, 0.35668, 0.35229, 0.68753, 0.35087, 0.35407, 0.35147, 0.35356, 0.35146, 0.35384, 0.35274, 0.35595, 0.35404, 0.35262, 0.35078, 0.34962, 0.35338, 0.34834, 0.35424, 0.35549, 0.35524, 0.34948, 0.35114, 0.35465, 0.35306, 0.35417, 0.64338, 0.35253, 0.35038, 0.34824, 0.3516, 0.35295, 0.35334, 0.3507, 0.3518, 0.35354, 0.35258, 0.3508, 0.35045, 0.35367, 0.35832, 0.35222, 0.35029, 0.35265, 0.35179, 0.34702, 0.35321, 0.35445, 0.35177, 0.43752, 0.35531, 0.35287, 0.3529, 0.34925, 0.35154, 0.34648, 0.34908, 0.35314, 0.34798, 0.3481, 0.35014, 0.35038, 0.35008, 0.34793, 0.34843, 0.35226, 0.35123, 0.34921, 0.351, 0.46524, 0.34642, 0.35022, 0.34926, 0.45533, 0.35075, 0.35197, 0.34952, 0.35294, 0.35156, 0.35367, 0.35231, 0.35148, 0.34881, 0.34904, 0.35192, 0.35269, 0.35151, 0.34592, 0.34953, 0.35046, 0.35109, 0.35197, 0.35201, 0.34972, 0.45764, 0.34845, 0.44993, 0.34761, 0.35227, 0.34673, 0.35005, 0.34603, 0.34781, 0.34961, 0.34726, 0.3482, 0.3514, 0.35199, 0.34526, 0.3478, 0.35064, 0.34875, 0.35162, 0.34733, 0.3494, 0.34825, 0.35136, 0.34918, 0.34966, 0.34867, 0.43767, 0.34863, 0.35097, 0.35094, 0.34677, 0.35081, 0.35072, 0.35015, 0.35172, 0.35213, 0.34826, 0.34865, 0.35048, 0.3496, 0.34911, 0.35588, 0.35342, 0.35191, 0.35141, 0.35102, 0.35709, 0.34876, 0.34872, 0.35106, 0.35322, 0.46707, 0.35188, 0.35176, 0.35, 0.35379, 0.3509, 0.35081, 0.3551, 0.35093, 0.34933, 0.34848, 0.35167, 0.35398, 0.34723, 0.34792, 0.34845, 0.34775, 0.35079, 0.34957, 0.35345, 0.44501, 0.45138, 0.34891, 0.35082, 0.3502, 0.46589, 0.35255, 0.35187, 0.35127, 0.35483, 0.35059, 0.34896, 0.34861, 0.35247, 0.35179, 
0.34935, 0.35234, 0.34933, 0.35334, 0.34686, 0.35171, 0.35547, 0.35168, 0.52709, 0.44719, 0.44161, 0.34936, 0.34954, 0.35313, 0.34988, 0.35211, 0.33688, 0.35591, 0.3569, 0.35308, 0.35372, 0.35241, 0.35314, 0.35633, 0.353, 0.35616, 0.35467, 0.35273, 0.3514, 0.35129, 0.45541, 0.3499, 0.44221, 0.35081, 0.44665, 0.35109, 0.35024, 0.35427, 0.35423, 0.46289, 0.34881, 0.35173, 0.34964, 0.35399, 0.35206, 0.35147, 0.35326, 0.35451, 0.35111, 0.35112, 0.35937, 0.35913, 0.36067, 0.35939, 0.35289, 0.35237, 0.34936, 0.35284, 0.44138, 0.35073, 0.35858, 0.35425, 0.34953, 0.35087, 0.35453, 0.35091, 0.35251, 0.34904, 0.35282, 0.35193, 0.35492, 0.35161, 0.35115, 0.35118, 0.36151, 0.35849, 0.36407, 0.35821, 0.36041, 0.35561, 0.36252, 0.35429, 0.44699, 0.36096, 0.36201, 0.35407, 0.35747, 0.35035, 0.35103, 0.34874, 0.35637, 0.3524, 0.35102, 0.35202, 0.35462, 0.35968, 0.35397, 0.35259, 0.35547, 0.35321, 0.35018, 0.46643, 0.3583, 0.35092, 0.34697, 0.3538, 0.35589, 0.35223, 0.35164, 0.35261, 0.35967, 0.36013, 0.35806, 0.35023, 0.35024, 0.3526, 0.34984, 0.35259, 0.35298, 0.35284, 0.35138, 0.35036, 0.35288, 0.35847, 0.45332, 0.44559, 0.35561, 0.35336, 0.3521, 0.35312, 0.35227, 0.35234, 0.35359, 0.35468, 0.35224, 0.35204, 0.35651, 0.35583, 0.35358, 0.35435, 0.35427, 0.3497, 0.35079, 0.35172, 0.35517, 0.35178, 0.35126, 0.34889, 0.35033, 0.35332, 0.34892, 0.35261, 0.35094, 0.35215, 0.35764, 0.35341, 0.35384, 0.35265, 0.35263, 0.35262, 0.35604, 0.36288, 0.35642, 0.35552, 0.3484, 0.34851, 0.3514, 0.36023, 0.35789, 0.48902, 0.36035, 0.36141, 0.3626, 0.35908, 0.35622, 0.35631, 0.35269, 0.35075, 0.35039, 0.35096, 0.35039, 0.34953, 0.35289, 0.34822, 0.35154, 0.35088, 0.35383, 0.35072, 0.34872, 0.34826, 0.34902, 0.35267, 0.34801, 0.34971, 0.43955, 0.35085, 0.34994, 0.35373, 0.34855, 0.3492, 0.35231, 0.34725, 0.35003, 0.3473, 0.35104, 0.34755, 0.34992, 0.35186, 0.35388, 0.35074, 0.34993, 0.35194, 0.35167, 0.34626, 0.35392, 0.35198]}, "backward-compute-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [2.80897, 0.28475, 0.2809, 0.27885, 0.27971, 0.2768, 0.27791, 0.2813, 0.2828, 0.27982, 0.28277, 0.27676, 0.28261, 0.27806, 0.28033, 0.2756, 0.28082, 0.27955, 0.28018, 0.27766, 0.27802, 0.27721, 0.28203, 0.27953, 0.27943, 0.27922, 0.27814, 0.28056, 0.28107, 0.27624, 0.28037, 0.28169, 0.2828, 0.28312, 0.28074, 0.27837, 0.27679, 0.28303, 0.2829, 0.28043, 0.27823, 0.27266, 0.27336, 0.27459, 0.28023, 0.27652, 0.27746, 0.2779, 0.27563, 0.27401, 0.27717, 0.27499, 0.27806, 0.27139, 0.27365, 0.27659, 0.28082, 0.28038, 0.27531, 0.27517, 0.28057, 0.27667, 0.28628, 0.27883, 0.27588, 0.27536, 0.27984, 0.2729, 0.27334, 0.27425, 0.27422, 0.27613, 0.27623, 0.2746, 0.27458, 0.27341, 0.27807, 0.27236, 0.27663, 0.27538, 0.27514, 0.27306, 0.2725, 0.27083, 0.27026, 0.27509, 0.27586, 0.27515, 0.27392, 0.27389, 0.27372, 0.2727, 0.27096, 0.27354, 0.27409, 0.27274, 0.27274, 0.27361, 0.27352, 0.27457, 0.27411, 0.27589, 0.27459, 0.27704, 0.27375, 0.27488, 0.27373, 0.27473, 0.27336, 0.27408, 0.27412, 0.27621, 0.27573, 0.2757, 0.27319, 0.27286, 0.27081, 0.27628, 0.27632, 0.27773, 0.27459, 0.27302, 0.27391, 0.27706, 0.27302, 0.27235, 0.2728, 0.27422, 0.27771, 0.27408, 0.273, 0.27313, 0.27881, 0.2727, 0.27535, 0.27554, 0.27602, 0.27445, 0.27748, 0.27334, 0.27196, 0.27246, 0.27334, 0.2765, 0.27324, 0.27646, 0.27446, 0.27758, 0.27638, 0.2749, 0.27379, 0.27822, 0.27586, 0.27434, 0.27452, 0.2751, 0.27681, 0.27448, 0.27334, 0.27477, 0.27831, 0.27967, 0.28117, 0.27795, 0.27331, 0.27527, 0.27361, 0.27892, 0.27512, 0.27366, 0.27646, 0.27988, 
0.27713, 0.27762, 0.27574, 0.27463, 0.27934, 0.27654, 0.28122, 0.27818, 0.27487, 0.27565, 0.27548, 0.27639, 0.27869, 0.27377, 0.27686, 0.2737, 0.27871, 0.27425, 0.27333, 0.27386, 0.27879, 0.2752, 0.27707, 0.27628, 0.27433, 0.27416, 0.28211, 0.27328, 0.27772, 0.2888, 0.28238, 0.28559, 0.28328, 0.28926, 0.29069, 0.28744, 0.28541, 0.28383, 0.28569, 0.28878, 0.28294, 0.28177, 0.28457, 0.28391, 0.27915, 0.28556, 0.28795, 0.28723, 0.28157, 0.28876, 0.288, 0.28233, 0.28245, 0.28563, 0.28586, 0.27943, 0.28324, 0.27971, 0.28335, 0.28509, 0.28373, 0.28221, 0.27996, 0.2821, 0.28282, 0.28146, 0.2827, 0.29287, 0.28819, 0.28375, 0.28224, 0.28618, 0.28593, 0.27803, 0.2775, 0.27939, 0.28305, 0.28516, 0.28387, 0.28394, 0.27989, 0.28606, 0.28244, 0.28311, 0.2822, 0.28452, 0.28083, 0.28371, 0.27966, 0.28404, 0.27905, 0.28671, 0.28017, 0.28042, 0.27826, 0.27799, 0.28104, 0.28485, 0.2833, 0.27803, 0.28505, 0.28078, 0.27731, 0.27811, 0.2825, 0.2845, 0.28366, 0.28285, 0.29128, 0.28986, 0.28737, 0.28519, 0.28008, 0.28508, 0.29026, 0.27934, 0.27842, 0.28735, 0.28334, 0.29041, 0.28444, 0.28192, 0.27975, 0.28248, 0.28157, 0.28471, 0.28418, 0.28337, 0.29038, 0.28525, 0.28937, 0.28336, 0.28092, 0.28765, 0.2938, 0.28931, 0.28955, 0.29117, 0.29147, 0.29048, 0.28242, 0.29224, 0.28996, 0.28762, 0.28995, 0.28361, 0.28955, 0.28314, 0.28125, 0.28279, 0.28923, 0.28566, 0.28096, 0.27889, 0.27987, 0.28102, 0.28378, 0.27825, 0.27822, 0.28139, 0.28151, 0.284, 0.28038, 0.27763, 0.28234, 0.28237, 0.27877, 0.27839, 0.28213, 0.27969, 0.27977, 0.28461, 0.28193, 0.28295, 0.28539, 0.28439, 0.28043, 0.28021, 0.27978, 0.27678, 0.28057, 0.28152, 0.27875, 0.27736, 0.28042, 0.28071, 0.27701, 0.28009, 0.28081, 0.28054, 0.27846, 0.27695, 0.27435, 0.28018, 0.27863, 0.2831, 0.27711, 0.27774, 0.27798, 0.27776, 0.27805, 0.27924, 0.27943, 0.27863, 0.27639, 0.27628, 0.27471, 0.28218, 0.2775, 0.27692, 0.28008, 0.28228, 0.27856, 0.28233, 0.27871, 0.28388, 0.27878, 0.2831, 0.28268, 0.27716, 0.2756, 0.27712, 0.28343, 0.28463, 0.28241, 0.28327, 0.27551, 0.27892]}, "batch-generator-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.62041, 0.00418, 0.00386, 0.00419, 0.00438, 0.0044, 0.00464, 0.00467, 0.00468, 0.00448, 0.00443, 0.00436, 0.00461, 0.00452, 0.00471, 0.00475, 0.00426, 0.00443, 0.00451, 0.00448, 0.00454, 0.00422, 0.00444, 0.00458, 0.00446, 0.00447, 0.00432, 0.00458, 0.00459, 0.00455, 0.00456, 0.0044, 0.00451, 0.00445, 0.00465, 0.00435, 0.00439, 0.00431, 0.00431, 0.00453, 0.0045, 0.00449, 0.00456, 0.00437, 0.00432, 0.0043, 0.00442, 0.0045, 0.0042, 0.00427, 0.0045, 0.00438, 0.00447, 0.00452, 0.0046, 0.00429, 0.00439, 0.00441, 0.00462, 0.00448, 0.00409, 0.00434, 0.00448, 0.0042, 0.00454, 0.00422, 0.00431, 0.00413, 0.00439, 0.00414, 0.00456, 0.00464, 0.00426, 0.00434, 0.00414, 0.00453, 0.00423, 0.00453, 0.00431, 0.00403, 0.00414, 0.0043, 0.00446, 0.00423, 0.00437, 0.00434, 0.00419, 0.0042, 0.00433, 0.00435, 0.00443, 0.00408, 0.00416, 0.00451, 0.00443, 0.00435, 0.00446, 0.00421, 0.00467, 0.00454, 0.00431, 0.00462, 0.00433, 0.00426, 0.00437, 0.00437, 0.00433, 0.00435, 0.00426, 0.00413, 0.00435, 0.00422, 0.00431, 0.00432, 0.0043, 0.00408, 0.00435, 0.00438, 0.00439, 0.00426, 0.00438, 0.00432, 0.00449, 0.00423, 0.00444, 0.00436, 0.00417, 0.00424, 0.0042, 0.00428, 0.00425, 0.00425, 0.0042, 0.00445, 0.0043, 0.00429, 0.00441, 0.0043, 0.00412, 0.00429, 0.0042, 0.00419, 0.0042, 0.00427, 0.00427, 0.00418, 0.00464, 0.00406, 0.00435, 0.0046, 0.0043, 0.00438, 0.00417, 0.00427, 0.0044, 0.00444, 0.0045, 0.00407, 0.00421, 0.00403, 0.00442, 
0.00418, 0.00425, 0.00425, 0.00434, 0.00422, 0.00432, 0.00446, 0.00435, 0.00452, 0.00428, 0.00408, 0.00445, 0.00414, 0.00441, 0.00412, 0.00434, 0.00445, 0.00425, 0.00412, 0.00432, 0.00441, 0.00432, 0.00422, 0.00429, 0.00407, 0.00434, 0.00448, 0.00434, 0.00434, 0.00423, 0.00422, 0.0046, 0.00418, 0.00445, 0.00432, 0.00422, 0.00418, 0.00408, 0.00434, 0.03441, 0.00493, 0.00506, 0.00555, 0.00518, 0.00512, 0.00537, 0.00513, 0.00501, 0.00506, 0.00504, 0.00473, 0.00488, 0.00523, 0.00528, 0.00511, 0.00526, 0.00496, 0.00546, 0.00512, 0.0054, 0.00539, 0.00514, 0.00484, 0.00515, 0.00531, 0.00515, 0.00498, 0.00509, 0.0051, 0.00516, 0.00496, 0.00494, 0.00501, 0.00511, 0.00536, 0.00517, 0.00549, 0.00531, 0.00526, 0.00531, 0.00497, 0.00498, 0.00524, 0.00486, 0.00502, 0.00497, 0.00491, 0.00509, 0.00466, 0.00519, 0.00528, 0.00486, 0.00509, 0.0049, 0.005, 0.00508, 0.005, 0.00503, 0.00473, 0.00536, 0.00516, 0.00549, 0.00528, 0.00506, 0.00513, 0.00501, 0.00563, 0.00498, 0.00498, 0.0051, 0.00528, 0.00509, 0.005, 0.00495, 0.00509, 0.00508, 0.00485, 0.00479, 0.00485, 0.00507, 0.00499, 0.00463, 0.00497, 0.00487, 0.00529, 0.00518, 0.00483, 0.00513, 0.0051, 0.005, 0.005, 0.00514, 0.00496, 0.00492, 0.00547, 0.00506, 0.00502, 0.00481, 0.0051, 0.00498, 0.0051, 0.00475, 0.00498, 0.0048, 0.00528, 0.00523, 0.0053, 0.00561, 0.00522, 0.00517, 0.00528, 0.00505, 0.00511, 0.00538, 0.00531, 0.00528, 0.00554, 0.00534, 0.00512, 0.00541, 0.00533, 0.00508, 0.00518, 0.00519, 0.00548, 0.00545, 0.00554, 0.0052, 0.00506, 0.00513, 0.00502, 0.00523, 0.00513, 0.00478, 0.00487, 0.00503, 0.00512, 0.0051, 0.00529, 0.005, 0.00521, 0.00528, 0.00511, 0.00522, 0.00513, 0.00533, 0.00502, 0.0053, 0.00492, 0.00522, 0.00496, 0.00488, 0.00513, 0.00506, 0.00519, 0.00508, 0.00521, 0.00442, 0.00409, 0.00426, 0.0043, 0.00418, 0.00428, 0.00456, 0.00443, 0.00422, 0.00426, 0.0043, 0.00429, 0.00435, 0.00446, 0.0044, 0.00447, 0.00444, 0.0043, 0.0042, 0.00438, 0.00422, 0.00429, 0.00463, 0.00435, 0.00431, 0.00447, 0.00431, 0.00441, 0.00417, 0.00425, 0.0044, 0.00438, 0.00438, 0.00439, 0.00447, 0.00402, 0.00423, 0.00447, 0.00451, 0.00457, 0.00458, 0.00426]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [3e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 
2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [4e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 
2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [6.22336, 0.00298, 0.00292, 0.00297, 0.0029, 0.00289, 0.00306, 0.00314, 0.00321, 0.003, 0.00296, 0.00297, 0.00294, 0.00288, 0.00301, 0.00324, 0.00323, 0.00298, 0.00292, 0.00298, 0.00295, 0.0029, 0.00308, 0.00319, 0.00324, 0.00299, 0.00292, 0.00301, 0.00293, 0.00291, 0.00326, 0.00322, 0.00323, 0.0029, 0.00293, 0.003, 0.00291, 0.00287, 0.00303, 0.0032, 0.00322, 0.00298, 0.00294, 0.00295, 0.00296, 0.0029, 0.00305, 0.00322, 0.00321, 0.003, 0.00295, 0.00299, 0.00295, 0.00292, 0.00306, 0.00323, 0.0032, 0.00298, 0.00291, 0.00297, 0.00296, 0.00287, 0.00304, 0.00322, 0.0032, 0.00299, 0.00296, 0.00297, 0.00296, 0.00291, 0.00308, 0.00321, 0.00326, 0.00301, 0.00294, 0.00292, 0.00295, 0.00287, 0.00307, 0.00321, 0.00318, 0.00296, 0.00285, 0.00302, 0.00297, 0.00291, 0.003, 0.00323, 0.0032, 0.003, 0.00292, 0.00294, 0.00297, 0.00285, 0.00306, 0.00318, 0.00314, 0.003, 0.00289, 0.00296, 0.00296, 0.00288, 0.00307, 0.00321, 0.00321, 0.00301, 0.00289, 0.00297, 0.00297, 0.0029, 0.00298, 0.00323, 0.00321, 0.003, 0.00289, 0.00287, 0.00295, 0.00292, 0.00302, 0.00323, 0.00323, 0.003, 0.00292, 0.00291, 0.00298, 0.00286, 0.00306, 0.00321, 0.00322, 0.00302, 0.00289, 0.00293, 0.00286, 0.00288, 0.00306, 0.00322, 0.00319, 0.00295, 0.00285, 0.00297, 0.00295, 0.00289, 0.00305, 0.0032, 0.00324, 0.00298, 0.00291, 0.00297, 0.00289, 0.00289, 0.00304, 0.0032, 0.00314, 0.003, 0.00289, 0.00297, 0.00295, 0.00288, 0.00301, 0.00317, 0.00314, 0.003, 0.00291, 0.00299, 0.00296, 0.0029, 0.00306, 0.00324, 0.00319, 0.00301, 0.0029, 0.00296, 0.00296, 0.0029, 0.00306, 0.00319, 0.0032, 0.003, 0.00285, 0.00298, 0.00296, 0.00281, 0.00305, 0.00318, 0.00322, 0.00297, 0.00291, 0.00299, 0.00294, 0.00292, 0.00307, 0.00323, 0.00324, 0.00299, 0.0029, 0.00299, 0.00295, 0.0029, 0.00305, 0.00319, 0.0029, 0.00305, 0.00311, 0.00325, 0.00324, 0.00308, 0.00284, 0.00305, 0.00295, 0.00305, 0.003, 0.00324, 0.0032, 0.00306, 0.00286, 0.00306, 0.00294, 0.00305, 0.0031, 0.00318, 0.00323, 0.00308, 0.00288, 0.00306, 0.00297, 0.00304, 0.00309, 0.00321, 0.00322, 0.00308, 0.00287, 0.00299, 0.00294, 0.00304, 0.00311, 0.00324, 0.00325, 0.00304, 0.00281, 0.00302, 0.00293, 0.00307, 0.0031, 0.00323, 0.00319, 0.00306, 0.00286, 0.00306, 0.00291, 0.00305, 0.00311, 0.00314, 0.00323, 0.00303, 0.00285, 0.00298, 0.00294, 0.00302, 0.00307, 0.00322, 0.00318, 0.00303, 0.00287, 0.00303, 0.00294, 0.00301, 0.00322, 0.00321, 0.00326, 0.00304, 0.00288, 0.00305, 0.00292, 0.00304, 0.00303, 0.00323, 0.00323, 0.00307, 0.00289, 0.003, 0.00295, 0.00298, 0.00307, 0.00328, 0.00312, 0.00307, 0.00289, 0.00303, 0.00294, 0.00306, 0.00309, 0.00324, 0.0032, 0.00306, 0.0029, 0.00306, 0.00294, 0.00301, 0.00301, 
0.00322, 0.00321, 0.00306, 0.00289, 0.00304, 0.00293, 0.00303, 0.00312, 0.00322, 0.00325, 0.00305, 0.00286, 0.00306, 0.00293, 0.00304, 0.0031, 0.00325, 0.00326, 0.00306, 0.00287, 0.00305, 0.00296, 0.00307, 0.00314, 0.00315, 0.00323, 0.00307, 0.00288, 0.00293, 0.0029, 0.00303, 0.00304, 0.00325, 0.00322, 0.00304, 0.0028, 0.00304, 0.00292, 0.00305, 0.00308, 0.00323, 0.00323, 0.00307, 0.00289, 0.00304, 0.00294, 0.00305, 0.00311, 0.00321, 0.00322, 0.00303, 0.00281, 0.00304, 0.00296, 0.003, 0.0031, 0.00322, 0.00314, 0.00301, 0.00281, 0.00298, 0.00288, 0.00303, 0.00307, 0.00321, 0.0032, 0.00301, 0.00281, 0.00303, 0.00288, 0.00301, 0.00309, 0.00316, 0.00319, 0.00302, 0.00284, 0.00306, 0.00292, 0.003, 0.00328, 0.00321, 0.0032, 0.00301, 0.00285, 0.00297, 0.00284, 0.003, 0.003, 0.00318, 0.00319, 0.00301, 0.00281, 0.00303, 0.00289, 0.003, 0.00305, 0.00315, 0.00308, 0.00303, 0.00279, 0.00299]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0004, 0.00019, 0.00027, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00026, 0.00027, 0.00028, 0.00028, 0.00029, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00031, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00029, 0.00029, 0.00029, 0.00029, 0.00027, 0.00029, 0.00027, 0.00028, 0.00028, 0.00028, 0.00029, 0.00027, 0.00027, 0.00029, 0.00028, 0.0003, 0.00028, 0.00028, 0.00028, 0.00028, 0.00029, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00028, 0.00028, 0.00028, 0.00027, 0.00028, 0.00027, 0.00028, 0.00026, 0.00026, 0.00026, 0.00026, 0.00026, 0.00026, 0.00027, 0.00027, 0.00025, 0.00025, 0.00027, 0.00028, 0.00027, 0.00028, 0.00026, 0.00026, 0.00025, 0.00026, 0.00026, 0.00028, 0.00025, 0.00028, 0.00027, 0.00026, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00026, 0.00028, 0.00029, 0.00028, 0.00028, 0.00028, 0.00027, 0.00027, 0.00028, 0.00028, 0.00028, 0.00027, 0.00028, 0.00027, 0.00028, 0.00027, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00029, 0.00027, 0.00028, 0.00028, 0.00028, 0.00027, 0.00027, 0.00027, 0.00027, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00027, 0.00027, 0.00028, 0.00027, 0.00027, 0.00027, 0.00028, 0.00029, 0.00028, 0.00028, 0.00027, 0.00028, 0.00027, 0.00028, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00028, 0.00027, 0.00028, 0.00028, 0.00029, 0.00027, 0.00028, 0.00027, 0.00027, 0.00029, 0.00028, 0.00028, 0.00027, 0.00028, 0.00028, 0.00027, 0.00028, 0.00029, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00029, 0.00028, 0.00028, 0.00029, 0.00028, 0.00028, 0.00029, 0.00028, 0.00028, 0.00028, 0.00028, 0.00027, 0.00026, 0.00026, 0.00026, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00027, 0.00025, 0.00025, 0.00026, 0.00026, 0.00025, 0.00027, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00027, 0.00025, 0.00025, 0.00025, 0.00027, 0.00027, 0.00025, 0.00025, 0.00025, 0.00026, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00027, 0.00027, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 
0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00027, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00024, 0.00025, 0.00025, 0.00026, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00026, 0.00027, 0.00029, 0.00027, 0.00027, 0.00028, 0.00027, 0.00028, 0.00028, 0.00029, 0.00028, 0.00027, 0.00027, 0.00027, 0.00027, 0.00028, 0.00027, 0.00027, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00028, 0.00025, 0.00027, 0.00025, 0.00027, 0.00028, 0.00027, 0.00027, 0.00027, 0.00027, 0.00028, 0.00027, 0.00028, 0.00027, 0.00027, 0.00027, 0.00027]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.6202, 0.00104, 0.00121, 0.00115, 0.00122, 0.00121, 0.00123, 0.00124, 0.00122, 0.00123, 0.00125, 0.00122, 0.00121, 0.0012, 0.00122, 0.00127, 0.00121, 0.00123, 0.0012, 0.00123, 0.00121, 0.00116, 0.00125, 0.00122, 0.00122, 0.00124, 0.00122, 0.00123, 0.0012, 0.00122, 0.00125, 0.00122, 0.00126, 0.0012, 0.00122, 0.00123, 0.00121, 0.00127, 0.00121, 0.00121, 0.00121, 0.00121, 0.00123, 0.00122, 0.00123, 0.00124, 0.00121, 0.0012, 0.00122, 0.00119, 0.00121, 0.00122, 0.00137, 0.00122, 0.00121, 0.00123, 0.0012, 0.00126, 0.00121, 0.00122, 0.00122, 0.00129, 0.00122, 0.00122, 0.00122, 0.00123, 0.00125, 0.00125, 0.00124, 0.00122, 0.00123, 0.0013, 0.00124, 0.00121, 0.00123, 0.00118, 0.00123, 0.00121, 0.00123, 0.00118, 0.00118, 0.00118, 0.00119, 0.00119, 0.00119, 0.00121, 0.00121, 0.00122, 0.00121, 0.00123, 0.00123, 0.0012, 0.00128, 0.00117, 0.00122, 0.00123, 0.00124, 0.00121, 0.00118, 0.00119, 0.00121, 0.00122, 0.00121, 0.0012, 0.00118, 0.00124, 0.00122, 0.0012, 0.00125, 0.0012, 0.00121, 0.00101, 0.0012, 0.00121, 0.00124, 0.00123, 0.00123, 0.00123, 0.00122, 0.001, 0.00122, 0.00121, 0.001, 0.00125, 0.00122, 0.00121, 0.00124, 0.00121, 0.00121, 0.00099, 0.0012, 0.00125, 0.00121, 0.001, 0.0012, 0.00122, 0.00122, 0.00122, 0.0013, 0.00097, 0.00124, 0.00122, 0.00125, 0.00121, 0.0012, 0.0012, 0.00121, 0.00123, 0.0012, 0.0012, 0.00121, 0.00125, 0.00135, 0.00122, 0.00122, 0.00123, 0.00124, 0.00121, 0.00122, 0.0012, 0.0013, 0.00122, 0.00124, 0.001, 0.00123, 0.00121, 0.00121, 0.00126, 0.00124, 0.00129, 0.00129, 0.00124, 0.00121, 0.00119, 0.0012, 0.00123, 0.00123, 0.00127, 0.00122, 0.00122, 0.0012, 0.00121, 0.00128, 0.0012, 0.00125, 0.00124, 0.00121, 0.00123, 0.00121, 0.00132, 0.00122, 0.00121, 0.0012, 0.00122, 0.00123, 0.00123, 0.00121, 0.0012, 0.00122, 0.00123, 0.0012, 0.00123, 0.0012, 0.00118, 0.00118, 0.00121, 0.00124, 0.0012, 0.00121, 0.00121, 0.00119, 0.00119, 0.0012, 0.0012, 0.0012, 0.00118, 0.00126, 0.00121, 0.00118, 0.0012, 0.00117, 0.00119, 0.00121, 0.00118, 0.00119, 0.00122, 0.0012, 0.0012, 0.00126, 0.00121, 0.00128, 0.00107, 0.00115, 0.00121, 0.00119, 0.00119, 0.00116, 0.00118, 0.0012, 0.00121, 0.00119, 0.0012, 0.0012, 0.0012, 0.00116, 0.00121, 0.0012, 0.00116, 0.00121, 0.00113, 0.00119, 0.00127, 0.0012, 0.00119, 0.00118, 0.00119, 0.0012, 0.00121, 0.00119, 0.00118, 0.00119, 0.0012, 0.00119, 0.0012, 0.0012, 0.00127, 0.00122, 0.0012, 0.00118, 0.00118, 0.00121, 0.00118, 0.00123, 
0.00119, 0.00122, 0.00116, 0.0012, 0.00118, 0.0012, 0.00122, 0.00122, 0.00121, 0.00117, 0.00121, 0.00117, 0.0012, 0.00118, 0.00119, 0.00122, 0.00118, 0.00125, 0.00119, 0.00121, 0.00118, 0.00133, 0.00119, 0.00119, 0.00119, 0.0012, 0.00128, 0.00121, 0.00122, 0.0012, 0.00123, 0.00115, 0.00118, 0.0012, 0.00122, 0.00119, 0.00122, 0.00121, 0.00119, 0.00126, 0.0012, 0.0012, 0.00118, 0.00116, 0.00119, 0.00118, 0.00121, 0.00119, 0.00125, 0.00122, 0.00119, 0.00116, 0.00117, 0.00119, 0.0012, 0.0012, 0.00117, 0.00118, 0.0012, 0.00124, 0.00122, 0.0012, 0.00118, 0.0012, 0.00119, 0.0012, 0.00118, 0.00119, 0.00121, 0.00119, 0.00119, 0.00121, 0.00118, 0.00126, 0.00118, 0.0012, 0.00119, 0.00117, 0.0012, 0.00118, 0.0012, 0.00119, 0.0012, 0.00119, 0.00125, 0.00117, 0.00123, 0.00118, 0.00122, 0.00122, 0.00122, 0.00117, 0.00123, 0.00122, 0.00121, 0.00121, 0.0012, 0.00121, 0.00128, 0.00123, 0.00116, 0.0012, 0.00123, 0.00123, 0.00116, 0.00123, 0.00121, 0.0012, 0.00121, 0.00122, 0.00124, 0.00128, 0.00122, 0.00117, 0.00123, 0.00124, 0.00122, 0.00118, 0.0012, 0.00117, 0.00125, 0.00122, 0.00117, 0.00115, 0.00118, 0.00113, 0.0012]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00555, 0.00512, 0.0052, 0.0051, 0.00517, 0.00513, 0.00514, 0.00513, 0.00512, 0.00511, 0.00508, 0.0051, 0.0051, 0.00512, 0.00511, 0.00509, 0.00508, 0.00511, 0.00514, 0.0051, 0.00509, 0.0051, 0.00514, 0.00512, 0.00512, 0.00512, 0.00514, 0.00517, 0.00511, 0.00513, 0.00513, 0.00516, 0.00515, 0.00515, 0.00516, 0.00514, 0.00513, 0.00543, 0.00514, 0.00512, 0.00514, 0.00513, 0.00513, 0.00516, 0.00512, 0.00515, 0.00511, 0.00513, 0.00515, 0.00514, 0.0051, 0.00512, 0.0057, 0.00511, 0.00513, 0.00513, 0.00514, 0.0053, 0.00514, 0.00511, 0.00513, 0.00512, 0.00513, 0.00518, 0.00513, 0.00514, 0.00512, 0.00513, 0.00512, 0.00509, 0.00512, 0.00539, 0.00514, 0.00514, 0.0051, 0.00512, 0.00511, 0.00512, 0.00511, 0.00511, 0.00512, 0.00513, 0.00511, 0.00514, 0.00512, 0.0051, 0.00514, 0.00511, 0.00512, 0.00522, 0.0051, 0.00514, 0.00572, 0.0051, 0.00515, 0.00526, 0.00509, 0.00511, 0.00513, 0.00513, 0.00518, 0.00514, 0.00511, 0.00512, 0.00512, 0.00511, 0.00514, 0.00512, 0.00518, 0.00514, 0.00512, 0.00513, 0.00512, 0.00512, 0.00512, 0.00511, 0.00509, 0.00514, 0.00519, 0.00512, 0.0051, 0.00513, 0.0051, 0.00548, 0.00514, 0.00512, 0.00512, 0.00511, 0.00511, 0.00512, 0.00511, 0.00519, 0.00533, 0.00509, 0.00512, 0.0051, 0.00513, 0.00511, 0.00515, 0.00508, 0.00512, 0.00513, 0.0057, 0.00513, 0.00513, 0.00516, 0.00518, 0.00515, 0.00517, 0.00513, 0.00514, 0.00516, 0.0057, 0.00516, 0.00515, 0.00514, 0.00513, 0.00513, 0.00516, 0.00516, 0.00566, 0.00514, 0.00514, 0.00515, 0.00516, 0.00515, 0.00513, 0.00517, 0.00513, 0.00513, 0.00601, 0.00514, 0.00522, 0.00513, 0.00515, 0.00514, 0.00517, 0.00511, 0.00515, 0.00516, 0.00515, 0.00514, 0.00515, 0.00512, 0.00587, 0.00517, 0.00518, 0.00516, 0.00513, 0.00541, 0.00514, 0.00515, 0.00513, 0.00516, 0.00521, 0.00531, 0.00532, 0.00517, 0.00516, 0.00515, 0.00511, 0.00529, 0.00509, 0.00511, 0.00512, 0.00512, 0.00512, 0.00515, 0.0053, 0.0051, 0.00512, 0.00512, 0.00512, 0.00511, 0.0051, 0.00513, 0.00512, 0.00513, 0.00513, 0.00512, 0.00559, 0.00511, 0.0051, 0.0051, 0.00512, 0.00515, 0.00512, 0.00511, 0.00579, 0.00512, 0.00511, 0.00512, 0.00511, 0.00511, 0.00511, 0.00513, 0.00508, 0.00513, 0.00511, 0.00509, 0.00512, 0.0051, 0.00512, 0.00511, 0.00512, 0.00513, 0.00511, 0.00514, 0.00511, 0.00512, 0.00512, 0.0059, 0.00513, 0.00514, 0.00512, 0.00511, 0.00513, 0.00511, 0.00511, 0.0051, 0.00509, 0.0051, 
0.00512, 0.0051, 0.0051, 0.00511, 0.00513, 0.00513, 0.0051, 0.00513, 0.00511, 0.0051, 0.0051, 0.00511, 0.00512, 0.00511, 0.00509, 0.00513, 0.0051, 0.0051, 0.00518, 0.0051, 0.00513, 0.00509, 0.00513, 0.00512, 0.00511, 0.00515, 0.00512, 0.00512, 0.00512, 0.00512, 0.00512, 0.00511, 0.00601, 0.00512, 0.00524, 0.00512, 0.0051, 0.00511, 0.00509, 0.00512, 0.0051, 0.00512, 0.00511, 0.00511, 0.00526, 0.0051, 0.00511, 0.00512, 0.00511, 0.00511, 0.00514, 0.00511, 0.00512, 0.00509, 0.00511, 0.00512, 0.00512, 0.00509, 0.0051, 0.00511, 0.00511, 0.00513, 0.00512, 0.00541, 0.00512, 0.00515, 0.00511, 0.00509, 0.0051, 0.00512, 0.00511, 0.00512, 0.00511, 0.00517, 0.00514, 0.00513, 0.00513, 0.00512, 0.00511, 0.00514, 0.00511, 0.00514, 0.00509, 0.00508, 0.00513, 0.00509, 0.0051, 0.00513, 0.00511, 0.00571, 0.00519, 0.00511, 0.00511, 0.0051, 0.00511, 0.00512, 0.00513, 0.00511, 0.00511, 0.00511, 0.00511, 0.00512, 0.00511, 0.00509, 0.00514, 0.00511, 0.00516, 0.00512, 0.0053, 0.00511, 0.00512, 0.00521, 0.00512, 0.00513, 0.00514, 0.00512, 0.00512, 0.00514, 0.0051, 0.00511, 0.00513, 0.00512, 0.00509, 0.00519, 0.00512, 0.0051, 0.00509, 0.00596, 0.00512, 0.0051, 0.0051, 0.00513, 0.00513, 0.0051, 0.00511, 0.00509, 0.00512, 0.00511]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00417, 0.00096, 0.00098, 0.00098, 0.00099, 0.00097, 0.00098, 0.00098, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00099, 0.00098, 0.00097, 0.00098, 0.00097, 0.00097, 0.00096, 0.00098, 0.00098, 0.00099, 0.00099, 0.00097, 0.00096, 0.00098, 0.00098, 0.00101, 0.00097, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097, 0.00097, 0.00098, 0.00098, 0.00098, 0.00096, 0.00097, 0.00097, 0.00097, 0.00098, 0.00098, 0.00097, 0.00097, 0.00097, 0.00098, 0.00098, 0.00099, 0.00098, 0.00097, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00098, 0.00096, 0.00097, 0.00097, 0.00098, 0.00096, 0.00096, 0.00097, 0.00098, 0.00096, 0.00097, 0.00096, 0.00097, 0.00099, 0.00096, 0.00098, 0.00098, 0.00097, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00099, 0.00096, 0.00096, 0.00097, 0.00097, 0.00097, 0.00098, 0.00096, 0.00097, 0.00097, 0.00098, 0.00099, 0.00098, 0.00096, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00098, 0.00099, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00098, 0.00097, 0.00098, 0.00097, 0.00096, 0.00097, 0.00099, 0.00098, 0.00097, 0.00098, 0.00096, 0.00097, 0.00097, 0.00096, 0.00097, 0.00098, 0.00099, 0.00097, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097, 0.00097, 0.00099, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00096, 0.00097, 0.00098, 0.00097, 0.001, 0.00097, 0.00097, 0.00097, 0.00098, 0.00097, 0.00098, 0.00097, 0.00099, 0.00097, 0.00097, 0.00096, 0.00098, 0.00097, 0.00098, 0.00097, 0.00097, 0.00097, 0.00098, 0.00098, 0.00097, 0.00097, 0.00099, 0.00097, 0.00098, 0.00098, 0.00097, 0.00097, 0.00098, 0.00098, 0.001, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00096, 0.00096, 0.00096, 0.00097, 0.001, 0.00096, 0.00099, 0.00097, 0.00098, 0.00097, 0.00099, 0.00096, 0.00128, 0.00096, 0.00096, 0.00097, 0.00097, 0.00097, 0.00099, 0.00096, 0.00097, 0.00096, 0.00097, 0.00097, 0.00096, 0.00096, 0.00096, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00096, 0.00098, 0.00097, 0.00097, 
0.00096, 0.00097, 0.001, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00096, 0.00097, 0.00096, 0.00096, 0.00097, 0.00096, 0.00096, 0.00096, 0.00097, 0.001, 0.00097, 0.00096, 0.00097, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00096, 0.00096, 0.00096, 0.00097, 0.00099, 0.00096, 0.00097, 0.00096, 0.00096, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00098, 0.00097, 0.00097, 0.00099, 0.00096, 0.00097, 0.00096, 0.00096, 0.00098, 0.00096, 0.00096, 0.00097, 0.00098, 0.00096, 0.00097, 0.00097, 0.00096, 0.00098, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00097, 0.00096, 0.00097, 0.00097, 0.00096, 0.00096, 0.00097, 0.00096, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00096, 0.00096, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00096, 0.00097, 0.00096, 0.00097, 0.00097, 0.00097, 0.00097, 0.00096, 0.00097, 0.00095, 0.00096, 0.00097, 0.00098, 0.00097, 0.00097, 0.00097, 0.00097, 0.00096, 0.00096, 0.00096, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00098, 0.00098, 0.00098, 0.00098, 0.001, 0.00098, 0.00098, 0.00098, 0.00097, 0.00097, 0.00098, 0.00098, 0.00101, 0.00098, 0.00098, 0.00097, 0.00098, 0.00097, 0.00097, 0.00099, 0.00097, 0.00098, 0.00098, 0.00096, 0.00098, 0.00097, 0.00098, 0.00099, 0.00097, 0.00098, 0.00097, 0.00097, 0.00098, 0.00098]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00118, 0.00099, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.001, 0.001, 0.00101, 0.00101, 0.00101, 0.00103, 0.001, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00102, 0.00101, 0.00102, 0.00101, 0.001, 0.001, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.001, 0.00101, 0.001, 0.00102, 0.00102, 0.001, 0.00101, 0.00101, 0.00101, 0.001, 0.00101, 0.00101, 0.00101, 0.00101, 0.00102, 0.00101, 0.00101, 0.001, 0.001, 0.00101, 0.00102, 0.00102, 0.001, 0.00101, 0.001, 0.00101, 0.001, 0.00101, 0.00101, 0.00101, 0.00105, 0.00101, 0.00102, 0.001, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.001, 0.001, 0.00102, 0.001, 0.00101, 0.00102, 0.00101, 0.00101, 0.00101, 0.001, 0.00101, 0.00103, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00102, 0.00101, 0.00101, 0.00106, 0.00101, 0.00102, 0.00101, 0.00101, 0.00101, 0.001, 0.00101, 0.001, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00102, 0.00102, 0.00102, 0.00101, 0.00101, 0.00102, 0.001, 0.00106, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00103, 0.00102, 0.00101, 0.00102, 0.00101, 0.00101, 0.00101, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001, 0.00102, 0.00101, 0.001, 0.001, 0.001, 0.001, 0.001, 0.00101, 0.00101, 0.00101, 0.00102, 0.00102, 0.00101, 0.00102, 0.00103, 0.00102, 0.00101, 0.00101, 0.00102, 0.00101, 0.00101, 0.00101, 0.00102, 0.00101, 0.00101, 0.00101, 0.00102, 0.00101, 0.00103, 0.00101, 0.00101, 0.00101, 0.00101, 0.00102, 0.00102, 0.00101, 0.00102, 0.00101, 0.00101, 0.00102, 0.00102, 0.00102, 0.00105, 0.00102, 0.00102, 0.00101, 0.00101, 0.00102, 0.00101, 0.00103, 0.00102, 0.00102, 0.00101, 0.00106, 0.00102, 0.00101, 0.00103, 0.00103, 0.00102, 0.00103, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00108, 0.00102, 0.00104, 0.00102, 0.00102, 0.00103, 0.00103, 0.00102, 0.00107, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 
0.00102, 0.00107, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00103, 0.00103, 0.00102, 0.00103, 0.00102, 0.00102, 0.00104, 0.00102, 0.00104, 0.00102, 0.00102, 0.00103, 0.00103, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00101, 0.00103, 0.00101, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00105, 0.00102, 0.00102, 0.00104, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00101, 0.00103, 0.00104, 0.00103, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00108, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00122, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00101, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00101, 0.00102, 0.00103, 0.00103, 0.00103, 0.00101, 0.00102, 0.00102, 0.00102, 0.00101, 0.00101, 0.00105, 0.00102, 0.00103, 0.00102, 0.00102, 0.00101, 0.00102, 0.00102, 0.00102, 0.00101, 0.00101, 0.00101, 0.00102, 0.00101, 0.00101, 0.00102, 0.00102, 0.00102, 0.00101, 0.00102, 0.00103, 0.00101, 0.00102, 0.00102, 0.00102, 0.00102, 0.00101, 0.00104, 0.00102, 0.00102, 0.00102, 0.00102, 0.00101, 0.00102, 0.00102, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00102, 0.00103, 0.00102, 0.00102, 0.00101, 0.00102, 0.00101, 0.00101]}, "optimizer-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.63386, 0.00867, 0.00903, 0.00886, 0.00906, 0.00897, 0.00901, 0.009, 0.00896, 0.00895, 0.00895, 0.00895, 0.00894, 0.00894, 0.00896, 0.009, 0.00892, 0.00896, 0.00899, 0.00897, 0.00892, 0.00887, 0.00902, 0.00897, 0.009, 0.00906, 0.00899, 0.00902, 0.00897, 0.00898, 0.0091, 0.00901, 0.00904, 0.00898, 0.00901, 0.009, 0.00902, 0.00937, 0.00899, 0.00896, 0.00901, 0.00897, 0.00899, 0.00902, 0.00897, 0.00903, 0.00895, 0.00898, 0.00899, 0.00895, 0.00896, 0.00898, 0.00978, 0.00897, 0.00898, 0.009, 0.00895, 0.0092, 0.00896, 0.00901, 0.009, 0.00904, 0.00898, 0.00902, 0.00897, 0.00899, 0.00902, 0.00902, 0.00899, 0.00899, 0.00898, 0.00934, 0.00904, 0.00896, 0.00897, 0.00891, 0.00895, 0.00892, 0.00894, 0.0089, 0.00889, 0.0089, 0.00891, 0.00892, 0.00888, 0.0089, 0.009, 0.00896, 0.00895, 0.0091, 0.00889, 0.00892, 0.00967, 0.00886, 0.009, 0.00913, 0.00896, 0.00896, 0.00889, 0.00895, 0.00901, 0.00899, 0.00903, 0.00893, 0.00893, 0.00898, 0.009, 0.00894, 0.00905, 0.00897, 0.00894, 0.00877, 0.00897, 0.00898, 0.00902, 0.00895, 0.00895, 0.009, 0.00905, 0.00875, 0.00895, 0.00897, 0.00872, 0.00942, 0.00901, 0.00898, 0.00897, 0.00894, 0.00895, 0.00876, 0.00895, 0.00907, 0.00917, 0.00872, 0.00895, 0.00893, 0.00898, 0.00897, 0.00906, 0.00866, 0.00896, 0.00897, 0.00964, 0.00897, 0.00897, 0.00898, 0.009, 0.009, 0.009, 0.00894, 0.00898, 0.00904, 0.00977, 0.00905, 0.00899, 0.00901, 0.00905, 0.00898, 0.00901, 0.00898, 0.00965, 0.009, 0.009, 0.00878, 0.00905, 0.00899, 0.00898, 0.00904, 0.00902, 0.00906, 0.01008, 0.00901, 0.00907, 0.00895, 0.00899, 0.00902, 0.00905, 0.00902, 0.00902, 0.00901, 0.00899, 0.00898, 0.00908, 0.00899, 0.00979, 0.00905, 0.00904, 0.00903, 0.009, 0.00938, 0.00899, 0.00901, 0.00904, 0.00902, 0.00909, 0.00923, 0.00917, 0.00901, 0.00905, 0.00903, 0.00899, 0.00918, 0.00889, 0.00891, 0.00894, 0.00894, 0.00896, 0.00895, 0.00912, 0.00892, 0.00889, 0.00896, 0.0089, 0.00891, 0.00901, 0.0089, 0.00904, 0.00893, 0.00893, 0.00894, 0.00942, 0.00889, 0.00938, 0.00887, 0.00892, 0.00897, 0.00893, 0.00896, 
0.00974, 0.00891, 0.009, 0.00879, 0.00886, 0.00891, 0.0089, 0.00892, 0.00885, 0.00891, 0.0089, 0.00892, 0.00896, 0.0089, 0.00892, 0.00893, 0.00891, 0.00894, 0.00892, 0.00891, 0.00894, 0.00885, 0.00891, 0.00986, 0.00894, 0.00893, 0.00892, 0.00894, 0.00896, 0.00889, 0.00893, 0.00888, 0.0089, 0.00891, 0.0089, 0.0089, 0.00894, 0.00901, 0.00902, 0.00898, 0.00887, 0.00892, 0.00897, 0.00888, 0.00894, 0.00889, 0.00893, 0.00887, 0.00889, 0.00895, 0.00891, 0.00891, 0.00904, 0.00901, 0.00889, 0.00892, 0.00891, 0.00892, 0.00891, 0.00892, 0.00895, 0.00891, 0.00902, 0.00891, 0.00892, 0.00889, 0.01004, 0.00891, 0.00907, 0.00893, 0.00889, 0.00901, 0.00889, 0.00893, 0.00895, 0.00898, 0.00885, 0.00891, 0.00914, 0.00891, 0.00891, 0.00894, 0.00892, 0.00888, 0.009, 0.0089, 0.00948, 0.00889, 0.00887, 0.00893, 0.00889, 0.00889, 0.00891, 0.00896, 0.00894, 0.00893, 0.00888, 0.00921, 0.00895, 0.00893, 0.00894, 0.00887, 0.0089, 0.00897, 0.00896, 0.00894, 0.00893, 0.00896, 0.009, 0.00892, 0.00897, 0.00891, 0.00889, 0.00895, 0.0089, 0.00893, 0.00891, 0.00886, 0.009, 0.00888, 0.00889, 0.00894, 0.00885, 0.00955, 0.00901, 0.00895, 0.00891, 0.0089, 0.00889, 0.00898, 0.00888, 0.00898, 0.00889, 0.00895, 0.00895, 0.00896, 0.00891, 0.00895, 0.00904, 0.00897, 0.00901, 0.00897, 0.00919, 0.00904, 0.00899, 0.00902, 0.00895, 0.00901, 0.00901, 0.00892, 0.00909, 0.00899, 0.00896, 0.00901, 0.00899, 0.009, 0.00896, 0.00905, 0.0089, 0.00897, 0.00898, 0.00984, 0.00894, 0.00894, 0.00891, 0.00903, 0.00898, 0.00894, 0.00889, 0.0089, 0.0089, 0.00894]}, "learning-rate": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 
7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 
9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05]}, "batch-size": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 
128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0]}, "lm loss": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [10.88321, 10.90268, 10.88687, 10.83314, 10.67636, 10.64925, 10.43407, 10.15143, 9.939, 9.84142, 9.58871, 9.85432, 9.88466, 9.62953, 9.78812, 9.5115, 9.45845, 9.64924, 9.38622, 9.33216, 9.24226, 9.14549, 9.17557, 8.99547, 9.18942, 9.05996, 9.15554, 9.16495, 9.29785, 8.98464, 8.92921, 9.04391, 9.04317, 8.65502, 8.71709, 8.75344, 8.68371, 8.7343, 8.65869, 8.76488, 8.66084, 8.84969, 8.83212, 8.4992, 8.38905, 8.43151, 8.49327, 8.38449, 8.43266, 8.57974, 8.36712, 8.19218, 8.22599, 8.22213, 8.26761, 7.91363, 8.09574, 7.89107, 8.2463, 8.23044, 8.00478, 7.9653, 7.91788, 7.73983, 7.73952, 7.64266, 7.51535, 7.9067, 7.6981, 7.45174, 7.74028, 7.76751, 7.54113, 7.29838, 7.45192, 7.33549, 7.46187, 7.22351, 7.63653, 7.27884, 7.35151, 7.2129, 7.2187, 7.42237, 7.17713, 7.28373, 7.00153, 7.00528, 7.04066, 7.1397, 6.8246, 6.98624, 7.08901, 7.00075, 6.87398, 6.75446, 6.98902, 7.05484, 6.70056, 6.57618, 6.7239, 6.73842, 6.73087, 6.73636, 6.65702, 6.40579, 6.6386, 6.62005, 6.44721, 6.63067, 6.74344, 6.6111, 6.7266, 6.69523, 6.62503, 6.50683, 6.59892, 6.4067, 6.66402, 6.24864, 6.25205, 6.30302, 6.38991, 6.35064, 6.45057, 6.2892, 6.34021, 
6.23934, 6.20441, 6.39672, 6.32669, 6.3228, 6.16602, 6.15875, 6.24058, 6.38585, 6.20055, 6.14534, 6.17669, 6.1094, 6.05525, 6.06665, 6.2527, 6.40409, 6.25252, 6.2934, 6.0919, 6.17395, 5.99575, 6.02272, 5.94996, 6.23797, 6.18154, 5.95877, 5.77498, 6.11727, 5.84271, 6.09751, 5.78563, 6.15394, 6.14296, 6.08411, 5.92729, 6.11238, 5.94309, 6.19339, 5.89494, 5.792, 5.77614, 5.6837, 6.01618, 5.99613, 6.06338, 5.88778, 6.04018, 5.96996, 5.99544, 5.98695, 5.94778, 5.84144, 5.95287, 5.61942, 5.70133, 5.88893, 5.84402, 5.86128, 5.76114, 5.83707, 5.72343, 5.55889, 5.72351, 5.62534, 5.83303, 5.60569, 5.7102, 5.70991, 5.89681, 5.64325, 5.84924, 5.73928, 5.87114, 5.33228, 5.89693, 5.872, 5.85316, 5.40988, 5.4088, 5.62665, 5.59641, 5.48639, 5.57896, 5.67332, 5.47579, 5.74541, 5.50851, 5.59461, 5.621, 5.62129, 5.51073, 5.61357, 5.67793, 5.68632, 5.58943, 5.66035, 5.37294, 5.67985, 5.62736, 5.42133, 5.58734, 5.63109, 5.55307, 5.34119, 5.53841, 5.48634, 5.48174, 5.37484, 5.55776, 5.60342, 5.38738, 5.52728, 5.4859, 5.33181, 5.50554, 5.40833, 5.44, 5.31717, 5.06482, 5.47629, 5.56511, 5.71212, 5.41184, 5.59499, 5.63272, 5.23153, 5.27192, 5.3912, 5.39311, 5.32484, 5.49539, 5.18175, 5.29693, 5.24506, 5.37468, 5.25384, 5.44332, 5.53548, 5.3125, 5.43753, 5.3339, 5.07, 5.31161, 5.25178, 5.30057, 5.1086, 5.27262, 5.26395, 5.46902, 5.15667, 5.26704, 5.20746, 5.35466, 4.98016, 4.91076, 5.3213, 5.39019, 5.22162, 5.3164, 5.10162, 5.1553, 5.25943, 5.06435, 5.26075, 5.07101, 5.33638, 5.24297, 5.14623, 5.23826, 5.03699, 5.31101, 5.04764, 5.02142, 5.13778, 5.10838, 5.26722, 5.14671, 5.27266, 5.09162, 5.0919, 5.24829, 5.3185, 5.25029, 5.18579, 5.14206, 5.28335, 4.94328, 5.20523, 5.08657, 5.29719, 5.17312, 5.18231, 5.10943, 4.98051, 4.99195, 5.21896, 5.30825, 5.09051, 5.05174, 4.91264, 5.11732, 5.11518, 4.92322, 5.33386, 5.02007, 5.09792, 5.16007, 4.99811, 5.05898, 5.06488, 4.98971, 5.07389, 5.15699, 4.97292, 5.17835, 4.92646, 4.91925, 5.06679, 4.99198, 4.90773, 4.77047, 4.93905, 5.10914, 5.0148, 5.01342, 5.32728, 4.95518, 4.99041, 5.04238, 4.79783, 4.72965, 4.99227, 5.0394, 4.87169, 4.95051, 5.03887, 5.01995, 4.81482, 4.88854, 4.89947, 4.82779, 4.74234, 5.00778, 4.7467, 5.20619, 4.78181, 4.98955, 4.73414, 4.78105, 4.81703, 4.64628, 4.65374, 4.83873, 4.80327, 4.79812, 4.9214, 4.87849, 4.92132, 4.76615, 4.87858, 4.72843, 4.9077, 4.95342, 4.86965, 4.70236, 4.77862, 4.89666, 4.70572, 4.85677, 4.68692, 4.68192, 4.64505]}, "lm loss vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [10.88321, 10.90268, 10.88687, 10.83314, 10.67636, 10.64925, 10.43407, 10.15143, 9.939, 9.84142, 9.58871, 9.85432, 9.88466, 9.62953, 9.78812, 9.5115, 9.45845, 9.64924, 9.38622, 9.33216, 9.24226, 9.14549, 9.17557, 8.99547, 9.18942, 9.05996, 9.15554, 9.16495, 9.29785, 8.98464, 8.92921, 9.04391, 9.04317, 8.65502, 8.71709, 8.75344, 8.68371, 8.7343, 8.65869, 8.76488, 8.66084, 8.84969, 8.83212, 8.4992, 8.38905, 8.43151, 8.49327, 8.38449, 8.43266, 8.57974, 8.36712, 8.19218, 8.22599, 8.22213, 8.26761, 7.91363, 8.09574, 7.89107, 8.2463, 8.23044, 8.00478, 7.9653, 7.91788, 7.73983, 7.73952, 7.64266, 7.51535, 7.9067, 7.6981, 7.45174, 7.74028, 7.76751, 7.54113, 7.29838, 7.45192, 7.33549, 7.46187, 7.22351, 7.63653, 7.27884, 7.35151, 7.2129, 7.2187, 7.42237, 7.17713, 7.28373, 7.00153, 7.00528, 7.04066, 7.1397, 6.8246, 6.98624, 7.08901, 7.00075, 6.87398, 6.75446, 6.98902, 7.05484, 6.70056, 6.57618, 6.7239, 6.73842, 6.73087, 6.73636, 6.65702, 6.40579, 6.6386, 6.62005, 6.44721, 6.63067, 6.74344, 6.6111, 6.7266, 6.69523, 6.62503, 6.50683, 
6.59892, 6.4067, 6.66402, 6.24864, 6.25205, 6.30302, 6.38991, 6.35064, 6.45057, 6.2892, 6.34021, 6.23934, 6.20441, 6.39672, 6.32669, 6.3228, 6.16602, 6.15875, 6.24058, 6.38585, 6.20055, 6.14534, 6.17669, 6.1094, 6.05525, 6.06665, 6.2527, 6.40409, 6.25252, 6.2934, 6.0919, 6.17395, 5.99575, 6.02272, 5.94996, 6.23797, 6.18154, 5.95877, 5.77498, 6.11727, 5.84271, 6.09751, 5.78563, 6.15394, 6.14296, 6.08411, 5.92729, 6.11238, 5.94309, 6.19339, 5.89494, 5.792, 5.77614, 5.6837, 6.01618, 5.99613, 6.06338, 5.88778, 6.04018, 5.96996, 5.99544, 5.98695, 5.94778, 5.84144, 5.95287, 5.61942, 5.70133, 5.88893, 5.84402, 5.86128, 5.76114, 5.83707, 5.72343, 5.55889, 5.72351, 5.62534, 5.83303, 5.60569, 5.7102, 5.70991, 5.89681, 5.64325, 5.84924, 5.73928, 5.87114, 5.33228, 5.89693, 5.872, 5.85316, 5.40988, 5.4088, 5.62665, 5.59641, 5.48639, 5.57896, 5.67332, 5.47579, 5.74541, 5.50851, 5.59461, 5.621, 5.62129, 5.51073, 5.61357, 5.67793, 5.68632, 5.58943, 5.66035, 5.37294, 5.67985, 5.62736, 5.42133, 5.58734, 5.63109, 5.55307, 5.34119, 5.53841, 5.48634, 5.48174, 5.37484, 5.55776, 5.60342, 5.38738, 5.52728, 5.4859, 5.33181, 5.50554, 5.40833, 5.44, 5.31717, 5.06482, 5.47629, 5.56511, 5.71212, 5.41184, 5.59499, 5.63272, 5.23153, 5.27192, 5.3912, 5.39311, 5.32484, 5.49539, 5.18175, 5.29693, 5.24506, 5.37468, 5.25384, 5.44332, 5.53548, 5.3125, 5.43753, 5.3339, 5.07, 5.31161, 5.25178, 5.30057, 5.1086, 5.27262, 5.26395, 5.46902, 5.15667, 5.26704, 5.20746, 5.35466, 4.98016, 4.91076, 5.3213, 5.39019, 5.22162, 5.3164, 5.10162, 5.1553, 5.25943, 5.06435, 5.26075, 5.07101, 5.33638, 5.24297, 5.14623, 5.23826, 5.03699, 5.31101, 5.04764, 5.02142, 5.13778, 5.10838, 5.26722, 5.14671, 5.27266, 5.09162, 5.0919, 5.24829, 5.3185, 5.25029, 5.18579, 5.14206, 5.28335, 4.94328, 5.20523, 5.08657, 5.29719, 5.17312, 5.18231, 5.10943, 4.98051, 4.99195, 5.21896, 5.30825, 5.09051, 5.05174, 4.91264, 5.11732, 5.11518, 4.92322, 5.33386, 5.02007, 5.09792, 5.16007, 4.99811, 5.05898, 5.06488, 4.98971, 5.07389, 5.15699, 4.97292, 5.17835, 4.92646, 4.91925, 5.06679, 4.99198, 4.90773, 4.77047, 4.93905, 5.10914, 5.0148, 5.01342, 5.32728, 4.95518, 4.99041, 5.04238, 4.79783, 4.72965, 4.99227, 5.0394, 4.87169, 4.95051, 5.03887, 5.01995, 4.81482, 4.88854, 4.89947, 4.82779, 4.74234, 5.00778, 4.7467, 5.20619, 4.78181, 4.98955, 4.73414, 4.78105, 4.81703, 4.64628, 4.65374, 4.83873, 4.80327, 4.79812, 4.9214, 4.87849, 4.92132, 4.76615, 4.87858, 4.72843, 4.9077, 4.95342, 4.86965, 4.70236, 4.77862, 4.89666, 4.70572, 4.85677, 4.68692, 4.68192, 4.64505]}, "loss-scale": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [12.95641, 13.2384, 13.63492, 12.46753, 12.09519, 9.48185, 7.05331, 7.26898, 6.13791, 4.65533, 4.16677, 2.85409, 2.39258, 2.35693, 2.05902, 2.22136, 2.15373, 1.91319, 2.28507, 2.08136, 2.12587, 2.16293, 2.01255, 2.22443, 1.98488, 2.10576, 1.90696, 1.9543, 1.94666, 2.19132, 2.07534, 1.9973, 1.90676, 2.17071, 2.13949, 2.12242, 
2.00142, 1.85779, 1.93941, 1.74128, 2.19131, 1.80266, 1.76804, 1.92184, 1.89627, 1.81829, 1.73892, 1.73316, 1.7548, 1.56741, 1.70661, 1.78909, 1.75371, 1.8099, 1.69083, 1.80378, 1.72805, 1.87537, 1.64718, 1.47793, 1.64751, 1.54177, 1.73678, 1.93709, 1.70003, 1.61404, 1.65733, 1.60718, 1.41019, 1.66006, 1.44415, 1.3449, 1.59801, 1.38078, 1.40657, 1.58642, 1.37384, 1.47591, 1.51235, 1.32276, 1.27695, 1.35665, 1.39793, 1.46181, 1.25641, 1.39278, 1.37555, 1.31206, 1.25327, 1.08729, 1.11608, 1.26073, 1.05493, 1.26676, 1.03825, 1.22449, 1.31527, 1.17458, 1.05643, 1.32651, 1.60257, 1.2771, 1.33646, 1.31918, 1.248, 1.20478, 1.17877, 1.39792, 1.21711, 1.31304, 1.06851, 0.90225, 1.00231, 1.02701, 1.08335, 1.06592, 1.11157, 1.35469, 1.11475, 0.96782, 1.00793, 1.10818, 0.98621, 1.2088, 1.33881, 1.44029, 1.6209, 1.4596, 1.76932, 0.95989, 1.18019, 1.10796, 1.01963, 0.97229, 1.12326, 1.18955, 1.04787, 1.17124, 1.15064, 0.95989, 1.2251, 1.2379, 1.76155, 1.26203, 1.48837, 1.2467, 1.12532, 1.2807, 1.00776, 1.29835, 1.39203, 1.19636, 1.4484, 1.31191, 1.0452, 1.72246, 1.72833, 1.28959, 1.84591, 1.35158, 1.59884, 1.36455, 1.22883, 0.94147, 1.4872, 1.47058, 1.60177, 1.17187, 1.32032, 1.16147, 1.85664, 1.34438, 1.41884, 1.939, 1.3293, 1.75251, 1.4942, 1.19914, 1.25112, 1.47923, 1.19903, 1.70249, 1.28382, 1.22996, 1.38428, 1.04416, 1.49206, 1.45812, 1.5496, 1.42558, 1.5666, 1.60373, 1.50198, 2.14466, 1.64657, 1.23816, 1.19399, 1.20748, 1.27992, 1.28244, 1.01251, 1.42205, 1.36197, 1.11149, 1.15089, 1.21404, 1.39311, 1.5652, 1.38265, 1.4134, 1.55375, 1.48078, 1.28046, 1.56958, 1.42513, 1.45697, 1.27067, 1.6129, 1.30064, 1.30128, 1.59962, 2.07562, 1.66274, 1.53273, 1.30633, 1.38281, 1.30251, 1.26134, 1.59835, 1.39505, 1.20665, 1.50419, 1.33709, 1.53729, 1.35211, 1.18328, 1.72786, 1.56925, 1.48159, 1.79747, 1.32018, 1.29802, 1.45777, 1.41144, 1.32018, 1.82833, 1.47341, 1.38161, 1.37728, 1.47317, 1.22182, 1.50379, 1.40184, 1.43299, 1.38574, 1.54027, 1.3871, 1.51693, 1.73604, 1.27623, 1.30004, 1.43266, 1.26605, 1.31063, 1.40554, 1.47355, 1.43481, 1.66877, 1.27269, 1.36414, 1.39902, 1.36787, 1.30634, 1.35432, 1.33569, 1.38439, 1.38254, 1.48327, 1.3313, 1.47336, 1.54266, 1.45093, 1.39023, 1.42073, 1.71873, 1.24142, 1.27025, 1.75206, 1.19488, 1.72063, 1.35861, 1.46103, 1.32756, 1.38252, 1.44831, 1.49026, 1.5017, 1.67806, 1.49633, 1.40813, 1.2821, 1.34708, 1.20139, 1.33134, 1.30935, 1.28049, 1.39953, 1.36021, 1.30784, 1.55113, 1.45126, 1.35267, 1.8948, 1.31989, 1.26079, 1.54872, 1.25987, 1.49108, 1.31905, 1.39623, 1.42575, 1.70894, 1.69908, 1.44957, 1.53553, 1.41451, 1.68745, 1.45251, 1.2816, 1.33701, 1.40832, 1.76682, 1.43394, 1.35911, 1.42618, 1.36908, 1.37004, 1.25362, 1.44167, 1.3631, 1.32537, 1.0708, 1.21959, 1.38245, 1.69458, 1.66343, 1.49487, 1.64475, 1.18445, 1.24234, 1.37689, 1.3449, 1.29452, 1.57163, 1.48364, 1.39813, 1.46563, 1.16757, 1.33935, 1.37732, 1.74665, 1.43255, 1.6591, 1.35981, 1.18773, 1.72037, 1.57868, 1.47314, 1.60009, 1.70452, 1.52569, 1.35993, 1.71308, 1.55029, 1.45496, 1.45713, 1.21934, 1.34612, 1.35689, 1.29738, 1.27919, 1.35703, 1.34356, 1.23723, 1.16682, 1.55154, 1.54928, 1.31127, 1.22661, 1.39907, 1.23896, 1.39069, 1.35517, 1.4518, 1.74352, 1.41812, 1.48035, 1.43537, 1.2798, 1.31958]}, "grad-norm vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [12.95641, 13.2384, 13.63492, 12.46753, 12.09519, 9.48185, 7.05331, 7.26898, 6.13791, 4.65533, 4.16677, 2.85409, 2.39258, 2.35693, 2.05902, 2.22136, 2.15373, 1.91319, 2.28507, 2.08136, 2.12587, 2.16293, 2.01255, 2.22443, 
1.98488, 2.10576, 1.90696, 1.9543, 1.94666, 2.19132, 2.07534, 1.9973, 1.90676, 2.17071, 2.13949, 2.12242, 2.00142, 1.85779, 1.93941, 1.74128, 2.19131, 1.80266, 1.76804, 1.92184, 1.89627, 1.81829, 1.73892, 1.73316, 1.7548, 1.56741, 1.70661, 1.78909, 1.75371, 1.8099, 1.69083, 1.80378, 1.72805, 1.87537, 1.64718, 1.47793, 1.64751, 1.54177, 1.73678, 1.93709, 1.70003, 1.61404, 1.65733, 1.60718, 1.41019, 1.66006, 1.44415, 1.3449, 1.59801, 1.38078, 1.40657, 1.58642, 1.37384, 1.47591, 1.51235, 1.32276, 1.27695, 1.35665, 1.39793, 1.46181, 1.25641, 1.39278, 1.37555, 1.31206, 1.25327, 1.08729, 1.11608, 1.26073, 1.05493, 1.26676, 1.03825, 1.22449, 1.31527, 1.17458, 1.05643, 1.32651, 1.60257, 1.2771, 1.33646, 1.31918, 1.248, 1.20478, 1.17877, 1.39792, 1.21711, 1.31304, 1.06851, 0.90225, 1.00231, 1.02701, 1.08335, 1.06592, 1.11157, 1.35469, 1.11475, 0.96782, 1.00793, 1.10818, 0.98621, 1.2088, 1.33881, 1.44029, 1.6209, 1.4596, 1.76932, 0.95989, 1.18019, 1.10796, 1.01963, 0.97229, 1.12326, 1.18955, 1.04787, 1.17124, 1.15064, 0.95989, 1.2251, 1.2379, 1.76155, 1.26203, 1.48837, 1.2467, 1.12532, 1.2807, 1.00776, 1.29835, 1.39203, 1.19636, 1.4484, 1.31191, 1.0452, 1.72246, 1.72833, 1.28959, 1.84591, 1.35158, 1.59884, 1.36455, 1.22883, 0.94147, 1.4872, 1.47058, 1.60177, 1.17187, 1.32032, 1.16147, 1.85664, 1.34438, 1.41884, 1.939, 1.3293, 1.75251, 1.4942, 1.19914, 1.25112, 1.47923, 1.19903, 1.70249, 1.28382, 1.22996, 1.38428, 1.04416, 1.49206, 1.45812, 1.5496, 1.42558, 1.5666, 1.60373, 1.50198, 2.14466, 1.64657, 1.23816, 1.19399, 1.20748, 1.27992, 1.28244, 1.01251, 1.42205, 1.36197, 1.11149, 1.15089, 1.21404, 1.39311, 1.5652, 1.38265, 1.4134, 1.55375, 1.48078, 1.28046, 1.56958, 1.42513, 1.45697, 1.27067, 1.6129, 1.30064, 1.30128, 1.59962, 2.07562, 1.66274, 1.53273, 1.30633, 1.38281, 1.30251, 1.26134, 1.59835, 1.39505, 1.20665, 1.50419, 1.33709, 1.53729, 1.35211, 1.18328, 1.72786, 1.56925, 1.48159, 1.79747, 1.32018, 1.29802, 1.45777, 1.41144, 1.32018, 1.82833, 1.47341, 1.38161, 1.37728, 1.47317, 1.22182, 1.50379, 1.40184, 1.43299, 1.38574, 1.54027, 1.3871, 1.51693, 1.73604, 1.27623, 1.30004, 1.43266, 1.26605, 1.31063, 1.40554, 1.47355, 1.43481, 1.66877, 1.27269, 1.36414, 1.39902, 1.36787, 1.30634, 1.35432, 1.33569, 1.38439, 1.38254, 1.48327, 1.3313, 1.47336, 1.54266, 1.45093, 1.39023, 1.42073, 1.71873, 1.24142, 1.27025, 1.75206, 1.19488, 1.72063, 1.35861, 1.46103, 1.32756, 1.38252, 1.44831, 1.49026, 1.5017, 1.67806, 1.49633, 1.40813, 1.2821, 1.34708, 1.20139, 1.33134, 1.30935, 1.28049, 1.39953, 1.36021, 1.30784, 1.55113, 1.45126, 1.35267, 1.8948, 1.31989, 1.26079, 1.54872, 1.25987, 1.49108, 1.31905, 1.39623, 1.42575, 1.70894, 1.69908, 1.44957, 1.53553, 1.41451, 1.68745, 1.45251, 1.2816, 1.33701, 1.40832, 1.76682, 1.43394, 1.35911, 1.42618, 1.36908, 1.37004, 1.25362, 1.44167, 1.3631, 1.32537, 1.0708, 1.21959, 1.38245, 1.69458, 1.66343, 1.49487, 1.64475, 1.18445, 1.24234, 1.37689, 1.3449, 1.29452, 1.57163, 1.48364, 1.39813, 1.46563, 1.16757, 1.33935, 1.37732, 1.74665, 1.43255, 1.6591, 1.35981, 1.18773, 1.72037, 1.57868, 1.47314, 1.60009, 1.70452, 1.52569, 1.35993, 1.71308, 1.55029, 1.45496, 1.45713, 1.21934, 1.34612, 1.35689, 1.29738, 1.27919, 1.35703, 1.34356, 1.23723, 1.16682, 1.55154, 1.54928, 1.31127, 1.22661, 1.39907, 1.23896, 1.39069, 1.35517, 1.4518, 1.74352, 1.41812, 1.48035, 1.43537, 1.2798, 1.31958]}, "num-zeros": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [58.0, 81.0, 78.0, 82.0, 76.0, 95.0, 104.0, 114.0, 114.0, 147.0, 119.0, 159.0, 165.0, 173.0, 182.0, 167.0, 188.0, 176.0, 167.0, 
165.0, 187.0, 162.0, 191.0, 164.0, 181.0, 170.0, 168.0, 172.0, 182.0, 180.0, 164.0, 171.0, 169.0, 154.0, 144.0, 172.0, 173.0, 198.0, 168.0, 210.0, 178.0, 156.0, 174.0, 177.0, 163.0, 172.0, 206.0, 172.0, 184.0, 197.0, 223.0, 153.0, 162.0, 187.0, 173.0, 201.0, 146.0, 152.0, 240.0, 231.0, 192.0, 208.0, 162.0, 210.0, 192.0, 282.0, 232.0, 174.0, 215.0, 186.0, 227.0, 258.0, 202.0, 265.0, 192.0, 216.0, 239.0, 200.0, 265.0, 210.0, 264.0, 231.0, 179.0, 221.0, 234.0, 184.0, 188.0, 206.0, 157.0, 228.0, 217.0, 227.0, 219.0, 233.0, 191.0, 187.0, 214.0, 190.0, 237.0, 168.0, 155.0, 174.0, 165.0, 157.0, 155.0, 136.0, 154.0, 133.0, 124.0, 167.0, 187.0, 158.0, 188.0, 161.0, 168.0, 130.0, 164.0, 109.0, 181.0, 166.0, 146.0, 145.0, 130.0, 132.0, 130.0, 145.0, 125.0, 107.0, 130.0, 147.0, 128.0, 137.0, 149.0, 151.0, 133.0, 117.0, 167.0, 153.0, 134.0, 131.0, 117.0, 116.0, 100.0, 125.0, 121.0, 139.0, 125.0, 139.0, 124.0, 118.0, 103.0, 142.0, 95.0, 127.0, 109.0, 102.0, 110.0, 119.0, 101.0, 129.0, 122.0, 143.0, 119.0, 131.0, 102.0, 117.0, 98.0, 140.0, 129.0, 106.0, 76.0, 115.0, 81.0, 87.0, 118.0, 84.0, 101.0, 118.0, 99.0, 99.0, 107.0, 108.0, 137.0, 131.0, 109.0, 123.0, 107.0, 104.0, 102.0, 138.0, 125.0, 119.0, 91.0, 79.0, 87.0, 112.0, 104.0, 98.0, 101.0, 109.0, 135.0, 98.0, 89.0, 117.0, 106.0, 127.0, 103.0, 111.0, 122.0, 102.0, 92.0, 99.0, 110.0, 93.0, 123.0, 114.0, 133.0, 87.0, 114.0, 121.0, 111.0, 95.0, 93.0, 102.0, 127.0, 88.0, 127.0, 114.0, 107.0, 110.0, 101.0, 110.0, 108.0, 99.0, 106.0, 126.0, 92.0, 96.0, 94.0, 77.0, 124.0, 119.0, 91.0, 105.0, 110.0, 103.0, 97.0, 116.0, 104.0, 97.0, 117.0, 92.0, 110.0, 114.0, 97.0, 101.0, 92.0, 105.0, 93.0, 141.0, 93.0, 106.0, 116.0, 107.0, 122.0, 107.0, 128.0, 100.0, 94.0, 105.0, 124.0, 114.0, 94.0, 80.0, 98.0, 105.0, 97.0, 99.0, 132.0, 94.0, 99.0, 93.0, 108.0, 108.0, 107.0, 111.0, 134.0, 114.0, 104.0, 102.0, 123.0, 108.0, 109.0, 107.0, 110.0, 121.0, 92.0, 94.0, 130.0, 128.0, 130.0, 83.0, 110.0, 130.0, 105.0, 99.0, 106.0, 107.0, 101.0, 100.0, 98.0, 131.0, 101.0, 116.0, 89.0, 106.0, 114.0, 115.0, 112.0, 110.0, 128.0, 92.0, 88.0, 112.0, 108.0, 106.0, 83.0, 113.0, 129.0, 126.0, 99.0, 118.0, 98.0, 101.0, 102.0, 103.0, 119.0, 126.0, 128.0, 110.0, 107.0, 128.0, 125.0, 119.0, 113.0, 89.0, 102.0, 103.0, 126.0, 141.0, 95.0, 106.0, 117.0, 109.0, 93.0, 109.0, 111.0, 138.0, 124.0, 114.0, 106.0, 92.0, 109.0, 105.0, 144.0, 122.0, 108.0, 112.0, 86.0, 100.0, 127.0, 108.0, 100.0, 113.0, 99.0, 103.0, 104.0, 96.0, 125.0, 122.0, 97.0, 128.0, 117.0, 121.0, 133.0, 115.0, 95.0, 126.0, 117.0, 136.0, 118.0, 108.0, 135.0, 109.0, 114.0, 124.0, 122.0, 106.0, 110.0, 124.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [58.0, 81.0, 78.0, 82.0, 76.0, 95.0, 104.0, 114.0, 114.0, 147.0, 119.0, 159.0, 165.0, 173.0, 182.0, 167.0, 188.0, 176.0, 167.0, 165.0, 187.0, 162.0, 191.0, 164.0, 181.0, 170.0, 168.0, 172.0, 182.0, 180.0, 164.0, 171.0, 169.0, 154.0, 144.0, 172.0, 173.0, 198.0, 168.0, 210.0, 178.0, 156.0, 174.0, 177.0, 163.0, 172.0, 206.0, 172.0, 184.0, 197.0, 223.0, 153.0, 162.0, 187.0, 173.0, 201.0, 146.0, 152.0, 240.0, 231.0, 192.0, 208.0, 162.0, 210.0, 192.0, 282.0, 232.0, 174.0, 215.0, 186.0, 227.0, 258.0, 202.0, 265.0, 192.0, 216.0, 239.0, 200.0, 265.0, 210.0, 264.0, 231.0, 179.0, 221.0, 234.0, 184.0, 188.0, 206.0, 157.0, 228.0, 217.0, 227.0, 219.0, 233.0, 191.0, 187.0, 214.0, 190.0, 237.0, 168.0, 155.0, 174.0, 165.0, 157.0, 155.0, 136.0, 154.0, 133.0, 124.0, 167.0, 187.0, 158.0, 188.0, 161.0, 168.0, 130.0, 164.0, 109.0, 181.0, 166.0, 146.0, 145.0, 130.0, 
132.0, 130.0, 145.0, 125.0, 107.0, 130.0, 147.0, 128.0, 137.0, 149.0, 151.0, 133.0, 117.0, 167.0, 153.0, 134.0, 131.0, 117.0, 116.0, 100.0, 125.0, 121.0, 139.0, 125.0, 139.0, 124.0, 118.0, 103.0, 142.0, 95.0, 127.0, 109.0, 102.0, 110.0, 119.0, 101.0, 129.0, 122.0, 143.0, 119.0, 131.0, 102.0, 117.0, 98.0, 140.0, 129.0, 106.0, 76.0, 115.0, 81.0, 87.0, 118.0, 84.0, 101.0, 118.0, 99.0, 99.0, 107.0, 108.0, 137.0, 131.0, 109.0, 123.0, 107.0, 104.0, 102.0, 138.0, 125.0, 119.0, 91.0, 79.0, 87.0, 112.0, 104.0, 98.0, 101.0, 109.0, 135.0, 98.0, 89.0, 117.0, 106.0, 127.0, 103.0, 111.0, 122.0, 102.0, 92.0, 99.0, 110.0, 93.0, 123.0, 114.0, 133.0, 87.0, 114.0, 121.0, 111.0, 95.0, 93.0, 102.0, 127.0, 88.0, 127.0, 114.0, 107.0, 110.0, 101.0, 110.0, 108.0, 99.0, 106.0, 126.0, 92.0, 96.0, 94.0, 77.0, 124.0, 119.0, 91.0, 105.0, 110.0, 103.0, 97.0, 116.0, 104.0, 97.0, 117.0, 92.0, 110.0, 114.0, 97.0, 101.0, 92.0, 105.0, 93.0, 141.0, 93.0, 106.0, 116.0, 107.0, 122.0, 107.0, 128.0, 100.0, 94.0, 105.0, 124.0, 114.0, 94.0, 80.0, 98.0, 105.0, 97.0, 99.0, 132.0, 94.0, 99.0, 93.0, 108.0, 108.0, 107.0, 111.0, 134.0, 114.0, 104.0, 102.0, 123.0, 108.0, 109.0, 107.0, 110.0, 121.0, 92.0, 94.0, 130.0, 128.0, 130.0, 83.0, 110.0, 130.0, 105.0, 99.0, 106.0, 107.0, 101.0, 100.0, 98.0, 131.0, 101.0, 116.0, 89.0, 106.0, 114.0, 115.0, 112.0, 110.0, 128.0, 92.0, 88.0, 112.0, 108.0, 106.0, 83.0, 113.0, 129.0, 126.0, 99.0, 118.0, 98.0, 101.0, 102.0, 103.0, 119.0, 126.0, 128.0, 110.0, 107.0, 128.0, 125.0, 119.0, 113.0, 89.0, 102.0, 103.0, 126.0, 141.0, 95.0, 106.0, 117.0, 109.0, 93.0, 109.0, 111.0, 138.0, 124.0, 114.0, 106.0, 92.0, 109.0, 105.0, 144.0, 122.0, 108.0, 112.0, 86.0, 100.0, 127.0, 108.0, 100.0, 113.0, 99.0, 103.0, 104.0, 96.0, 125.0, 122.0, 97.0, 128.0, 117.0, 121.0, 133.0, 115.0, 95.0, 126.0, 117.0, 136.0, 118.0, 108.0, 135.0, 109.0, 114.0, 124.0, 122.0, 106.0, 110.0, 124.0]}, "params-norm": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [179.95625, 179.95625, 179.95625, 179.95625, 179.95625, 179.95624, 179.95621, 179.95612, 179.95593, 179.95575, 179.95447, 179.9538, 179.95322, 179.95126, 179.95035, 179.94966, 179.94905, 179.94916, 179.94939, 179.94966, 179.9496, 179.94933, 179.94919, 179.94952, 179.95036, 179.95168, 179.95288, 179.95392, 179.95509, 179.9565, 179.95851, 179.96069, 179.963, 179.96532, 179.96788, 179.97118, 179.97482, 179.97873, 179.98279, 179.98714, 179.99208, 179.99753, 180.00325, 180.00955, 180.01634, 180.02382, 180.03171, 180.04016, 180.04951, 180.05969, 180.07059, 180.08221, 180.09441, 180.10721, 180.12059, 180.13457, 180.14899, 180.16373, 180.1792, 180.19586, 180.21344, 180.23199, 180.25226, 180.2733, 180.2948, 180.31709, 180.34032, 180.36464, 180.38991, 180.41573, 180.44231, 180.46947, 180.49721, 180.52528, 180.55406, 180.5829, 180.61168, 180.64125, 180.67117, 180.70154, 180.73244, 180.76378, 180.79633, 180.82928, 180.86198, 180.89581, 180.92958, 180.96359, 180.99808, 181.03401, 181.07187, 181.1104, 181.14795, 181.18536, 181.22249, 181.26071, 181.29898, 181.33658, 181.37422, 181.41164, 181.4467, 181.47968, 181.5123, 181.54552, 181.57919, 181.61421, 181.65012, 181.68695, 181.72267, 181.7587, 181.79526, 181.83344, 181.87288, 181.91354, 181.9543, 181.99518, 182.03568, 182.07515, 182.11353, 182.15218, 182.19164, 182.23108, 182.2708, 182.30989, 182.34795, 182.3871, 182.42479, 182.46089, 182.49536, 182.52867, 182.5638, 182.60063, 182.63989, 182.67992, 182.72049, 182.76151, 182.80296, 182.8448, 182.88582, 182.92665, 182.96825, 183.00778, 183.04619, 183.08208, 183.117, 183.15222, 
183.18738, 183.22598, 183.2657, 183.30598, 183.34494, 183.38196, 183.41934, 183.45613, 183.49393, 183.53142, 183.56673, 183.60075, 183.63268, 183.66296, 183.69357, 183.7247, 183.76031, 183.79965, 183.83946, 183.87967, 183.91869, 183.95782, 183.99774, 184.03601, 184.07205, 184.10704, 184.14296, 184.17989, 184.21503, 184.24945, 184.28268, 184.31783, 184.35512, 184.39378, 184.43393, 184.47366, 184.51508, 184.55717, 184.59872, 184.64001, 184.68074, 184.71964, 184.75798, 184.79604, 184.83191, 184.86661, 184.90184, 184.9364, 184.96959, 185.00362, 185.0423, 185.08412, 185.12758, 185.17178, 185.21582, 185.26006, 185.30214, 185.34361, 185.3847, 185.42496, 185.46634, 185.50591, 185.54526, 185.58424, 185.62386, 185.6624, 185.7025, 185.74159, 185.78154, 185.82208, 185.86279, 185.90271, 185.94293, 185.98375, 186.0233, 186.05884, 186.09236, 186.12791, 186.16458, 186.20477, 186.24573, 186.28658, 186.32719, 186.36766, 186.40819, 186.44913, 186.48967, 186.53146, 186.57472, 186.61908, 186.66409, 186.70798, 186.75232, 186.79475, 186.83501, 186.8761, 186.91815, 186.96135, 187.00375, 187.04543, 187.08774, 187.13051, 187.17398, 187.21738, 187.26135, 187.30682, 187.3519, 187.39789, 187.44398, 187.48967, 187.53412, 187.57758, 187.62079, 187.66299, 187.70578, 187.74741, 187.79074, 187.83516, 187.8799, 187.92366, 187.9662, 188.00873, 188.0517, 188.09543, 188.13933, 188.183, 188.2269, 188.2719, 188.31848, 188.36552, 188.41412, 188.46288, 188.51031, 188.55696, 188.60126, 188.64514, 188.68958, 188.7356, 188.78317, 188.82912, 188.87651, 188.92406, 188.97069, 189.0186, 189.06526, 189.11108, 189.15532, 189.20073, 189.24802, 189.29507, 189.3419, 189.38878, 189.43637, 189.48433, 189.53323, 189.58208, 189.63031, 189.67888, 189.72659, 189.7742, 189.82292, 189.87331, 189.92422, 189.97572, 190.02654, 190.07675, 190.12685, 190.17654, 190.22655, 190.27744, 190.32918, 190.38191, 190.43228, 190.48412, 190.53688, 190.58897, 190.6412, 190.69144, 190.74126, 190.79027, 190.84029, 190.89107, 190.94135, 190.99312, 191.04454, 191.09538, 191.14601, 191.19763, 191.25024, 191.3022, 191.35342, 191.40527, 191.45781, 191.51038, 191.56477, 191.61903, 191.67284, 191.72745, 191.78351, 191.83809, 191.89211, 191.94516, 191.99768, 192.0515, 192.10683, 192.16144, 192.21646, 192.27127, 192.3248, 192.37834, 192.43166, 192.48701, 192.54335, 192.59961, 192.65665, 192.71281, 192.76929, 192.82428, 192.88118, 192.93932, 192.99641, 193.05295, 193.10945, 193.16679, 193.22235, 193.27766, 193.33466, 193.38956, 193.44543, 193.4995, 193.55339, 193.60861, 193.66547, 193.72427, 193.78304, 193.84152, 193.8996, 193.95851, 194.01683, 194.07661, 194.13618, 194.19662, 194.25862, 194.32071, 194.3831, 194.44382, 194.50331, 194.56212, 194.62186, 194.67973, 194.73642, 194.7941, 194.85469, 194.91579]}, "params-norm vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [179.95625, 179.95625, 179.95625, 179.95625, 179.95625, 179.95624, 179.95621, 179.95612, 179.95593, 179.95575, 179.95447, 179.9538, 179.95322, 179.95126, 179.95035, 179.94966, 179.94905, 179.94916, 179.94939, 179.94966, 179.9496, 179.94933, 179.94919, 179.94952, 179.95036, 179.95168, 179.95288, 179.95392, 179.95509, 179.9565, 179.95851, 179.96069, 179.963, 179.96532, 179.96788, 179.97118, 179.97482, 179.97873, 179.98279, 179.98714, 179.99208, 179.99753, 180.00325, 180.00955, 180.01634, 180.02382, 180.03171, 180.04016, 180.04951, 180.05969, 180.07059, 180.08221, 180.09441, 180.10721, 180.12059, 180.13457, 180.14899, 180.16373, 180.1792, 180.19586, 180.21344, 180.23199, 180.25226, 
180.2733, 180.2948, 180.31709, 180.34032, 180.36464, 180.38991, 180.41573, 180.44231, 180.46947, 180.49721, 180.52528, 180.55406, 180.5829, 180.61168, 180.64125, 180.67117, 180.70154, 180.73244, 180.76378, 180.79633, 180.82928, 180.86198, 180.89581, 180.92958, 180.96359, 180.99808, 181.03401, 181.07187, 181.1104, 181.14795, 181.18536, 181.22249, 181.26071, 181.29898, 181.33658, 181.37422, 181.41164, 181.4467, 181.47968, 181.5123, 181.54552, 181.57919, 181.61421, 181.65012, 181.68695, 181.72267, 181.7587, 181.79526, 181.83344, 181.87288, 181.91354, 181.9543, 181.99518, 182.03568, 182.07515, 182.11353, 182.15218, 182.19164, 182.23108, 182.2708, 182.30989, 182.34795, 182.3871, 182.42479, 182.46089, 182.49536, 182.52867, 182.5638, 182.60063, 182.63989, 182.67992, 182.72049, 182.76151, 182.80296, 182.8448, 182.88582, 182.92665, 182.96825, 183.00778, 183.04619, 183.08208, 183.117, 183.15222, 183.18738, 183.22598, 183.2657, 183.30598, 183.34494, 183.38196, 183.41934, 183.45613, 183.49393, 183.53142, 183.56673, 183.60075, 183.63268, 183.66296, 183.69357, 183.7247, 183.76031, 183.79965, 183.83946, 183.87967, 183.91869, 183.95782, 183.99774, 184.03601, 184.07205, 184.10704, 184.14296, 184.17989, 184.21503, 184.24945, 184.28268, 184.31783, 184.35512, 184.39378, 184.43393, 184.47366, 184.51508, 184.55717, 184.59872, 184.64001, 184.68074, 184.71964, 184.75798, 184.79604, 184.83191, 184.86661, 184.90184, 184.9364, 184.96959, 185.00362, 185.0423, 185.08412, 185.12758, 185.17178, 185.21582, 185.26006, 185.30214, 185.34361, 185.3847, 185.42496, 185.46634, 185.50591, 185.54526, 185.58424, 185.62386, 185.6624, 185.7025, 185.74159, 185.78154, 185.82208, 185.86279, 185.90271, 185.94293, 185.98375, 186.0233, 186.05884, 186.09236, 186.12791, 186.16458, 186.20477, 186.24573, 186.28658, 186.32719, 186.36766, 186.40819, 186.44913, 186.48967, 186.53146, 186.57472, 186.61908, 186.66409, 186.70798, 186.75232, 186.79475, 186.83501, 186.8761, 186.91815, 186.96135, 187.00375, 187.04543, 187.08774, 187.13051, 187.17398, 187.21738, 187.26135, 187.30682, 187.3519, 187.39789, 187.44398, 187.48967, 187.53412, 187.57758, 187.62079, 187.66299, 187.70578, 187.74741, 187.79074, 187.83516, 187.8799, 187.92366, 187.9662, 188.00873, 188.0517, 188.09543, 188.13933, 188.183, 188.2269, 188.2719, 188.31848, 188.36552, 188.41412, 188.46288, 188.51031, 188.55696, 188.60126, 188.64514, 188.68958, 188.7356, 188.78317, 188.82912, 188.87651, 188.92406, 188.97069, 189.0186, 189.06526, 189.11108, 189.15532, 189.20073, 189.24802, 189.29507, 189.3419, 189.38878, 189.43637, 189.48433, 189.53323, 189.58208, 189.63031, 189.67888, 189.72659, 189.7742, 189.82292, 189.87331, 189.92422, 189.97572, 190.02654, 190.07675, 190.12685, 190.17654, 190.22655, 190.27744, 190.32918, 190.38191, 190.43228, 190.48412, 190.53688, 190.58897, 190.6412, 190.69144, 190.74126, 190.79027, 190.84029, 190.89107, 190.94135, 190.99312, 191.04454, 191.09538, 191.14601, 191.19763, 191.25024, 191.3022, 191.35342, 191.40527, 191.45781, 191.51038, 191.56477, 191.61903, 191.67284, 191.72745, 191.78351, 191.83809, 191.89211, 191.94516, 191.99768, 192.0515, 192.10683, 192.16144, 192.21646, 192.27127, 192.3248, 192.37834, 192.43166, 192.48701, 192.54335, 192.59961, 192.65665, 192.71281, 192.76929, 192.82428, 192.88118, 192.93932, 192.99641, 193.05295, 193.10945, 193.16679, 193.22235, 193.27766, 193.33466, 193.38956, 193.44543, 193.4995, 193.55339, 193.60861, 193.66547, 193.72427, 193.78304, 193.84152, 193.8996, 193.95851, 194.01683, 194.07661, 194.13618, 194.19662, 194.25862, 
194.32071, 194.3831, 194.44382, 194.50331, 194.56212, 194.62186, 194.67973, 194.73642, 194.7941, 194.85469, 194.91579]}, "iteration-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [18.78556, 0.6433, 0.64729, 0.63688, 0.63863, 0.64094, 0.6349, 0.97491, 0.63959, 0.63938, 0.63992, 0.63559, 0.63842, 0.63697, 0.63738, 0.64112, 0.63959, 0.64348, 0.63705, 0.6364, 0.63918, 0.63292, 0.6437, 0.64018, 0.639, 0.63548, 0.63416, 0.64052, 0.6394, 0.64087, 0.93505, 0.64011, 0.63922, 0.63683, 0.63698, 0.63707, 0.63678, 0.63951, 0.63884, 0.63971, 0.64127, 0.63397, 0.63425, 0.63678, 0.64689, 0.63996, 0.6373, 0.63968, 0.63439, 0.63168, 0.63761, 0.63699, 0.63824, 0.71804, 0.64031, 0.63865, 0.64029, 0.63765, 0.63483, 0.63106, 0.64044, 0.64084, 0.64009, 0.63302, 0.63552, 0.634, 0.64042, 0.62983, 0.63367, 0.63643, 0.6354, 0.63829, 0.64059, 0.75259, 0.63372, 0.63627, 0.6387, 0.73904, 0.63828, 0.63771, 0.6359, 0.63693, 0.63456, 0.63441, 0.63425, 0.63785, 0.63673, 0.63659, 0.63691, 0.63886, 0.63666, 0.63099, 0.63434, 0.63606, 0.63766, 0.63693, 0.63641, 0.63421, 0.74335, 0.63417, 0.73325, 0.63333, 0.63749, 0.63466, 0.63579, 0.6328, 0.63166, 0.63446, 0.63178, 0.63147, 0.63478, 0.63778, 0.63144, 0.63332, 0.63409, 0.63176, 0.63302, 0.63438, 0.63574, 0.63649, 0.63622, 0.63188, 0.63339, 0.63517, 0.72118, 0.63229, 0.63429, 0.63655, 0.63599, 0.6353, 0.63271, 0.63372, 0.64125, 0.63512, 0.63455, 0.63532, 0.63725, 0.63591, 0.63729, 0.63999, 0.63638, 0.63338, 0.63695, 0.63822, 0.64221, 0.635, 0.63426, 0.63954, 0.63843, 0.75293, 0.63573, 0.63901, 0.63561, 0.63959, 0.6361, 0.63665, 0.64435, 0.63719, 0.63371, 0.63219, 0.6406, 0.64456, 0.63924, 0.635, 0.6327, 0.6352, 0.63564, 0.63957, 0.63877, 0.73034, 0.73934, 0.64019, 0.63815, 0.63937, 0.75337, 0.63669, 0.63936, 0.63737, 0.6461, 0.63756, 0.63312, 0.63542, 0.63878, 0.6388, 0.64047, 0.63637, 0.63586, 0.63666, 0.63721, 0.63734, 0.63786, 0.63594, 0.8184, 0.73163, 0.72764, 0.63564, 0.63408, 0.63622, 0.64045, 0.63686, 0.62364, 0.64914, 0.64308, 0.64069, 0.63927, 0.64269, 0.64288, 0.64533, 0.64376, 0.64236, 0.64125, 0.64212, 0.6369, 0.63583, 0.74464, 0.63698, 0.72591, 0.64074, 0.73419, 0.63849, 0.63726, 0.64412, 0.64282, 0.75083, 0.63592, 0.63941, 0.63766, 0.63791, 0.63977, 0.63509, 0.6399, 0.64297, 0.63884, 0.63671, 0.6435, 0.64374, 0.64843, 0.64579, 0.63861, 0.64594, 0.64077, 0.63925, 0.72846, 0.639, 0.64699, 0.6369, 0.63194, 0.63558, 0.64203, 0.63965, 0.63904, 0.63895, 0.63899, 0.64164, 0.63997, 0.63805, 0.63955, 0.63823, 0.64646, 0.64468, 0.64926, 0.64434, 0.6452, 0.64591, 0.64664, 0.63886, 0.731, 0.64411, 0.64842, 0.6425, 0.64476, 0.63269, 0.63913, 0.63471, 0.63896, 0.63597, 0.63778, 0.63815, 0.6401, 0.64693, 0.64595, 0.64455, 0.64718, 0.64189, 0.63449, 0.75535, 0.6495, 0.6344, 0.63238, 0.64302, 0.6447, 0.64478, 0.63878, 0.63865, 0.64385, 0.64709, 0.64475, 0.63872, 0.63717, 0.64047, 0.64341, 0.6397, 0.64191, 0.63957, 0.63403, 0.64098, 0.64479, 0.64926, 0.74478, 0.73898, 0.64632, 0.64647, 0.63797, 0.64641, 0.64397, 0.64203, 0.645, 0.64045, 0.64179, 0.64038, 0.64201, 0.64156, 0.64501, 0.64116, 0.63858, 0.63331, 0.63441, 0.63583, 0.64119, 0.6353, 0.63464, 0.63359, 0.63663, 0.64109, 0.6316, 0.63418, 0.63702, 0.63806, 0.64097, 0.63561, 0.63886, 0.63666, 0.63662, 0.64007, 0.64226, 0.64759, 0.64499, 0.6441, 0.63331, 0.63366, 0.63388, 0.64218, 0.6449, 0.7739, 0.64344, 0.64344, 0.64738, 0.64398, 0.64107, 0.64511, 0.64245, 0.64068, 0.6375, 0.63653, 0.63463, 0.63795, 0.64039, 0.6391, 0.63754, 0.63814, 0.64098, 0.63698, 0.63569, 0.63797, 0.63695, 0.64036, 0.63449, 
0.63592, 0.72519, 0.64273, 0.63744, 0.63929, 0.63719, 0.64021, 0.64007, 0.63925, 0.63833, 0.63918, 0.63915, 0.64067, 0.64172, 0.63687, 0.63877, 0.63737, 0.64309, 0.6455, 0.64316, 0.63731, 0.6383, 0.63962]}, "lm loss validation": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [5.60423]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [5.60423]}, "lm loss validation ppl": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [271.57376]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [271.57376]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json new file mode 100644 index 000000000..f2cc2651b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_dev.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.8833, + 10.90234, + 10.8867, + 10.83313, + 10.67611, + 10.64923, + 10.43399, + 10.15135, + 9.93913, + 9.84138, + 9.58862, + 9.85447, + 9.88459, + 9.62945, + 9.78806, + 9.51139, + 9.45835, + 9.64919, + 9.38616, + 9.33214, + 9.24217, + 9.14552, + 9.17556, + 8.99549, + 9.18942, + 9.06, + 9.15557, + 9.16494, + 9.29777, + 8.98447, + 8.9291, + 9.0438, + 9.04302, + 8.65501, + 8.71714, + 8.75345, + 8.68366, + 8.73437, + 8.65884, + 8.76497, + 8.66083, + 8.84974, + 8.83206, + 8.49923, + 8.38904, + 8.43157, + 8.49322, + 8.38452, + 8.43264, + 8.57965, + 8.36711, + 8.19222, + 8.22606, + 8.22221, + 8.26779, + 7.91377, + 8.09628, + 7.89164, + 8.2472, + 8.23126, + 8.00591, + 7.9665, + 7.91908, + 7.74099, + 7.7407, + 7.64366, + 7.51608, + 7.90725, + 7.6987, + 7.45218, + 7.74074, + 7.76788, + 7.54126, + 7.29845, + 7.45178, + 7.3355, + 7.46213, + 7.22379, + 7.63678, + 7.27944, + 7.35187, + 7.21324, + 7.21605, + 7.42279, + 7.17674, + 7.28039, + 7.00049, + 7.00348, + 7.0378, + 7.13559, + 6.8226, + 6.98478, + 7.08778, + 7.00054, + 6.87352, + 6.7548, + 6.98975, + 7.05529, + 6.70191, + 6.57996, + 6.72276, + 6.73919, + 6.73242, + 6.73508, + 6.65475, + 6.40522, + 6.63735, + 6.61784, + 6.44466, + 6.62795, + 6.74118, + 6.60668, + 6.72226, + 6.69283, + 6.62263, + 6.50666, + 6.59776, + 6.40564, + 6.66354, + 6.24776, + 6.2498, + 6.30069, + 6.38858, + 6.34831, + 6.45112, + 6.29344, + 6.33922, + 6.23941, + 6.20371, + 6.40027, + 6.32848, + 6.32525, + 6.17126, + 6.1643, + 6.2454, + 6.39032, + 6.20693, + 6.15596, + 6.18982, + 6.12202, + 6.07039, + 6.07971, + 6.26493, + 6.41807, + 6.26721, + 6.30841, + 6.10624, + 6.18818, + 6.01112, + 6.03436, + 5.96365, + 6.25335, + 6.19771, + 5.97183, + 5.78965, + 6.12772, + 5.85318, + 6.10697, + 5.79207, + 6.16231, + 6.14778, + 6.08858, + 5.93222, + 6.11354, + 5.94235, + 6.19392, + 5.89409, + 5.79284, + 5.77325, + 5.68417, + 6.01344, + 5.99765, + 6.06104, + 5.88062, + 6.03537, + 5.96403, + 5.99065, + 5.98597, + 5.9429, + 5.83537, + 5.94528, + 5.61064, + 5.69396, + 5.88331, + 5.83611, + 5.8572, + 5.75616, + 5.8315, + 5.72086, + 5.55559, + 5.71476, + 5.62107, + 5.82784, + 5.59614, + 5.70294, + 5.70926, + 5.89205, + 5.63787, + 5.84442, + 5.73328, + 5.86482, + 5.32391, + 5.88991, + 5.86664, + 5.84821, + 5.40773, + 5.40279, + 5.6189, + 5.58915, + 5.47606, + 5.56698, + 5.66844, 
+ 5.46942, + 5.73811, + 5.50571, + 5.58896, + 5.61865, + 5.61286, + 5.50477, + 5.60628, + 5.66565, + 5.69156, + 5.58829, + 5.65549, + 5.3707, + 5.67705, + 5.62292, + 5.41672, + 5.5855, + 5.62763, + 5.55004, + 5.33605, + 5.5357, + 5.48154, + 5.47891, + 5.37306, + 5.55395, + 5.59949, + 5.38543, + 5.52273, + 5.48203, + 5.3275, + 5.50172, + 5.40512, + 5.4376, + 5.31466, + 5.06074, + 5.47521, + 5.56277, + 5.70758, + 5.41112, + 5.59472, + 5.62927, + 5.23143, + 5.26976, + 5.39082, + 5.38949, + 5.32381, + 5.49509, + 5.18131, + 5.29884, + 5.24876, + 5.37339, + 5.25697, + 5.44221, + 5.53619, + 5.30996, + 5.43641, + 5.33417, + 5.06948, + 5.3127, + 5.25169, + 5.30028, + 5.10715, + 5.2724, + 5.26524, + 5.46862, + 5.15665, + 5.26598, + 5.20649, + 5.35982, + 4.98371, + 4.91206, + 5.31959, + 5.38874, + 5.22559, + 5.31589, + 5.1, + 5.15578, + 5.25723, + 5.065, + 5.26354, + 5.07334, + 5.33639, + 5.24541, + 5.15041, + 5.24112, + 5.03819, + 5.31, + 5.0477, + 5.02146, + 5.13877, + 5.10876, + 5.26714, + 5.14932, + 5.27649, + 5.0965, + 5.09542, + 5.24706, + 5.31762, + 5.25262, + 5.18876, + 5.13842, + 5.28319, + 4.94386, + 5.20599, + 5.08696, + 5.29641, + 5.1744, + 5.18255, + 5.10891, + 4.98033, + 4.99108, + 5.21829, + 5.31066, + 5.09636, + 5.05054, + 4.91569, + 5.12013, + 5.11714, + 4.92205, + 5.33319, + 5.02061, + 5.09671, + 5.15803, + 4.99994, + 5.0584, + 5.06511, + 4.98874, + 5.0743, + 5.15696, + 4.97546, + 5.17775, + 4.92623, + 4.91526, + 5.06578, + 4.98937, + 4.90649, + 4.77326, + 4.94086, + 5.1121, + 5.01488, + 5.01357, + 5.32596, + 4.95425, + 4.99115, + 5.0419, + 4.80405, + 4.73491, + 4.9946, + 5.03423, + 4.87011, + 4.94783, + 5.04177, + 5.02083, + 4.81039, + 4.88762, + 4.90025, + 4.8257, + 4.74307, + 5.00644, + 4.74731, + 5.20296, + 4.78234, + 4.98845, + 4.73187, + 4.78111, + 4.81624, + 4.64753, + 4.65382, + 4.83884, + 4.80187, + 4.79782, + 4.91858, + 4.87993, + 4.92242, + 4.7636, + 4.87789, + 4.73001, + 4.90747, + 4.95247, + 4.87195, + 4.70431, + 4.77676, + 4.89474, + 4.70621, + 4.85602, + 4.68499, + 4.68274, + 4.64493 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 86.0, + 65.0, + 73.0, + 73.0, + 63.0, + 79.0, + 89.0, + 101.0, + 111.0, + 114.0, + 120.0, + 130.0, + 146.0, + 151.0, + 186.0, + 176.0, + 158.0, + 185.0, + 193.0, + 154.0, + 152.0, + 162.0, + 215.0, + 192.0, + 212.0, + 153.0, + 177.0, + 162.0, + 152.0, + 166.0, + 157.0, + 177.0, + 124.0, + 172.0, + 160.0, + 155.0, + 166.0, + 189.0, + 180.0, + 206.0, + 200.0, + 165.0, + 175.0, + 186.0, + 176.0, + 183.0, + 210.0, + 187.0, + 205.0, + 245.0, + 226.0, + 175.0, + 186.0, + 163.0, + 175.0, + 207.0, + 167.0, + 137.0, + 265.0, + 259.0, + 187.0, + 185.0, + 194.0, + 173.0, + 204.0, + 254.0, + 212.0, + 218.0, + 212.0, + 228.0, + 242.0, + 261.0, + 198.0, + 226.0, + 204.0, + 204.0, + 257.0, + 207.0, + 273.0, + 231.0, + 237.0, + 222.0, + 180.0, + 234.0, + 254.0, + 226.0, + 221.0, + 194.0, + 233.0, + 188.0, + 190.0, + 215.0, + 234.0, + 212.0, + 214.0, + 162.0, + 213.0, + 214.0, + 173.0, + 130.0, + 192.0, + 183.0, + 184.0, + 150.0, + 162.0, + 148.0, + 167.0, + 133.0, + 145.0, + 190.0, + 173.0, + 194.0, + 181.0, + 174.0, + 141.0, + 129.0, + 160.0, + 131.0, + 201.0, + 153.0, + 148.0, + 141.0, + 134.0, + 155.0, + 121.0, + 99.0, + 131.0, + 121.0, + 132.0, + 144.0, + 144.0, + 137.0, + 154.0, + 113.0, + 129.0, + 130.0, + 162.0, + 109.0, + 92.0, + 124.0, + 112.0, + 117.0, + 122.0, + 96.0, + 121.0, + 120.0, + 109.0, + 130.0, + 122.0, + 141.0, + 133.0, + 105.0, + 103.0, + 131.0, + 107.0, + 120.0, + 122.0, + 101.0, 
+ 119.0, + 124.0, + 131.0, + 116.0, + 117.0, + 150.0, + 121.0, + 112.0, + 124.0, + 96.0, + 127.0, + 103.0, + 92.0, + 105.0, + 103.0, + 124.0, + 119.0, + 108.0, + 82.0, + 110.0, + 93.0, + 105.0, + 124.0, + 126.0, + 115.0, + 125.0, + 93.0, + 99.0, + 96.0, + 103.0, + 86.0, + 86.0, + 130.0, + 97.0, + 121.0, + 114.0, + 113.0, + 112.0, + 100.0, + 106.0, + 113.0, + 105.0, + 106.0, + 105.0, + 110.0, + 135.0, + 116.0, + 90.0, + 95.0, + 88.0, + 131.0, + 113.0, + 116.0, + 101.0, + 109.0, + 119.0, + 87.0, + 91.0, + 107.0, + 103.0, + 99.0, + 94.0, + 116.0, + 58.0, + 90.0, + 95.0, + 106.0, + 98.0, + 120.0, + 113.0, + 106.0, + 90.0, + 122.0, + 98.0, + 92.0, + 119.0, + 122.0, + 120.0, + 110.0, + 111.0, + 106.0, + 95.0, + 120.0, + 119.0, + 115.0, + 119.0, + 106.0, + 95.0, + 108.0, + 119.0, + 116.0, + 102.0, + 121.0, + 103.0, + 124.0, + 116.0, + 99.0, + 77.0, + 107.0, + 98.0, + 81.0, + 108.0, + 106.0, + 88.0, + 122.0, + 86.0, + 89.0, + 98.0, + 114.0, + 109.0, + 122.0, + 119.0, + 110.0, + 115.0, + 91.0, + 133.0, + 114.0, + 106.0, + 114.0, + 115.0, + 122.0, + 127.0, + 91.0, + 85.0, + 101.0, + 89.0, + 97.0, + 106.0, + 120.0, + 85.0, + 98.0, + 94.0, + 109.0, + 98.0, + 106.0, + 119.0, + 97.0, + 80.0, + 95.0, + 103.0, + 107.0, + 102.0, + 134.0, + 107.0, + 117.0, + 123.0, + 102.0, + 105.0, + 97.0, + 108.0, + 134.0, + 113.0, + 93.0, + 118.0, + 101.0, + 94.0, + 123.0, + 109.0, + 104.0, + 120.0, + 109.0, + 136.0, + 102.0, + 98.0, + 77.0, + 105.0, + 120.0, + 94.0, + 106.0, + 109.0, + 89.0, + 103.0, + 137.0, + 111.0, + 96.0, + 125.0, + 138.0, + 99.0, + 142.0, + 107.0, + 107.0, + 95.0, + 124.0, + 117.0, + 142.0, + 123.0, + 124.0, + 97.0, + 110.0, + 91.0, + 131.0, + 115.0, + 106.0, + 102.0, + 120.0, + 114.0, + 117.0, + 102.0, + 116.0, + 126.0, + 105.0, + 100.0, + 107.0, + 114.0, + 118.0, + 101.0, + 109.0, + 112.0, + 99.0, + 97.0, + 114.0, + 107.0, + 127.0, + 119.0, + 121.0, + 107.0, + 120.0, + 119.0, + 102.0, + 110.0, + 116.0, + 107.0, + 117.0, + 117.0, + 121.0, + 130.0, + 128.0, + 102.0, + 126.0, + 115.0, + 114.0, + 119.0, + 128.0, + 112.0, + 98.0, + 141.0, + 109.0, + 103.0, + 106.0, + 114.0, + 122.0, + 121.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 13.16275, + 0.4518, + 0.44557, + 0.45576, + 0.45722, + 0.44122, + 0.44896, + 0.44797, + 0.45127, + 0.44355, + 0.44203, + 0.44107, + 0.44753, + 0.44562, + 0.44125, + 0.44515, + 0.67142, + 0.44532, + 0.46026, + 0.44572, + 0.44982, + 0.44886, + 0.44864, + 0.44877, + 0.44648, + 0.4424, + 0.44248, + 0.44394, + 0.44792, + 0.44757, + 0.45034, + 0.44906, + 0.458, + 0.4431, + 0.44402, + 0.44226, + 0.44968, + 0.44244, + 0.43928, + 0.45458, + 0.44414, + 0.44266, + 0.44257, + 0.44323, + 0.44374, + 0.44748, + 0.44303, + 0.4441, + 0.44285, + 0.44733, + 0.44378, + 0.44354, + 0.4399, + 0.44097, + 0.44394, + 0.4429, + 0.44266, + 0.44164, + 0.44233, + 0.44097, + 0.43971, + 0.6223, + 0.44021, + 0.43751, + 0.44529, + 0.43738, + 0.43829, + 0.4386, + 0.43992, + 0.43998, + 0.43889, + 0.43767, + 0.43834, + 0.43759, + 0.43777, + 0.43857, + 0.43711, + 0.43941, + 0.43784, + 0.44083, + 0.43811, + 0.43937, + 0.44198, + 0.44123, + 0.44152, + 0.44023, + 0.44153, + 0.44214, + 0.4395, + 0.44473, + 0.44356, + 0.44158, + 0.44242, + 0.4424, + 0.4404, + 0.44416, + 0.44469, + 0.44324, + 0.44225, + 0.43921, + 0.44046, + 0.61905, + 0.4415, + 0.44022, + 0.44161, + 0.44571, + 0.44336, + 0.44323, + 0.4464, + 0.45359, + 0.44064, + 0.44296, + 0.44293, + 0.44022, + 0.44093, + 0.44096, + 0.44293, + 0.44476, + 0.44293, + 0.44493, + 0.44441, + 0.44481, + 
0.44206, + 0.44245, + 0.44282, + 0.44194, + 0.4442, + 0.44265, + 0.44176, + 0.44137, + 0.44235, + 0.4394, + 0.43896, + 0.44163, + 0.44138, + 0.44107, + 0.44214, + 0.44424, + 0.44448, + 0.44264, + 0.4416, + 0.44032, + 0.43985, + 0.43852, + 0.4412, + 0.43765, + 0.43824, + 0.43891, + 0.44181, + 0.43809, + 0.78158, + 0.62586, + 0.44007, + 0.44167, + 0.44119, + 0.44323, + 0.44293, + 0.44258, + 0.44257, + 0.44383, + 0.44055, + 0.44274, + 0.44198, + 0.44248, + 0.44257, + 0.44076, + 0.44018, + 0.44336, + 0.44473, + 0.44424, + 0.4397, + 0.44067, + 0.44098, + 0.43695, + 0.43881, + 0.43582, + 0.43518, + 0.43505, + 0.43754, + 0.43588, + 0.43662, + 0.43699, + 0.43687, + 0.43919, + 0.43661, + 0.43689, + 0.43479, + 0.43653, + 0.43585, + 0.43678, + 0.43698, + 0.43872, + 0.43736, + 0.43695, + 0.43692, + 0.6126, + 0.43542, + 0.60845, + 0.43535, + 0.43582, + 0.44167, + 0.44049, + 0.44041, + 0.43948, + 0.43837, + 0.4451, + 0.44758, + 0.43922, + 0.43796, + 0.43914, + 0.43744, + 0.43686, + 0.43836, + 0.43649, + 0.43807, + 0.43912, + 0.43758, + 0.43832, + 0.43758, + 0.43794, + 0.43713, + 0.436, + 0.43768, + 0.47048, + 0.43956, + 0.4375, + 0.43873, + 0.4394, + 0.43764, + 0.43801, + 0.44127, + 0.44216, + 0.4391, + 0.43815, + 0.43822, + 0.43702, + 0.43794, + 0.61667, + 0.44311, + 0.43731, + 0.43777, + 0.43921, + 0.43875, + 0.44131, + 0.44003, + 0.4415, + 0.43932, + 0.43866, + 0.43727, + 0.43777, + 0.43796, + 0.43822, + 0.44556, + 0.44349, + 0.4382, + 0.44057, + 0.44268, + 0.4425, + 0.43738, + 0.43736, + 0.43793, + 0.43862, + 0.43893, + 0.43846, + 0.43905, + 0.43842, + 0.43863, + 0.43678, + 0.43877, + 0.43998, + 0.43905, + 0.43837, + 0.44205, + 0.43732, + 0.43694, + 0.43718, + 0.43541, + 0.44457, + 0.469, + 0.44256, + 0.44183, + 0.44406, + 0.44573, + 0.44202, + 0.44479, + 0.43977, + 0.45002, + 0.45362, + 0.45377, + 0.45436, + 0.44253, + 0.44457, + 0.45383, + 0.45596, + 0.45261, + 0.4516, + 0.45161, + 0.45303, + 0.43464, + 0.43652, + 0.44758, + 0.44901, + 0.44729, + 0.45325, + 0.44638, + 0.43862, + 0.4353, + 0.44012, + 0.44375, + 0.44691, + 0.44508, + 0.44783, + 0.44662, + 0.45161, + 0.43977, + 0.43968, + 0.4409, + 0.44272, + 0.44165, + 0.4453, + 0.4461, + 0.44635, + 0.44321, + 0.43877, + 0.44548, + 0.44124, + 0.44386, + 0.44185, + 0.43882, + 0.43874, + 0.61671, + 0.44295, + 0.4451, + 0.43869, + 0.44223, + 0.43833, + 0.44469, + 0.44476, + 0.44294, + 0.44362, + 0.4417, + 0.44045, + 0.44113, + 0.44174, + 0.4438, + 0.44235, + 0.44348, + 0.44315, + 0.44249, + 0.43979, + 0.43901, + 0.43734, + 0.43836, + 0.43776, + 0.44259, + 0.43817, + 0.4403, + 0.43646, + 0.43628, + 0.43735, + 0.43576, + 0.43537, + 0.43519, + 0.43657, + 0.4395, + 0.44075, + 0.4379, + 0.43864, + 0.43931, + 0.43933, + 0.43914, + 0.43998, + 0.60863, + 0.44024, + 0.44234, + 0.61444, + 0.4406, + 0.44103, + 0.44089, + 0.43894, + 0.43643, + 0.43311, + 0.43426, + 0.43504, + 0.43528, + 0.43329, + 0.43387, + 0.43408, + 0.43608, + 0.43761, + 0.43604, + 0.43664, + 0.44061, + 0.43728, + 0.4362, + 0.43852, + 0.4395, + 0.44056, + 0.43729, + 0.4387 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json new file mode 100644 index 000000000..01ae9fa60 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + 
"lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.88328, + 10.90257, + 10.88663, + 10.83293, + 10.67628, + 10.64935, + 10.43401, + 10.15135, + 9.93919, + 9.84145, + 9.5886, + 9.85443, + 9.88471, + 9.6295, + 9.78811, + 9.51135, + 9.45833, + 9.64922, + 9.3861, + 9.33215, + 9.24219, + 9.14551, + 9.17554, + 8.99539, + 9.18938, + 9.05997, + 9.15548, + 9.16492, + 9.29764, + 8.98435, + 8.92898, + 9.04372, + 9.04285, + 8.65475, + 8.71696, + 8.75327, + 8.68353, + 8.73425, + 8.65866, + 8.7648, + 8.66088, + 8.84978, + 8.83233, + 8.49954, + 8.38931, + 8.43182, + 8.49351, + 8.38471, + 8.43278, + 8.57978, + 8.36719, + 8.19226, + 8.22606, + 8.22217, + 8.26751, + 7.91344, + 8.09563, + 7.89094, + 8.24624, + 8.23026, + 8.00472, + 7.96522, + 7.91788, + 7.7397, + 7.73956, + 7.64272, + 7.5154, + 7.90678, + 7.6983, + 7.45188, + 7.7404, + 7.76772, + 7.54129, + 7.29853, + 7.45244, + 7.33556, + 7.46205, + 7.2239, + 7.63657, + 7.27934, + 7.35205, + 7.21344, + 7.2184, + 7.42314, + 7.17762, + 7.28364, + 7.00217, + 7.00609, + 7.04135, + 7.14062, + 6.82539, + 6.98709, + 7.08964, + 7.00127, + 6.87463, + 6.75505, + 6.98955, + 7.05522, + 6.70122, + 6.57704, + 6.7241, + 6.73883, + 6.73084, + 6.73626, + 6.65691, + 6.40601, + 6.6385, + 6.61945, + 6.44599, + 6.62978, + 6.7427, + 6.60925, + 6.72472, + 6.69413, + 6.62417, + 6.50597, + 6.59855, + 6.40573, + 6.66284, + 6.24739, + 6.24997, + 6.30097, + 6.388, + 6.34802, + 6.45034, + 6.28816, + 6.33919, + 6.23671, + 6.20179, + 6.39922, + 6.32737, + 6.32553, + 6.17013, + 6.16365, + 6.24434, + 6.39029, + 6.20574, + 6.15527, + 6.18471, + 6.1222, + 6.07029, + 6.07979, + 6.26575, + 6.41726, + 6.26706, + 6.30954, + 6.10595, + 6.18734, + 6.00692, + 6.03492, + 5.96423, + 6.2551, + 6.19408, + 5.97048, + 5.78933, + 6.12844, + 5.85507, + 6.10685, + 5.79224, + 6.16384, + 6.15379, + 6.09028, + 5.93344, + 6.11618, + 5.94755, + 6.19909, + 5.89849, + 5.79479, + 5.78215, + 5.68723, + 6.01666, + 5.99873, + 6.06846, + 5.89225, + 6.04309, + 5.97331, + 5.99586, + 5.98785, + 5.9482, + 5.83937, + 5.9539, + 5.61502, + 5.699, + 5.88897, + 5.84054, + 5.86112, + 5.75936, + 5.8375, + 5.72064, + 5.55646, + 5.71958, + 5.62394, + 5.82954, + 5.59832, + 5.70553, + 5.71488, + 5.89528, + 5.63976, + 5.84631, + 5.73496, + 5.86743, + 5.32607, + 5.8903, + 5.86889, + 5.85006, + 5.40738, + 5.40549, + 5.61986, + 5.59188, + 5.48192, + 5.57349, + 5.66996, + 5.47178, + 5.74017, + 5.5091, + 5.5953, + 5.62066, + 5.61598, + 5.50824, + 5.60964, + 5.66876, + 5.67788, + 5.58421, + 5.65722, + 5.37016, + 5.67677, + 5.62454, + 5.41705, + 5.58431, + 5.62542, + 5.551, + 5.33804, + 5.5352, + 5.48161, + 5.4792, + 5.37255, + 5.55166, + 5.59953, + 5.38742, + 5.52882, + 5.48399, + 5.32717, + 5.50198, + 5.40392, + 5.43702, + 5.3136, + 5.06117, + 5.47389, + 5.56557, + 5.70853, + 5.41216, + 5.59341, + 5.63164, + 5.23055, + 5.27033, + 5.38841, + 5.39231, + 5.32637, + 5.49634, + 5.17964, + 5.29868, + 5.24799, + 5.37548, + 5.25701, + 5.44548, + 5.5335, + 5.31052, + 5.43683, + 5.3353, + 5.07101, + 5.31399, + 5.25159, + 5.30391, + 5.10938, + 5.27301, + 5.26584, + 5.47183, + 5.15833, + 5.26797, + 5.2042, + 5.35548, + 4.98018, + 4.91368, + 5.31818, + 5.38695, + 5.2229, + 5.31671, + 5.10441, + 5.157, + 5.26026, + 5.0625, + 5.25998, + 5.07253, + 5.3394, + 5.24357, + 5.1487, + 5.23894, + 5.03446, + 5.31002, + 5.04729, + 5.02048, + 5.13726, + 5.10974, + 5.26597, + 5.14767, + 5.27512, + 5.09179, + 5.09166, + 5.24809, + 5.31963, + 5.24883, + 5.18566, + 5.13848, + 5.28494, + 4.94428, + 5.20203, + 5.08707, + 5.2953, 
+ 5.17219, + 5.18368, + 5.10813, + 4.97968, + 4.98627, + 5.21879, + 5.30748, + 5.09449, + 5.05013, + 4.90918, + 5.1167, + 5.11153, + 4.92276, + 5.33502, + 5.01879, + 5.09746, + 5.15679, + 5.00133, + 5.05827, + 5.0642, + 4.99125, + 5.07529, + 5.15683, + 4.97325, + 5.18006, + 4.92846, + 4.91522, + 5.06502, + 4.98714, + 4.90587, + 4.76968, + 4.93606, + 5.10905, + 5.01253, + 5.01189, + 5.32285, + 4.95232, + 4.98602, + 5.03643, + 4.79932, + 4.73082, + 4.98974, + 5.03227, + 4.869, + 4.94652, + 5.03569, + 5.01991, + 4.80827, + 4.8843, + 4.90063, + 4.82504, + 4.74012, + 5.00614, + 4.74848, + 5.20476, + 4.78042, + 4.98499, + 4.73025, + 4.7785, + 4.81295, + 4.64494, + 4.65243, + 4.83669, + 4.8024, + 4.79669, + 4.91921, + 4.87673, + 4.91715, + 4.76372, + 4.87698, + 4.72822, + 4.90557, + 4.95497, + 4.8678, + 4.70245, + 4.77753, + 4.89528, + 4.70375, + 4.8549, + 4.68367, + 4.68022, + 4.64383 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 73.0, + 74.0, + 89.0, + 69.0, + 80.0, + 81.0, + 114.0, + 120.0, + 136.0, + 153.0, + 132.0, + 143.0, + 138.0, + 166.0, + 183.0, + 152.0, + 149.0, + 170.0, + 167.0, + 164.0, + 173.0, + 182.0, + 184.0, + 196.0, + 177.0, + 176.0, + 223.0, + 188.0, + 191.0, + 163.0, + 168.0, + 143.0, + 156.0, + 162.0, + 162.0, + 141.0, + 176.0, + 203.0, + 169.0, + 205.0, + 142.0, + 165.0, + 143.0, + 172.0, + 177.0, + 173.0, + 201.0, + 208.0, + 179.0, + 206.0, + 233.0, + 183.0, + 204.0, + 136.0, + 161.0, + 206.0, + 173.0, + 168.0, + 219.0, + 264.0, + 191.0, + 180.0, + 185.0, + 177.0, + 187.0, + 250.0, + 225.0, + 175.0, + 235.0, + 183.0, + 228.0, + 253.0, + 184.0, + 214.0, + 206.0, + 216.0, + 273.0, + 223.0, + 279.0, + 243.0, + 277.0, + 232.0, + 223.0, + 213.0, + 232.0, + 183.0, + 193.0, + 226.0, + 226.0, + 198.0, + 212.0, + 211.0, + 229.0, + 210.0, + 220.0, + 188.0, + 216.0, + 189.0, + 182.0, + 190.0, + 153.0, + 170.0, + 180.0, + 173.0, + 139.0, + 137.0, + 158.0, + 153.0, + 131.0, + 185.0, + 187.0, + 148.0, + 178.0, + 153.0, + 149.0, + 126.0, + 169.0, + 112.0, + 166.0, + 167.0, + 188.0, + 146.0, + 137.0, + 138.0, + 126.0, + 118.0, + 127.0, + 139.0, + 133.0, + 142.0, + 143.0, + 105.0, + 131.0, + 128.0, + 154.0, + 108.0, + 163.0, + 113.0, + 113.0, + 103.0, + 110.0, + 113.0, + 98.0, + 122.0, + 156.0, + 119.0, + 129.0, + 148.0, + 133.0, + 119.0, + 97.0, + 97.0, + 129.0, + 129.0, + 120.0, + 101.0, + 108.0, + 146.0, + 113.0, + 136.0, + 90.0, + 121.0, + 130.0, + 125.0, + 87.0, + 103.0, + 105.0, + 130.0, + 102.0, + 122.0, + 139.0, + 106.0, + 108.0, + 96.0, + 132.0, + 98.0, + 115.0, + 135.0, + 116.0, + 119.0, + 102.0, + 126.0, + 146.0, + 111.0, + 127.0, + 135.0, + 126.0, + 106.0, + 114.0, + 118.0, + 113.0, + 87.0, + 126.0, + 87.0, + 113.0, + 84.0, + 126.0, + 131.0, + 121.0, + 93.0, + 121.0, + 116.0, + 112.0, + 102.0, + 112.0, + 111.0, + 107.0, + 80.0, + 114.0, + 100.0, + 111.0, + 99.0, + 112.0, + 127.0, + 109.0, + 83.0, + 108.0, + 118.0, + 109.0, + 102.0, + 104.0, + 140.0, + 108.0, + 115.0, + 110.0, + 112.0, + 112.0, + 130.0, + 89.0, + 113.0, + 129.0, + 91.0, + 92.0, + 95.0, + 99.0, + 97.0, + 105.0, + 93.0, + 126.0, + 78.0, + 105.0, + 115.0, + 98.0, + 104.0, + 111.0, + 95.0, + 110.0, + 109.0, + 107.0, + 123.0, + 111.0, + 95.0, + 130.0, + 110.0, + 107.0, + 96.0, + 96.0, + 116.0, + 101.0, + 116.0, + 94.0, + 91.0, + 126.0, + 97.0, + 96.0, + 111.0, + 131.0, + 104.0, + 112.0, + 123.0, + 108.0, + 109.0, + 96.0, + 113.0, + 116.0, + 124.0, + 91.0, + 106.0, + 108.0, + 105.0, + 97.0, + 96.0, + 96.0, + 112.0, + 115.0, + 107.0, + 120.0, + 74.0, + 108.0, + 
100.0, + 98.0, + 87.0, + 115.0, + 92.0, + 94.0, + 111.0, + 109.0, + 110.0, + 111.0, + 106.0, + 133.0, + 101.0, + 110.0, + 121.0, + 98.0, + 121.0, + 89.0, + 106.0, + 111.0, + 112.0, + 116.0, + 121.0, + 92.0, + 103.0, + 115.0, + 102.0, + 102.0, + 112.0, + 109.0, + 104.0, + 131.0, + 103.0, + 99.0, + 88.0, + 131.0, + 107.0, + 105.0, + 116.0, + 111.0, + 107.0, + 109.0, + 92.0, + 114.0, + 110.0, + 131.0, + 89.0, + 131.0, + 121.0, + 107.0, + 113.0, + 122.0, + 127.0, + 118.0, + 113.0, + 123.0, + 99.0, + 113.0, + 95.0, + 139.0, + 133.0, + 115.0, + 103.0, + 135.0, + 140.0, + 121.0, + 116.0, + 112.0, + 108.0, + 109.0, + 120.0, + 121.0, + 139.0, + 131.0, + 121.0, + 99.0, + 129.0, + 128.0, + 112.0, + 111.0, + 113.0, + 108.0, + 125.0, + 104.0, + 125.0, + 108.0, + 132.0, + 113.0, + 131.0, + 106.0, + 105.0, + 127.0, + 103.0, + 112.0, + 126.0, + 138.0, + 122.0, + 93.0, + 117.0, + 108.0, + 131.0, + 109.0, + 119.0, + 116.0, + 112.0, + 117.0, + 110.0, + 113.0, + 117.0, + 123.0, + 129.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 23.51382, + 0.5342, + 0.5223, + 0.51941, + 0.51915, + 0.51817, + 0.52172, + 0.52039, + 1.09879, + 0.51789, + 0.53759, + 0.51941, + 0.51486, + 0.52394, + 0.55868, + 0.51619, + 0.51826, + 0.52072, + 0.5198, + 0.52296, + 0.51791, + 0.5175, + 0.51812, + 0.5142, + 0.5147, + 0.51385, + 0.51787, + 0.51458, + 0.51261, + 0.5194, + 0.52308, + 0.52639, + 0.51319, + 0.51711, + 0.51849, + 0.52014, + 0.51598, + 0.51621, + 0.51753, + 0.51941, + 0.52438, + 0.51841, + 0.52176, + 0.51987, + 0.53153, + 0.51724, + 0.51824, + 0.51341, + 0.51785, + 0.51604, + 0.51734, + 0.51496, + 0.51307, + 0.5287, + 0.51388, + 0.52769, + 0.52046, + 0.64635, + 0.51593, + 0.51775, + 0.52198, + 0.51714, + 0.52393, + 0.54984, + 0.53386, + 0.52318, + 0.53634, + 0.51966, + 0.51953, + 0.52496, + 0.52002, + 0.52185, + 0.52079, + 0.51802, + 0.51931, + 0.52004, + 0.52012, + 0.52253, + 0.56766, + 0.52277, + 0.51891, + 0.52244, + 0.77939, + 0.52675, + 0.52298, + 0.52169, + 0.54141, + 0.51931, + 0.52167, + 0.52006, + 0.52623, + 0.52106, + 0.52152, + 0.51996, + 0.52123, + 0.52206, + 0.52184, + 0.5221, + 0.52339, + 0.5196, + 0.52264, + 0.56193, + 0.51873, + 0.51733, + 0.52052, + 0.52492, + 0.51965, + 0.9034, + 0.52445, + 0.52113, + 0.52863, + 0.52107, + 0.53136, + 0.53476, + 0.52098, + 0.51906, + 0.52323, + 0.52001, + 0.52096, + 0.51763, + 0.52786, + 0.51903, + 0.51973, + 0.51829, + 0.52265, + 0.53926, + 0.52064, + 0.52148, + 0.51749, + 0.52273, + 0.5196, + 0.64915, + 0.52709, + 0.52382, + 0.52177, + 0.52138, + 0.51704, + 0.52011, + 0.5235, + 0.52066, + 0.5224, + 0.5223, + 0.52268, + 0.5202, + 0.52043, + 0.52099, + 0.51814, + 0.51833, + 0.52443, + 0.51872, + 0.5226, + 0.51996, + 0.5247, + 0.52329, + 0.52019, + 0.5266, + 0.52223, + 0.51963, + 0.52204, + 0.52169, + 0.51858, + 0.52132, + 0.52141, + 0.52373, + 0.52127, + 0.51793, + 0.53003, + 0.51861, + 0.5225, + 0.52182, + 0.51846, + 0.52272, + 0.51992, + 0.5237, + 0.51685, + 0.5209, + 0.51901, + 0.51631, + 0.52358, + 0.51629, + 0.51963, + 0.52068, + 0.52867, + 0.77752, + 0.51921, + 0.52025, + 0.52279, + 0.51598, + 0.51949, + 0.5185, + 0.51599, + 0.51831, + 0.51714, + 0.52096, + 0.51531, + 0.51772, + 0.52075, + 0.51527, + 0.52285, + 0.51419, + 0.50962, + 0.52299, + 0.51823, + 0.5203, + 0.52057, + 0.6447, + 0.52388, + 0.52098, + 0.51617, + 0.52062, + 0.51981, + 0.51981, + 0.52216, + 0.51694, + 0.52074, + 0.51891, + 0.51763, + 0.52161, + 0.51535, + 0.51916, + 0.51601, + 0.51886, + 0.52694, + 0.51739, + 0.52451, + 0.51812, + 
0.51682, + 0.51817, + 0.51679, + 0.51488, + 0.51481, + 0.64785, + 0.51418, + 0.51997, + 0.5195, + 0.51253, + 0.55243, + 0.5133, + 0.51914, + 0.51872, + 0.5117, + 0.52929, + 0.51388, + 0.51762, + 0.51507, + 0.51904, + 0.51979, + 0.53219, + 0.51427, + 0.51907, + 0.52006, + 0.52028, + 0.5158, + 0.51359, + 0.51582, + 0.51882, + 0.77271, + 0.51317, + 0.51263, + 0.5189, + 0.51467, + 0.52205, + 0.51684, + 0.51957, + 0.51527, + 0.52485, + 0.5329, + 0.51602, + 0.52031, + 0.52254, + 0.52213, + 0.51582, + 0.52159, + 0.5168, + 0.51972, + 0.51313, + 0.51875, + 0.52647, + 0.5295, + 0.51793, + 0.52266, + 0.51713, + 0.51426, + 0.51708, + 0.51628, + 0.51718, + 0.51698, + 0.51493, + 0.51322, + 0.51916, + 0.52679, + 0.52173, + 0.52442, + 0.52011, + 0.52081, + 0.52103, + 0.51937, + 0.51853, + 0.51432, + 0.51971, + 0.51314, + 0.5217, + 0.51693, + 0.52016, + 0.51948, + 0.52146, + 0.6434, + 0.51345, + 0.51714, + 0.52033, + 0.52025, + 0.52005, + 0.52095, + 0.5176, + 0.51568, + 0.52952, + 0.51954, + 0.5179, + 0.51824, + 0.51634, + 0.51696, + 0.52052, + 0.51605, + 0.51911, + 0.5166, + 0.51723, + 0.51968, + 0.51804, + 0.51805, + 0.51944, + 0.65632, + 0.51506, + 0.51541, + 0.52912, + 0.51706, + 0.51487, + 0.51405, + 0.51718, + 0.52008, + 0.51812, + 0.5149, + 0.51969, + 0.51459, + 0.51746, + 0.51199, + 0.51806, + 0.51521, + 0.51985, + 0.52113, + 0.5151, + 0.52832, + 0.51726, + 0.51874, + 0.52492, + 0.52264, + 0.52255, + 0.52119, + 0.52146, + 0.52374, + 0.52585, + 0.52001, + 0.52957, + 0.52158, + 0.52306, + 0.53198, + 0.51875, + 0.52172, + 0.52141, + 0.52506, + 0.52701, + 0.52335, + 0.52579, + 0.52561, + 0.52567, + 0.52299, + 0.52173, + 0.52358, + 0.52268, + 0.5225, + 0.53389, + 0.79026, + 0.52767, + 0.52103, + 0.53508, + 0.52025, + 0.51955, + 0.52579, + 0.52352, + 0.51858, + 0.51765, + 0.52118, + 0.52567, + 0.52257, + 0.52435, + 0.51912, + 0.538, + 0.52183, + 0.52136, + 0.51694, + 0.51741 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values_lts.json new file mode 100644 index 000000000..e787a3088 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.85943, + 10.87053, + 10.8552, + 10.80356, + 10.64125, + 10.62658, + 10.41609, + 10.12827, + 9.92585, + 9.82486, + 9.56933, + 9.84044, + 9.86925, + 9.61422, + 9.77596, + 9.50084, + 9.45229, + 9.6411, + 9.38015, + 9.32643, + 9.23852, + 9.14191, + 9.17285, + 8.9927, + 9.18814, + 9.05775, + 9.15479, + 9.16462, + 9.29869, + 8.98698, + 8.93083, + 9.04739, + 9.04626, + 8.65646, + 8.71654, + 8.75519, + 8.68493, + 8.73641, + 8.66113, + 8.76487, + 8.66214, + 8.84933, + 8.83099, + 8.49833, + 8.38764, + 8.42872, + 8.49081, + 8.38216, + 8.4304, + 8.57772, + 8.3637, + 8.19009, + 8.2243, 
+ 8.21889, + 8.26311, + 7.90921, + 8.08965, + 7.88749, + 8.23972, + 8.2245, + 7.99829, + 7.95654, + 7.91147, + 7.73211, + 7.73278, + 7.63576, + 7.50815, + 7.89999, + 7.69271, + 7.44759, + 7.73518, + 7.76308, + 7.53726, + 7.29755, + 7.45042, + 7.3335, + 7.46271, + 7.225, + 7.63686, + 7.2791, + 7.35262, + 7.21194, + 7.21749, + 7.42206, + 7.17637, + 7.28451, + 7.00229, + 7.00565, + 7.03947, + 7.14154, + 6.82546, + 6.98874, + 7.09158, + 7.00468, + 6.87701, + 6.76252, + 6.99607, + 7.06246, + 6.7093, + 6.58432, + 6.73413, + 6.74992, + 6.73916, + 6.74503, + 6.66397, + 6.41283, + 6.64356, + 6.62408, + 6.4507, + 6.63348, + 6.74925, + 6.61194, + 6.72888, + 6.69712, + 6.62816, + 6.51254, + 6.60259, + 6.40806, + 6.66632, + 6.2507, + 6.25539, + 6.30384, + 6.39197, + 6.35089, + 6.45101, + 6.2955, + 6.34162, + 6.23953, + 6.2031, + 6.40112, + 6.32791, + 6.32743, + 6.16712, + 6.16395, + 6.24217, + 6.38851, + 6.20408, + 6.15194, + 6.18454, + 6.1209, + 6.06687, + 6.07678, + 6.26378, + 6.41474, + 6.26293, + 6.30777, + 6.10302, + 6.18498, + 6.00557, + 6.03665, + 5.96024, + 6.2507, + 6.19188, + 5.96584, + 5.78516, + 6.12539, + 5.85253, + 6.10869, + 5.78882, + 6.16044, + 6.14583, + 6.08775, + 5.93339, + 6.11557, + 5.94544, + 6.19493, + 5.89494, + 5.79561, + 5.77741, + 5.68874, + 6.0135, + 5.99903, + 6.06725, + 5.8872, + 6.03788, + 5.96513, + 5.99395, + 5.98839, + 5.94543, + 5.83698, + 5.94898, + 5.61313, + 5.69872, + 5.88749, + 5.84072, + 5.8593, + 5.76366, + 5.83328, + 5.72126, + 5.55865, + 5.71778, + 5.62379, + 5.82983, + 5.60127, + 5.70628, + 5.71074, + 5.89526, + 5.64025, + 5.84484, + 5.73462, + 5.86678, + 5.32703, + 5.89388, + 5.86988, + 5.85354, + 5.41104, + 5.40723, + 5.62371, + 5.58859, + 5.48045, + 5.57103, + 5.66878, + 5.47266, + 5.74241, + 5.50355, + 5.58657, + 5.6171, + 5.6132, + 5.50529, + 5.61047, + 5.6702, + 5.67709, + 5.58565, + 5.65642, + 5.36862, + 5.67635, + 5.62256, + 5.42287, + 5.57977, + 5.62805, + 5.54907, + 5.33789, + 5.53276, + 5.47933, + 5.47544, + 5.3732, + 5.54994, + 5.60231, + 5.38211, + 5.51886, + 5.48037, + 5.32973, + 5.50123, + 5.40609, + 5.44142, + 5.31615, + 5.06636, + 5.47338, + 5.56525, + 5.70949, + 5.41185, + 5.59801, + 5.63224, + 5.22911, + 5.26901, + 5.38983, + 5.39245, + 5.32727, + 5.49282, + 5.18151, + 5.30008, + 5.24082, + 5.37393, + 5.25404, + 5.443, + 5.53676, + 5.31112, + 5.43487, + 5.33659, + 5.07047, + 5.30683, + 5.25186, + 5.30466, + 5.11066, + 5.27622, + 5.26326, + 5.47457, + 5.15806, + 5.26885, + 5.20826, + 5.35837, + 4.98081, + 4.9145, + 5.32227, + 5.38824, + 5.22777, + 5.3152, + 5.10173, + 5.1612, + 5.2585, + 5.06606, + 5.26362, + 5.06839, + 5.34424, + 5.24663, + 5.15173, + 5.24493, + 5.0382, + 5.31517, + 5.05402, + 5.02588, + 5.1416, + 5.11464, + 5.26976, + 5.1508, + 5.2759, + 5.09641, + 5.09478, + 5.24899, + 5.32187, + 5.25358, + 5.18918, + 5.14007, + 5.28993, + 4.94923, + 5.20665, + 5.09082, + 5.30279, + 5.17751, + 5.1877, + 5.11038, + 4.97967, + 4.98954, + 5.21943, + 5.31096, + 5.09497, + 5.05772, + 4.91641, + 5.12945, + 5.11765, + 4.92879, + 5.34097, + 5.02317, + 5.10375, + 5.1625, + 5.00244, + 5.06493, + 5.07017, + 4.9971, + 5.07986, + 5.162, + 4.9804, + 5.18135, + 4.9301, + 4.92184, + 5.06864, + 4.99078, + 4.90547, + 4.77408, + 4.94473, + 5.11756, + 5.01899, + 5.02253, + 5.33217, + 4.96101, + 4.99441, + 5.04553, + 4.80626, + 4.7391, + 4.99364, + 5.03728, + 4.87194, + 4.95067, + 5.04413, + 5.02255, + 4.81787, + 4.89308, + 4.90769, + 4.82921, + 4.7438, + 5.01691, + 4.75193, + 5.21153, + 4.78624, + 4.99548, + 4.73862, + 4.78812, + 4.81836, + 4.64864, + 
4.65649, + 4.84617, + 4.80992, + 4.80425, + 4.92585, + 4.88618, + 4.93246, + 4.76987, + 4.88471, + 4.73751, + 4.91636, + 4.95806, + 4.87967, + 4.70744, + 4.78973, + 4.89998, + 4.71284, + 4.87002, + 4.69686, + 4.69721, + 4.648 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 61.0, + 66.0, + 86.0, + 64.0, + 68.0, + 81.0, + 100.0, + 92.0, + 106.0, + 131.0, + 123.0, + 149.0, + 140.0, + 182.0, + 180.0, + 159.0, + 169.0, + 200.0, + 163.0, + 164.0, + 168.0, + 177.0, + 167.0, + 183.0, + 190.0, + 162.0, + 188.0, + 162.0, + 143.0, + 160.0, + 156.0, + 192.0, + 152.0, + 179.0, + 141.0, + 176.0, + 168.0, + 202.0, + 176.0, + 202.0, + 157.0, + 168.0, + 183.0, + 180.0, + 177.0, + 205.0, + 201.0, + 158.0, + 189.0, + 219.0, + 217.0, + 173.0, + 211.0, + 145.0, + 197.0, + 176.0, + 160.0, + 154.0, + 207.0, + 234.0, + 196.0, + 193.0, + 167.0, + 160.0, + 196.0, + 207.0, + 190.0, + 186.0, + 186.0, + 185.0, + 225.0, + 236.0, + 162.0, + 247.0, + 175.0, + 184.0, + 230.0, + 220.0, + 230.0, + 201.0, + 226.0, + 212.0, + 204.0, + 260.0, + 192.0, + 186.0, + 160.0, + 202.0, + 184.0, + 209.0, + 187.0, + 214.0, + 225.0, + 203.0, + 185.0, + 171.0, + 178.0, + 193.0, + 222.0, + 182.0, + 155.0, + 154.0, + 159.0, + 141.0, + 167.0, + 143.0, + 154.0, + 181.0, + 142.0, + 149.0, + 169.0, + 177.0, + 185.0, + 167.0, + 161.0, + 143.0, + 148.0, + 138.0, + 177.0, + 141.0, + 152.0, + 132.0, + 145.0, + 144.0, + 115.0, + 111.0, + 100.0, + 130.0, + 120.0, + 124.0, + 154.0, + 121.0, + 140.0, + 122.0, + 121.0, + 116.0, + 138.0, + 116.0, + 115.0, + 109.0, + 106.0, + 84.0, + 120.0, + 118.0, + 127.0, + 108.0, + 106.0, + 135.0, + 101.0, + 96.0, + 120.0, + 123.0, + 88.0, + 134.0, + 143.0, + 109.0, + 116.0, + 102.0, + 104.0, + 118.0, + 116.0, + 125.0, + 104.0, + 122.0, + 111.0, + 95.0, + 111.0, + 101.0, + 125.0, + 103.0, + 112.0, + 121.0, + 103.0, + 90.0, + 147.0, + 120.0, + 110.0, + 114.0, + 89.0, + 111.0, + 111.0, + 101.0, + 108.0, + 123.0, + 75.0, + 100.0, + 85.0, + 125.0, + 95.0, + 114.0, + 109.0, + 99.0, + 102.0, + 95.0, + 108.0, + 99.0, + 102.0, + 76.0, + 102.0, + 112.0, + 95.0, + 71.0, + 104.0, + 124.0, + 103.0, + 106.0, + 106.0, + 85.0, + 132.0, + 112.0, + 106.0, + 100.0, + 94.0, + 126.0, + 105.0, + 102.0, + 112.0, + 126.0, + 127.0, + 83.0, + 73.0, + 102.0, + 84.0, + 99.0, + 121.0, + 106.0, + 112.0, + 101.0, + 89.0, + 117.0, + 109.0, + 92.0, + 117.0, + 111.0, + 111.0, + 111.0, + 102.0, + 92.0, + 120.0, + 102.0, + 99.0, + 98.0, + 105.0, + 101.0, + 108.0, + 87.0, + 86.0, + 114.0, + 115.0, + 112.0, + 101.0, + 126.0, + 108.0, + 110.0, + 105.0, + 87.0, + 117.0, + 90.0, + 126.0, + 107.0, + 103.0, + 109.0, + 111.0, + 85.0, + 105.0, + 103.0, + 113.0, + 97.0, + 119.0, + 117.0, + 138.0, + 133.0, + 110.0, + 105.0, + 115.0, + 103.0, + 86.0, + 132.0, + 102.0, + 119.0, + 93.0, + 99.0, + 100.0, + 110.0, + 116.0, + 87.0, + 116.0, + 81.0, + 114.0, + 103.0, + 103.0, + 103.0, + 111.0, + 92.0, + 88.0, + 95.0, + 92.0, + 103.0, + 98.0, + 97.0, + 110.0, + 129.0, + 110.0, + 99.0, + 118.0, + 111.0, + 88.0, + 101.0, + 138.0, + 104.0, + 102.0, + 114.0, + 88.0, + 116.0, + 108.0, + 101.0, + 104.0, + 108.0, + 104.0, + 104.0, + 129.0, + 121.0, + 89.0, + 104.0, + 98.0, + 100.0, + 118.0, + 103.0, + 98.0, + 90.0, + 90.0, + 100.0, + 106.0, + 111.0, + 116.0, + 102.0, + 117.0, + 130.0, + 131.0, + 108.0, + 110.0, + 129.0, + 116.0, + 112.0, + 95.0, + 98.0, + 107.0, + 97.0, + 114.0, + 119.0, + 94.0, + 95.0, + 113.0, + 114.0, + 116.0, + 102.0, + 126.0, + 119.0, + 103.0, + 116.0, + 110.0, + 124.0, + 132.0, + 117.0, + 110.0, + 
115.0, + 116.0, + 91.0, + 105.0, + 126.0, + 77.0, + 107.0, + 100.0, + 119.0, + 116.0, + 137.0, + 86.0, + 132.0, + 102.0, + 108.0, + 119.0, + 106.0, + 135.0, + 117.0, + 98.0, + 111.0, + 138.0, + 120.0, + 103.0, + 102.0, + 133.0, + 102.0, + 139.0, + 112.0, + 108.0, + 104.0, + 106.0, + 110.0, + 125.0, + 106.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 22.12982, + 0.58407, + 0.59544, + 0.57636, + 0.5766, + 0.58301, + 0.57644, + 0.58681, + 0.58148, + 0.57124, + 0.56572, + 0.58109, + 0.56543, + 0.5649, + 0.56341, + 0.56668, + 0.56923, + 0.57023, + 0.57002, + 0.57163, + 0.5698, + 0.57588, + 0.57051, + 0.56835, + 0.57262, + 0.57082, + 0.5649, + 0.57266, + 0.57393, + 0.58758, + 0.56761, + 0.57161, + 0.57422, + 0.57961, + 0.57363, + 0.59229, + 0.56483, + 0.57134, + 0.56808, + 0.5692, + 0.56593, + 0.5711, + 0.56922, + 0.5683, + 0.56701, + 0.57467, + 0.58127, + 0.56473, + 0.56993, + 0.57385, + 0.57146, + 0.57652, + 0.57352, + 0.56785, + 0.5726, + 0.57374, + 0.56621, + 0.56991, + 0.57008, + 0.57409, + 0.5744, + 0.57432, + 0.57083, + 0.57352, + 0.57249, + 0.57474, + 0.57472, + 0.58684, + 0.5799, + 0.57096, + 0.57292, + 0.56708, + 0.5663, + 0.56501, + 0.56504, + 0.56721, + 0.56683, + 0.56252, + 0.77946, + 0.56722, + 0.56653, + 0.57422, + 0.57071, + 0.56657, + 0.56506, + 0.56584, + 0.56691, + 0.56745, + 0.57057, + 0.56428, + 0.56687, + 0.57132, + 0.56594, + 0.56782, + 0.56891, + 0.56753, + 0.56906, + 0.56673, + 0.88584, + 0.56888, + 0.57701, + 0.57547, + 0.56962, + 0.5688, + 0.57167, + 0.57702, + 0.57411, + 0.57094, + 0.57176, + 0.56854, + 0.56903, + 0.56946, + 0.56935, + 0.56407, + 0.56657, + 0.57094, + 0.56615, + 0.57381, + 0.56941, + 0.57691, + 0.57244, + 0.57915, + 0.57743, + 0.57646, + 0.56386, + 0.56966, + 0.56538, + 0.56642, + 0.56814, + 0.56657, + 0.57645, + 0.57776, + 0.57771, + 0.57127, + 0.57046, + 0.56543, + 0.56914, + 0.57383, + 0.59003, + 0.57928, + 0.57644, + 0.56492, + 0.57059, + 0.56832, + 0.57254, + 0.57276, + 0.56747, + 0.57186, + 0.571, + 0.56967, + 0.56653, + 0.57611, + 0.57206, + 0.57268, + 0.57845, + 0.56889, + 0.56949, + 0.58288, + 0.57504, + 0.57406, + 0.57109, + 0.58614, + 0.56961, + 0.56989, + 0.57728, + 0.57191, + 0.56862, + 0.57399, + 0.56928, + 0.57292, + 0.57047, + 0.57538, + 0.5753, + 0.57291, + 0.57288, + 0.58911, + 0.57434, + 0.57201, + 0.57334, + 0.57987, + 0.5698, + 0.57996, + 0.57766, + 0.57099, + 0.57237, + 0.57303, + 0.67546, + 0.56788, + 0.56501, + 0.57103, + 0.56997, + 0.56764, + 0.57336, + 0.56641, + 0.5662, + 0.60418, + 0.56859, + 0.57566, + 0.56885, + 0.58381, + 0.56215, + 0.57305, + 0.58455, + 0.57298, + 0.56641, + 0.56918, + 0.57446, + 0.57409, + 0.57287, + 0.57556, + 0.569, + 0.58387, + 0.56755, + 0.57091, + 0.57385, + 0.57298, + 0.57161, + 0.57035, + 0.56803, + 0.5801, + 0.57192, + 0.57401, + 0.57126, + 0.57158, + 0.56959, + 0.57293, + 0.5672, + 0.57462, + 0.57167, + 0.57014, + 0.57475, + 0.57603, + 0.5714, + 0.62444, + 0.57036, + 0.56999, + 0.57522, + 0.5716, + 0.58197, + 0.5765, + 0.56999, + 0.58429, + 0.56856, + 0.58173, + 0.57178, + 0.56779, + 0.56947, + 0.57295, + 0.56857, + 0.56829, + 0.57295, + 0.57504, + 0.57254, + 0.5675, + 0.56824, + 0.56877, + 0.57088, + 0.58067, + 0.57834, + 0.58238, + 0.57541, + 0.57865, + 0.5778, + 0.57228, + 0.57535, + 0.57627, + 0.56977, + 0.57269, + 0.57535, + 0.5772, + 0.5831, + 0.56943, + 0.57879, + 0.57353, + 0.57324, + 0.57476, + 0.57759, + 0.57151, + 0.57047, + 0.56246, + 0.56374, + 0.57046, + 0.56893, + 0.57193, + 0.5791, + 0.58222, + 0.5705, + 0.57925, + 
0.58343, + 0.58822, + 0.57432, + 0.57436, + 0.57976, + 0.57785, + 0.57198, + 0.57174, + 0.56859, + 0.56547, + 0.57031, + 0.56948, + 0.57002, + 0.57584, + 0.57149, + 0.581, + 0.57702, + 0.58343, + 0.57227, + 0.57291, + 0.57608, + 0.57163, + 0.5767, + 0.56671, + 0.5697, + 0.5685, + 0.56652, + 0.57017, + 0.56761, + 0.57061, + 0.56876, + 0.56891, + 0.59662, + 0.59338, + 0.59138, + 0.57587, + 0.59007, + 0.5826, + 2.38992, + 0.58781, + 0.58277, + 0.58392, + 0.58454, + 0.58183, + 0.58321, + 0.58162, + 0.58178, + 0.58315, + 0.58576, + 0.58984, + 0.58447, + 0.58384, + 0.58444, + 0.57882, + 0.58178, + 0.58201, + 0.58621, + 0.58435, + 0.58728, + 0.58479, + 0.58194, + 0.58203, + 0.58472, + 0.58349, + 0.58442, + 0.5844, + 0.59043, + 0.58246, + 0.57817, + 0.59224, + 0.58333, + 0.58317, + 0.58198, + 0.57783, + 0.58072, + 0.57983, + 0.57676, + 0.57121, + 0.57894, + 0.57207, + 0.57802, + 0.5724, + 0.57705, + 0.57431, + 0.57357, + 0.56963, + 0.57063, + 0.57408, + 0.57724, + 0.57667, + 0.57465, + 0.57229, + 0.57231, + 0.57426, + 0.57414, + 0.57398, + 0.57718, + 0.57464, + 0.57416, + 0.57254, + 0.5724, + 0.58836, + 0.57475, + 0.57042, + 0.57821, + 0.58139, + 0.57394, + 0.57683, + 0.57436, + 0.57166, + 0.57692, + 0.57586 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values.json deleted file mode 100644 index d31439293..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [20.88514, 1.46887, 1.45698, 1.45724, 1.47204, 1.4532, 1.46049, 1.46232, 1.46114, 1.45572, 1.45278, 1.45251, 1.4606, 1.45971, 1.45327, 1.45649, 1.45387, 1.44992, 1.45853, 1.46565, 1.45437, 1.4525, 1.45638, 1.45952, 1.45173, 1.46389, 1.45431, 1.45274, 1.4583, 1.45541, 1.44989, 1.45048, 1.44894, 1.45131, 1.45345, 1.44108, 1.44133, 1.44014, 1.45925, 1.44689, 1.44677, 1.45727, 1.45173, 1.45401, 1.46616, 1.45271, 1.45499, 1.46938, 1.4604, 1.4635, 1.4619, 1.46438, 1.45747, 1.46752, 1.45729, 1.46194, 1.46122, 1.46137, 1.46148, 1.46024, 1.45382, 1.46877, 1.45937, 1.46525, 1.46624, 1.46409, 1.4727, 1.46116, 1.46451, 1.4659, 1.45827, 1.45377, 1.47607, 1.46536, 1.45984, 1.46776, 1.47935, 1.47512, 1.47012, 1.47272, 1.47499, 1.47329, 1.4585, 1.45704, 1.4555, 1.46025, 1.46072, 1.45592, 1.45507, 1.45416, 1.45424, 1.46471, 1.45308, 1.45358, 1.45797, 1.46272, 1.45587, 1.47021, 1.47373, 1.47488, 1.45879, 1.45526, 1.46684, 1.45424, 1.46048, 1.45539, 1.45476, 1.46257, 1.46204, 1.4552, 1.46046, 1.45792, 1.45501, 1.46191, 1.47519, 1.45861, 1.46195, 1.4555, 1.46541, 1.45771, 1.45708, 1.46256, 1.46253, 1.45733, 1.46154, 1.46224, 1.45714, 1.46628, 1.462, 1.46251, 1.46041, 1.45921, 1.45844, 1.46129, 1.45453, 1.45615, 1.45383, 1.45915, 1.45368, 1.46097, 1.4609, 1.4519, 1.46109, 1.45906, 1.45677, 1.46323, 1.45746, 1.45755, 1.46188, 1.45867, 1.45807, 1.45578, 1.46681, 1.46385, 1.46569, 1.4551, 1.46369, 1.45943, 1.45524, 1.45829, 1.45857, 1.45785, 1.45457, 1.44886, 1.45654, 1.4591, 1.4583, 1.46482, 1.45668, 1.45572, 1.45853, 1.46203, 1.46116, 1.45964, 1.4598, 1.46157, 1.46339, 1.45804, 1.46302, 1.4604, 1.4681, 1.4619, 1.46043, 1.46458, 1.44955, 1.45921, 1.46214, 1.45918, 1.45767, 1.45627, 1.45501, 1.46271, 1.46011, 1.45047, 1.45537, 1.45774, 1.45791, 1.45844, 
1.45736, 1.45685, 1.44897, 1.46515, 1.44824, 1.4544, 1.46501, 1.45918, 1.45782, 1.45713, 1.45546, 1.4536, 1.46366, 1.45823, 1.45916, 1.45823, 1.45337, 1.46118, 1.46699, 1.4587, 1.46699, 1.47055, 1.46344, 1.46652, 1.46046, 1.46265, 1.46449, 1.46285, 1.46692, 1.45814, 1.45886, 1.46803, 1.46061, 1.45819, 1.4648, 1.46266, 1.46133, 1.46278, 1.4587, 1.46188, 1.46627, 1.45851, 1.45538, 1.46707, 1.4652, 1.45779, 1.46235, 1.45952, 1.56522, 1.45535, 1.46212, 1.53267, 1.46331, 1.56631, 1.46611, 1.4675, 1.46789, 1.46422, 1.46465, 1.46332, 1.46526, 1.46728, 1.46084, 1.46879, 1.4673, 1.46097, 1.4632, 1.46893, 1.46312, 1.47082, 1.47286, 1.46203, 1.46457, 1.46392, 1.47428, 1.46372, 1.46741, 1.46293, 1.46502, 1.46743, 1.46135, 1.45986, 1.46485, 1.45803, 1.46118, 1.46355, 1.46477, 1.4597, 1.46145, 1.46577, 1.46316, 1.46246, 1.45852, 1.46444, 1.46127, 1.46343, 1.46846, 1.46172, 1.4611, 1.46651, 1.46449, 1.45901, 1.46118, 1.46452, 1.47046, 1.46733, 1.46134, 1.4708, 1.46233, 1.46381, 1.46441, 1.47211, 1.46336, 1.46499, 1.45935, 1.46955, 1.46104, 1.46986, 1.47015, 1.46324, 1.46425, 1.46739, 1.46074, 1.46764, 1.46483, 1.46352, 1.46907, 1.4704, 1.47514, 1.4677, 1.47074, 1.46865, 1.4746, 1.47247, 1.47112, 1.47411, 1.47813, 1.47421, 1.46569, 1.46574, 1.47004, 1.46433, 1.45849, 1.46834, 1.47747, 1.46919, 1.47242, 1.46719, 1.45884, 1.462, 1.45808, 1.46357, 1.46256, 1.4583, 1.53085, 1.46007, 1.56675, 1.46277, 1.46292, 1.54903, 1.46448, 1.46847, 1.46708, 1.47477, 1.46444, 1.46433, 1.46714, 1.46403, 1.46557, 1.4607, 1.4618, 1.4615, 1.45857, 1.46496, 1.46801, 1.46664, 1.45296, 1.45665, 1.46006, 1.46236, 1.46106, 1.4622, 1.46573, 1.46166, 1.45667, 1.4563, 1.46152, 1.45678, 1.45303, 1.46242, 1.46316, 1.46041, 1.4655, 1.45096, 1.45962, 1.46428, 1.45196, 1.46789, 1.45986, 1.45627, 1.46454, 1.46424]}, "forward-compute-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [6.36252, 0.75642, 0.75338, 0.74782, 0.75864, 0.75119, 0.75271, 0.75652, 0.75238, 0.74967, 0.74518, 0.74699, 0.74982, 0.74683, 0.74477, 0.74825, 0.75424, 0.74304, 0.74908, 0.74831, 0.74285, 0.74505, 0.75194, 0.75268, 0.74597, 0.75419, 0.74822, 0.74832, 0.75308, 0.7494, 0.74312, 0.74787, 0.74249, 0.74586, 0.74659, 0.74391, 0.7376, 0.74214, 0.75476, 0.74522, 0.74687, 0.75765, 0.7462, 0.75118, 0.75883, 0.7495, 0.7508, 0.75734, 0.7532, 0.75555, 0.75913, 0.75728, 0.75891, 0.75923, 0.75304, 0.75387, 0.75689, 0.75658, 0.76074, 0.76432, 0.75769, 0.76347, 0.75739, 0.7616, 0.76613, 0.76452, 0.76556, 0.76205, 0.76331, 0.76266, 0.7584, 0.75596, 0.77338, 0.76537, 0.75847, 0.77247, 0.7698, 0.76711, 0.76502, 0.76683, 0.76807, 0.76879, 0.75959, 0.75609, 0.7542, 0.75889, 0.7586, 0.75685, 0.75677, 0.7569, 0.75222, 0.75781, 0.74463, 0.74619, 0.75051, 0.75082, 0.74909, 0.7631, 0.75774, 0.76204, 0.75145, 0.745, 0.75456, 0.75, 0.75135, 0.75247, 0.74698, 0.7545, 0.75599, 0.74765, 0.75411, 0.75279, 0.74869, 0.75208, 0.75762, 0.74974, 0.75249, 0.74767, 0.75172, 0.74899, 0.751, 0.74685, 0.75057, 0.75145, 0.7525, 0.75608, 0.74708, 0.75458, 0.7537, 0.74712, 0.75411, 0.7543, 0.74836, 0.74769, 0.74953, 0.75136, 0.75937, 0.76403, 0.75925, 0.76123, 0.76488, 0.75935, 0.76327, 0.7569, 0.75895, 0.76622, 0.76412, 0.75914, 0.76039, 0.76442, 0.76455, 0.76016, 0.76196, 0.76613, 0.76729, 0.75679, 0.75985, 0.75945, 0.76323, 0.7635, 0.75457, 0.75811, 0.75642, 0.74425, 0.74872, 0.75503, 0.74958, 0.75606, 0.7608, 0.75663, 0.75567, 0.76176, 0.76045, 0.76145, 0.76278, 0.76702, 0.76166, 0.75954, 0.76405, 0.76075, 0.76028, 0.75744, 0.76195, 0.75996, 0.76397, 0.76843, 0.76911, 0.76882, 
0.76899, 0.76126, 0.76583, 0.77184, 0.76598, 0.76126, 0.76043, 0.75584, 0.7596, 0.7606, 0.75826, 0.75896, 0.75754, 0.76441, 0.75157, 0.75476, 0.76479, 0.75674, 0.75885, 0.75822, 0.75074, 0.75763, 0.76244, 0.75885, 0.75847, 0.7616, 0.75912, 0.76519, 0.75935, 0.75886, 0.75905, 0.76846, 0.7612, 0.7615, 0.76008, 0.76429, 0.75844, 0.75869, 0.76255, 0.76097, 0.75995, 0.76319, 0.76129, 0.76036, 0.76016, 0.76111, 0.76323, 0.76537, 0.759, 0.7601, 0.76445, 0.75571, 0.75685, 0.76075, 0.75723, 0.75653, 0.75845, 0.75674, 0.86396, 0.75777, 0.76008, 0.79802, 0.76226, 0.86191, 0.76011, 0.76317, 0.76386, 0.7605, 0.76066, 0.76276, 0.76322, 0.7613, 0.7592, 0.762, 0.76075, 0.75635, 0.75896, 0.7677, 0.7624, 0.76381, 0.76676, 0.75786, 0.75925, 0.76099, 0.76684, 0.7623, 0.76206, 0.76286, 0.76089, 0.75817, 0.75534, 0.75831, 0.76571, 0.76592, 0.76306, 0.76728, 0.76327, 0.76387, 0.7666, 0.76417, 0.7663, 0.7669, 0.76023, 0.76799, 0.76358, 0.76252, 0.76815, 0.76889, 0.76519, 0.77456, 0.76596, 0.76411, 0.76815, 0.77016, 0.77392, 0.76784, 0.76277, 0.77204, 0.76778, 0.7655, 0.76653, 0.76663, 0.7655, 0.76981, 0.76378, 0.76855, 0.76427, 0.77286, 0.76279, 0.75723, 0.75876, 0.76093, 0.75608, 0.76062, 0.75705, 0.75985, 0.76693, 0.76742, 0.77256, 0.76978, 0.76789, 0.76969, 0.76933, 0.77265, 0.76608, 0.76739, 0.77128, 0.76748, 0.75765, 0.75397, 0.76206, 0.75882, 0.75813, 0.76547, 0.77479, 0.76791, 0.77465, 0.76715, 0.75994, 0.76202, 0.75688, 0.75371, 0.75879, 0.75648, 0.78313, 0.75471, 0.85298, 0.75745, 0.75629, 0.79889, 0.75755, 0.7675, 0.76401, 0.77476, 0.7623, 0.76426, 0.77061, 0.76259, 0.76592, 0.76419, 0.76322, 0.76581, 0.76288, 0.76458, 0.76887, 0.76604, 0.7592, 0.7636, 0.76038, 0.76398, 0.76433, 0.76564, 0.7642, 0.76491, 0.76122, 0.76383, 0.76659, 0.76312, 0.76135, 0.76522, 0.76474, 0.76522, 0.76449, 0.75942, 0.76396, 0.76563, 0.75814, 0.76753, 0.76464, 0.7621, 0.77007, 0.76728]}, "backward-compute-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [4.28133, 0.68196, 0.6748, 0.67881, 0.68478, 0.67217, 0.67802, 0.67659, 0.67892, 0.67668, 0.67659, 0.67465, 0.67463, 0.67462, 0.67762, 0.67642, 0.6769, 0.67572, 0.67809, 0.68097, 0.67934, 0.67704, 0.67406, 0.67837, 0.6757, 0.67949, 0.67968, 0.6787, 0.67717, 0.68038, 0.67537, 0.67968, 0.67434, 0.67314, 0.67835, 0.66827, 0.67483, 0.66865, 0.67777, 0.67612, 0.66888, 0.68034, 0.67914, 0.67754, 0.686, 0.67891, 0.6825, 0.69249, 0.68805, 0.68071, 0.6807, 0.68401, 0.68197, 0.68831, 0.67921, 0.68344, 0.68292, 0.68269, 0.67859, 0.67491, 0.67595, 0.68683, 0.68164, 0.68009, 0.68194, 0.68378, 0.68844, 0.68048, 0.67795, 0.68343, 0.6796, 0.67682, 0.6863, 0.68552, 0.67712, 0.67901, 0.6881, 0.68205, 0.67931, 0.68414, 0.68584, 0.68259, 0.67712, 0.67748, 0.67636, 0.67686, 0.67957, 0.67669, 0.67544, 0.67461, 0.67469, 0.68134, 0.68, 0.67587, 0.68021, 0.68045, 0.67544, 0.67937, 0.68676, 0.68585, 0.67936, 0.68061, 0.68245, 0.67815, 0.67775, 0.6759, 0.67787, 0.68054, 0.6803, 0.67305, 0.67653, 0.67563, 0.67417, 0.68429, 0.68658, 0.67537, 0.68025, 0.6803, 0.68056, 0.6828, 0.68066, 0.68532, 0.67902, 0.67418, 0.68192, 0.6772, 0.6791, 0.68139, 0.68311, 0.68253, 0.67839, 0.67915, 0.67948, 0.68314, 0.67734, 0.67756, 0.67316, 0.67604, 0.6758, 0.67978, 0.67641, 0.67242, 0.67813, 0.67872, 0.6783, 0.67885, 0.67431, 0.67749, 0.67801, 0.6758, 0.67622, 0.67701, 0.68426, 0.6762, 0.67926, 0.67417, 0.68505, 0.67444, 0.67174, 0.67764, 0.67913, 0.67644, 0.67728, 0.67567, 0.67951, 0.67766, 0.67997, 0.68347, 0.67314, 0.66987, 0.67882, 0.67735, 0.67469, 0.67484, 0.67452, 0.67036, 0.67219, 0.66928, 
0.67596, 0.68103, 0.68041, 0.67951, 0.67362, 0.6784, 0.6726, 0.67127, 0.67283, 0.67413, 0.67371, 0.67426, 0.67198, 0.67275, 0.67579, 0.66994, 0.67168, 0.6776, 0.67237, 0.67165, 0.67104, 0.67192, 0.67427, 0.67627, 0.66668, 0.66922, 0.67584, 0.67473, 0.6708, 0.67557, 0.67335, 0.67079, 0.67545, 0.67499, 0.67953, 0.67406, 0.67059, 0.67194, 0.67815, 0.67685, 0.67968, 0.67768, 0.67845, 0.68065, 0.67662, 0.67606, 0.68139, 0.67895, 0.67961, 0.67462, 0.67355, 0.68106, 0.67561, 0.67393, 0.67793, 0.67786, 0.6746, 0.67779, 0.67398, 0.67743, 0.67735, 0.67743, 0.67124, 0.68018, 0.68312, 0.67575, 0.67441, 0.67795, 0.77498, 0.67162, 0.6764, 0.67127, 0.67597, 0.68008, 0.68042, 0.67905, 0.68174, 0.67734, 0.68026, 0.6787, 0.67714, 0.682, 0.67394, 0.68013, 0.68188, 0.67889, 0.67722, 0.67427, 0.67656, 0.68229, 0.68021, 0.6768, 0.68025, 0.67886, 0.68439, 0.67958, 0.6764, 0.67518, 0.67551, 0.68714, 0.67915, 0.67531, 0.67638, 0.674, 0.67847, 0.67644, 0.67977, 0.674, 0.67593, 0.68097, 0.67926, 0.67773, 0.67609, 0.6796, 0.67785, 0.67882, 0.67923, 0.6747, 0.67544, 0.67361, 0.68038, 0.67547, 0.67624, 0.67248, 0.67952, 0.68043, 0.67937, 0.67985, 0.67588, 0.68025, 0.67916, 0.68539, 0.67959, 0.67855, 0.67714, 0.68454, 0.67696, 0.67981, 0.683, 0.68247, 0.6825, 0.68134, 0.67836, 0.68273, 0.68212, 0.68044, 0.67659, 0.67798, 0.67887, 0.67623, 0.67774, 0.67659, 0.67891, 0.67811, 0.68204, 0.68313, 0.68107, 0.68061, 0.68094, 0.68548, 0.68238, 0.67942, 0.67349, 0.67874, 0.67949, 0.67779, 0.67431, 0.67512, 0.67432, 0.67473, 0.67593, 0.68238, 0.67917, 0.67651, 0.68094, 0.67897, 0.68533, 0.67806, 0.68435, 0.68504, 0.682, 0.68404, 0.68368, 0.68461, 0.68091, 0.6825, 0.67628, 0.68089, 0.6828, 0.67779, 0.67875, 0.67869, 0.67726, 0.67954, 0.68441, 0.67716, 0.67303, 0.67398, 0.67541, 0.6785, 0.67881, 0.67645, 0.68188, 0.67884, 0.67565, 0.67403, 0.67785, 0.67584, 0.67366, 0.67828, 0.67909, 0.67494, 0.68175, 0.67414, 0.67764, 0.68174, 0.67366, 0.68332, 0.67954, 0.67548, 0.67937, 0.67851]}, "batch-generator-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [2.31358, 0.01342, 0.01402, 0.01374, 0.01299, 0.01268, 0.01392, 0.01354, 0.01304, 0.01288, 0.01303, 0.01298, 0.01232, 0.01255, 0.01299, 0.01326, 0.01362, 0.0129, 0.01443, 0.01263, 0.01254, 0.01285, 0.01249, 0.01344, 0.01424, 0.01237, 0.01372, 0.01224, 0.013, 0.01253, 0.01341, 0.01286, 0.01401, 0.01393, 0.01367, 0.01532, 0.01387, 0.01392, 0.01291, 0.01426, 0.0158, 0.01586, 0.01402, 0.01614, 0.01699, 0.0155, 0.01558, 0.01634, 0.01595, 0.01549, 0.01633, 0.01561, 0.01611, 0.01605, 0.01621, 0.01402, 0.01567, 0.01545, 0.0163, 0.01651, 0.01564, 0.01603, 0.01693, 0.01689, 0.01357, 0.0139, 0.01398, 0.01321, 0.0147, 0.01234, 0.01211, 0.01284, 0.01261, 0.01263, 0.01246, 0.01271, 0.01272, 0.01352, 0.01254, 0.01474, 0.01286, 0.01466, 0.01388, 0.01269, 0.01267, 0.01231, 0.01228, 0.01211, 0.01249, 0.01199, 0.01406, 0.01239, 0.012, 0.01243, 0.01264, 0.01202, 0.01259, 0.01295, 0.01265, 0.01251, 0.01294, 0.01235, 0.01204, 0.01263, 0.01427, 0.01248, 0.01231, 0.01225, 0.01258, 0.01178, 0.01262, 0.01236, 0.01219, 0.01244, 0.01253, 0.01287, 0.01341, 0.01255, 0.01211, 0.01241, 0.01252, 0.01245, 0.01248, 0.01249, 0.01246, 0.01257, 0.01439, 0.01257, 0.01277, 0.01231, 0.01239, 0.01246, 0.01285, 0.01264, 0.01226, 0.01308, 0.01475, 0.01426, 0.01226, 0.01234, 0.0128, 0.01255, 0.01327, 0.01286, 0.01198, 0.0126, 0.01182, 0.01221, 0.01291, 0.01266, 0.0138, 0.01491, 0.01556, 0.01521, 0.01547, 0.01523, 0.01535, 0.01539, 0.01545, 0.01502, 0.01553, 0.01548, 0.01523, 0.0158, 0.0149, 0.01554, 0.01524, 
0.01563, 0.01495, 0.01509, 0.01539, 0.01542, 0.01541, 0.01496, 0.0133, 0.01391, 0.01409, 0.01274, 0.01438, 0.01341, 0.01299, 0.01457, 0.0135, 0.01472, 0.01228, 0.01294, 0.01287, 0.01243, 0.01296, 0.01232, 0.0131, 0.01254, 0.01253, 0.01203, 0.01548, 0.01457, 0.01673, 0.01491, 0.01608, 0.01713, 0.20109, 0.01559, 0.01542, 0.01587, 0.01537, 0.01617, 0.01548, 0.01476, 0.01531, 0.01468, 0.01359, 0.01328, 0.01334, 0.01271, 0.01326, 0.01281, 0.01274, 0.01235, 0.01343, 0.01378, 0.01234, 0.01331, 0.01322, 0.01409, 0.01395, 0.01384, 0.01454, 0.01599, 0.01706, 0.01595, 0.01555, 0.01494, 0.01652, 0.01668, 0.01556, 0.01656, 0.01651, 0.01523, 0.01549, 0.01748, 0.0151, 0.01561, 0.01593, 0.01703, 0.01695, 0.01519, 0.11815, 0.01383, 0.01413, 0.01352, 0.0127, 0.01447, 0.01336, 0.0136, 0.0135, 0.01283, 0.01313, 0.01327, 0.01457, 0.0137, 0.01312, 0.01422, 0.01356, 0.01359, 0.01298, 0.01365, 0.01348, 0.01345, 0.01333, 0.01313, 0.01267, 0.01374, 0.01318, 0.01263, 0.01428, 0.01505, 0.01249, 0.01321, 0.01297, 0.01239, 0.01264, 0.01257, 0.01217, 0.0122, 0.0122, 0.01198, 0.0127, 0.01478, 0.01247, 0.01244, 0.01216, 0.0125, 0.01376, 0.01279, 0.01258, 0.01297, 0.01503, 0.01572, 0.01498, 0.01367, 0.01289, 0.01246, 0.01343, 0.01425, 0.01243, 0.01244, 0.0128, 0.01271, 0.01294, 0.01314, 0.01241, 0.01281, 0.01413, 0.01267, 0.01236, 0.01278, 0.01212, 0.01253, 0.01258, 0.01307, 0.0136, 0.01249, 0.0128, 0.01213, 0.01404, 0.01391, 0.01279, 0.0132, 0.01312, 0.01257, 0.01296, 0.01486, 0.01348, 0.01408, 0.01312, 0.01352, 0.01264, 0.01361, 0.01373, 0.01287, 0.01447, 0.01273, 0.0134, 0.01256, 0.01471, 0.01292, 0.01296, 0.01556, 0.01269, 0.01275, 0.01262, 0.01243, 0.01254, 0.01292, 0.01389, 0.01214, 0.01259, 0.01322, 0.01252, 0.01284, 0.01326, 0.01406, 0.01221, 0.01209, 0.01445, 0.01235, 0.01243, 0.01521, 0.01303, 0.01308, 0.01361, 0.01255, 0.01227, 0.01283, 0.01623, 0.01515, 0.01582, 0.01716, 0.01637, 0.01737, 0.01732, 0.01611, 0.01683, 0.01561, 0.01502, 0.01608, 0.015, 0.01699, 0.017, 0.0159, 0.01671, 0.016, 0.01726, 0.01765, 0.01553, 0.01619, 0.01499, 0.01559, 0.01568, 0.01579]}, "forward-recv-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [5.69523, 0.02394, 0.02348, 0.02329, 0.02364, 0.02293, 0.02376, 0.0234, 0.02371, 0.02468, 0.02324, 0.02396, 0.02501, 0.0256, 0.02468, 0.02408, 0.02484, 0.02364, 0.02322, 0.02328, 0.02362, 0.02407, 0.02284, 0.02422, 0.02402, 0.02397, 0.0233, 0.02317, 0.0238, 0.02388, 0.02326, 0.02363, 0.02416, 0.02354, 0.02309, 0.02365, 0.02345, 0.02308, 0.02317, 0.02313, 0.02335, 0.023, 0.02326, 0.0233, 0.0238, 0.02375, 0.02493, 0.02394, 0.02412, 0.0238, 0.02339, 0.02351, 0.02335, 0.0266, 0.0234, 0.02405, 0.02373, 0.0237, 0.02385, 0.02378, 0.02359, 0.02689, 0.02333, 0.02338, 0.02322, 0.02354, 0.0233, 0.02329, 0.02452, 0.02693, 0.02345, 0.02326, 0.02375, 0.02341, 0.02388, 0.0233, 0.02333, 0.02476, 0.02365, 0.0236, 0.02356, 0.02344, 0.02363, 0.02334, 0.0233, 0.02313, 0.02387, 0.02342, 0.02362, 0.02319, 0.02461, 0.02359, 0.0234, 0.02397, 0.02524, 0.02331, 0.02386, 0.02533, 0.02416, 0.02445, 0.02309, 0.02381, 0.02352, 0.02393, 0.02341, 0.02313, 0.02371, 0.02364, 0.02387, 0.02355, 0.02449, 0.02408, 0.02363, 0.02317, 0.02331, 0.0239, 0.02385, 0.0235, 0.02309, 0.0239, 0.02371, 0.0232, 0.0236, 0.0237, 0.0241, 0.02434, 0.02347, 0.02522, 0.02461, 0.02418, 0.02376, 0.02318, 0.02386, 0.02379, 0.02334, 0.02333, 0.02452, 0.02365, 0.02364, 0.02368, 0.02399, 0.02426, 0.02355, 0.02382, 0.02423, 0.02653, 0.02379, 0.02327, 0.02414, 0.02462, 0.02631, 0.02476, 0.02402, 0.02578, 0.02427, 0.02403, 0.02365, 
0.02467, 0.02569, 0.02364, 0.02413, 0.02503, 0.02507, 0.02438, 0.02416, 0.02449, 0.02518, 0.02522, 0.02409, 0.02476, 0.02466, 0.02482, 0.02437, 0.02418, 0.0241, 0.02501, 0.02478, 0.02401, 0.02483, 0.02545, 0.02468, 0.02391, 0.02507, 0.02466, 0.02414, 0.02353, 0.0242, 0.02477, 0.02356, 0.02431, 0.02316, 0.02439, 0.02399, 0.02385, 0.02354, 0.02465, 0.02547, 0.02508, 0.02419, 0.02477, 0.01768, 0.02429, 0.02356, 0.02577, 0.02434, 0.02473, 0.02445, 0.02378, 0.02439, 0.02389, 0.02352, 0.02408, 0.02328, 0.02452, 0.02367, 0.02386, 0.02413, 0.02431, 0.02462, 0.02369, 0.02376, 0.02491, 0.02439, 0.02403, 0.02377, 0.02464, 0.02435, 0.02348, 0.02371, 0.0252, 0.02368, 0.02387, 0.02399, 0.02427, 0.02729, 0.02472, 0.02405, 0.02401, 0.02437, 0.02492, 0.02402, 0.02449, 0.02457, 0.02418, 0.02405, 0.02463, 0.02494, 0.02411, 0.02427, 0.02434, 0.02507, 0.02381, 0.02365, 0.02529, 0.02396, 0.02466, 0.0235, 0.02361, 0.02374, 0.02465, 0.02472, 0.02388, 0.02377, 0.02493, 0.02356, 0.02375, 0.024, 0.02421, 0.02437, 0.02348, 0.02314, 0.02411, 0.02461, 0.02389, 0.0247, 0.02407, 0.0246, 0.02474, 0.02412, 0.02434, 0.02469, 0.02369, 0.02397, 0.02513, 0.02411, 0.02363, 0.02383, 0.02511, 0.02474, 0.02401, 0.02392, 0.0241, 0.02386, 0.02404, 0.02408, 0.02406, 0.02452, 0.02544, 0.02797, 0.0258, 0.02429, 0.02521, 0.02549, 0.02471, 0.02437, 0.02521, 0.02445, 0.0245, 0.0237, 0.02743, 0.02449, 0.02397, 0.02369, 0.02461, 0.02423, 0.02547, 0.02366, 0.02466, 0.02473, 0.02447, 0.02511, 0.02472, 0.02518, 0.02397, 0.02404, 0.02493, 0.02555, 0.02496, 0.02436, 0.02395, 0.02507, 0.02456, 0.0243, 0.02385, 0.02539, 0.02483, 0.02431, 0.02399, 0.02469, 0.0254, 0.02512, 0.03429, 0.0364, 0.03571, 0.03561, 0.03474, 0.02415, 0.02604, 0.02499, 0.02494, 0.0246, 0.02567, 0.02501, 0.02468, 0.02397, 0.02793, 0.02468, 0.02491, 0.02539, 0.02409, 0.02475, 0.02441, 0.02562, 0.02394, 0.02557, 0.02449, 0.02381, 0.02425, 0.02474, 0.02431, 0.02389, 0.02357, 0.02526, 0.0266, 0.02574, 0.02347, 0.02485, 0.02498, 0.02413, 0.02387, 0.02515, 0.02481, 0.02439, 0.02404, 0.02457, 0.02585, 0.02502, 0.02382, 0.02429, 0.02509, 0.02444, 0.02418, 0.02439, 0.02469, 0.0242, 0.0249, 0.02556, 0.0254, 0.02589, 0.02426]}, "forward-send-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [2.90859, 0.00013, 0.00013, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00014, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00041, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00011, 0.00013, 0.00011, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00011, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 
0.00012, 0.00012, 0.00014, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00013, 0.00012, 0.00013, 0.00012, 0.00012, 0.00013, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00013, 0.00012, 0.00014, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00014, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00014, 0.00012, 0.00014, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00013, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00013, 0.00012, 0.00014, 0.00012, 0.00011, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00014, 0.00017, 0.00016, 0.00012, 0.00017, 0.00011, 0.00012, 0.00012, 0.00012, 0.00014, 0.00013, 0.00012, 0.00013, 0.00012, 0.00013, 0.00013, 0.00014, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00011, 0.00013, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00013, 0.00013, 0.00012, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00011, 0.00012, 0.00012, 0.00013, 0.00012, 0.00013, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00012, 0.00011, 0.00012, 0.00013, 0.00013]}, "backward-recv-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.02368, 0.02348, 0.02394, 0.02364, 0.02449, 0.02409, 0.02505, 0.02374, 0.02528, 0.0259, 0.02358, 0.0242, 0.02637, 0.02354, 0.0251, 0.02307, 0.02342, 0.02386, 0.02487, 0.02353, 0.02241, 0.02358, 0.02336, 0.02385, 0.02423, 0.02362, 0.02431, 0.02368, 0.02447, 0.02388, 0.02278, 0.02395, 0.02289, 0.02372, 0.0236, 0.02367, 0.02368, 0.02432, 0.02399, 0.02338, 0.02355, 0.02343, 0.02344, 0.02565, 0.02464, 0.02367, 0.02563, 0.02365, 0.02498, 0.02382, 0.02437, 0.02419, 0.02505, 0.02388, 0.02389, 0.02396, 0.02377, 0.02399, 0.02396, 0.02304, 0.02377, 0.02724, 0.02399, 0.02408, 0.02416, 0.02465, 0.02583, 0.02394, 0.02408, 0.02617, 0.02288, 0.02529, 0.0259, 0.02468, 0.02405, 0.02424, 0.02366, 0.02431, 0.02501, 0.02416, 0.02392, 0.02398, 0.02395, 0.02361, 0.02493, 0.02419, 0.02355, 0.02345, 0.02429, 0.02305, 0.02433, 0.02418, 0.02434, 0.02361, 0.02432, 0.02418, 0.0234, 0.02415, 0.02349, 0.02463, 0.02416, 0.02344, 0.02561, 0.02358, 0.02435, 0.024, 0.02522, 0.02503, 0.02562, 0.02467, 0.02425, 0.02421, 0.02382, 0.0242, 0.02401, 0.02416, 0.02588, 0.0247, 0.02434, 0.02473, 0.02524, 0.02511, 0.02494, 0.02375, 0.02595, 0.02432, 0.02337, 0.02414, 0.02486, 0.0245, 
0.02433, 0.02431, 0.02365, 0.02411, 0.02342, 0.02427, 0.02467, 0.02469, 0.02352, 0.02452, 0.02337, 0.02463, 0.02478, 0.02463, 0.02462, 0.02668, 0.02409, 0.02498, 0.02302, 0.02351, 0.02626, 0.02404, 0.02319, 0.02423, 0.02437, 0.02371, 0.02423, 0.02372, 0.02372, 0.02417, 0.02394, 0.02401, 0.02428, 0.02406, 0.02443, 0.02396, 0.02341, 0.02439, 0.02392, 0.02389, 0.02372, 0.02654, 0.02468, 0.02413, 0.02396, 0.02411, 0.02434, 0.02436, 0.02416, 0.02432, 0.02413, 0.02462, 0.0275, 0.02423, 0.02396, 0.027, 0.02446, 0.02452, 0.025, 0.02481, 0.02389, 0.02952, 0.02408, 0.02468, 0.02725, 0.02317, 0.02402, 0.02623, 0.02326, 0.02418, 0.0249, 0.0242, 0.02443, 0.02409, 0.0256, 0.02406, 0.02355, 0.02409, 0.02372, 0.02539, 0.02507, 0.02461, 0.02483, 0.02426, 0.02423, 0.02431, 0.02427, 0.02447, 0.02382, 0.02564, 0.02441, 0.02556, 0.02403, 0.02573, 0.02428, 0.02401, 0.02513, 0.02382, 0.02364, 0.02454, 0.02477, 0.02397, 0.0253, 0.02422, 0.02361, 0.02617, 0.02493, 0.02542, 0.0241, 0.02392, 0.02412, 0.02369, 0.02392, 0.02434, 0.02381, 0.02437, 0.02629, 0.02397, 0.0244, 0.02457, 0.02396, 0.02392, 0.02359, 0.02513, 0.02438, 0.02434, 0.02525, 0.02462, 0.02406, 0.02675, 0.0243, 0.02493, 0.02442, 0.02465, 0.02474, 0.02404, 0.02508, 0.02549, 0.02338, 0.02287, 0.02444, 0.02513, 0.02493, 0.02474, 0.0248, 0.02431, 0.0245, 0.02863, 0.02409, 0.02427, 0.02391, 0.02367, 0.02441, 0.02399, 0.02425, 0.02368, 0.0241, 0.02393, 0.02417, 0.02474, 0.02369, 0.02638, 0.02436, 0.02611, 0.02434, 0.02576, 0.02383, 0.02442, 0.02353, 0.02419, 0.02477, 0.02466, 0.02579, 0.02455, 0.0242, 0.02475, 0.02338, 0.02403, 0.02538, 0.02364, 0.02364, 0.02423, 0.02324, 0.02408, 0.02434, 0.02456, 0.0243, 0.02403, 0.02448, 0.02338, 0.02413, 0.02447, 0.02323, 0.02365, 0.02506, 0.02554, 0.02565, 0.02416, 0.025, 0.02532, 0.02482, 0.02683, 0.02458, 0.02498, 0.02491, 0.02422, 0.0243, 0.02428, 0.02417, 0.02376, 0.02431, 0.02339, 0.02362, 0.02365, 0.02371, 0.02421, 0.02393, 0.02386, 0.02374, 0.0249, 0.02454, 0.02401, 0.02418, 0.02411, 0.02461, 0.02418, 0.02303, 0.02369, 0.02384, 0.02685, 0.02364, 0.02436, 0.02417, 0.02486, 0.02423, 0.02448, 0.02462, 0.02366, 0.02415, 0.02421, 0.0243, 0.02378, 0.02574, 0.02403, 0.02374, 0.02434, 0.02432, 0.02579, 0.02343, 0.02354, 0.02396, 0.02392, 0.02373, 0.02416, 0.02348, 0.02355, 0.02427, 0.0252, 0.02486, 0.02405, 0.02393, 0.0234, 0.02443, 0.02418, 0.02422, 0.02504, 0.02408, 0.0243, 0.02762, 0.02382]}, "backward-send-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00016, 0.00016, 0.00019, 0.00016, 0.00017, 0.00018, 0.00016, 0.00016, 0.00019, 0.00016, 0.00017, 0.00016, 0.00016, 0.00016, 0.00018, 0.00018, 0.00016, 0.00016, 0.00019, 0.00016, 0.00018, 0.00019, 0.00018, 0.00015, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00017, 0.00018, 0.00016, 0.00016, 0.00016, 0.00016, 0.00017, 0.00018, 0.00019, 0.00018, 0.00018, 0.00018, 0.00017, 0.00018, 0.00016, 0.00016, 0.00016, 0.00017, 0.00016, 0.00017, 0.00019, 0.00016, 0.00017, 0.00017, 0.00016, 0.00019, 0.00018, 0.00018, 0.00016, 0.00018, 0.00016, 0.00017, 0.00016, 0.00018, 0.00016, 0.00019, 0.00018, 0.00018, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00018, 0.00016, 0.00017, 0.00016, 0.00018, 0.00018, 0.00018, 0.00017, 0.00017, 0.00017, 0.00018, 0.00016, 0.00018, 0.00018, 0.00019, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00019, 0.00016, 0.00018, 0.00016, 0.00017, 0.00016, 0.00018, 0.00016, 0.00018, 0.00016, 0.00017, 0.00032, 0.00018, 0.00016, 0.00016, 0.00016, 0.00018, 0.00018, 0.00018, 0.00019, 0.00016, 
0.00016, 0.00016, 0.00016, 0.00018, 0.00019, 0.00016, 0.00016, 0.00016, 0.00017, 0.00019, 0.00018, 0.00016, 0.00018, 0.00018, 0.00016, 0.00018, 0.00017, 0.00016, 0.00016, 0.00018, 0.00016, 0.00018, 0.00017, 0.00016, 0.00017, 0.00025, 0.00016, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00016, 0.00018, 0.00017, 0.00019, 0.00016, 0.00016, 0.00018, 0.00018, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00031, 0.00016, 0.00016, 0.00025, 0.00018, 0.00016, 0.00016, 0.00016, 0.00018, 0.00017, 0.00016, 0.00018, 0.00018, 0.00016, 0.00016, 0.00022, 0.00016, 0.00016, 0.00016, 0.00018, 0.00016, 0.00017, 0.00017, 0.00015, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00017, 0.00018, 0.00019, 0.00017, 0.00017, 0.00018, 0.00016, 0.00018, 0.00018, 0.00018, 0.00015, 0.00016, 0.00017, 0.00016, 0.00016, 0.00017, 0.00016, 0.00017, 0.00018, 0.00017, 0.00018, 0.00018, 0.00016, 0.00016, 0.00018, 0.00018, 0.00016, 0.00016, 0.00016, 0.00018, 0.00017, 0.00017, 0.00018, 0.00018, 0.00016, 0.00017, 0.00017, 0.00016, 0.00017, 0.00019, 0.00019, 0.00028, 0.00017, 0.00017, 0.00016, 0.00016, 0.00016, 0.00016, 0.00015, 0.00017, 0.00016, 0.00016, 0.00017, 0.00018, 0.00016, 0.00016, 0.00016, 0.00017, 0.00018, 0.0002, 0.00016, 0.00017, 0.00017, 0.00018, 0.00018, 0.00016, 0.00016, 0.00017, 0.00018, 0.00018, 0.00016, 0.00023, 0.00016, 0.00016, 0.00016, 0.00017, 0.00016, 0.00019, 0.00017, 0.00016, 0.00016, 0.00015, 0.00016, 0.00018, 0.00019, 0.00016, 0.00018, 0.00017, 0.00016, 0.00017, 0.00018, 0.00018, 0.00022, 0.00016, 0.00016, 0.0002, 0.00019, 0.00017, 0.00016, 0.00018, 0.00016, 0.00016, 0.00017, 0.00016, 0.00017, 0.00019, 0.00016, 0.00016, 0.00018, 0.00017, 0.00018, 0.00015, 0.00016, 0.00016, 0.00019, 0.00018, 0.00018, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00017, 0.00016, 0.00016, 0.00018, 0.00016, 0.00016, 0.00016, 0.00018, 0.00018, 0.00018, 0.00016, 0.00017, 0.00022, 0.00016, 0.00017, 0.00016, 0.00018, 0.00016, 0.00018, 0.00018, 0.00018, 0.00017, 0.00016, 0.00017, 0.00016, 0.00026, 0.00018, 0.00016, 0.00016, 0.00016, 0.00017, 0.00018, 0.00016, 0.00016, 0.00019, 0.00017, 0.00018, 0.00016, 0.00016, 0.00016, 0.00016, 0.00018, 0.00019, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00018, 0.00019, 0.00016, 0.00018, 0.00018, 0.00016, 0.00017, 0.00018, 0.00031, 0.00018, 0.00017, 0.00016, 0.00016, 0.00016, 0.00016, 0.00016, 0.00017, 0.00016, 0.00016, 0.00017, 0.00016, 0.00016, 0.00019, 0.00018, 0.00016, 0.00017, 0.00018, 0.00018, 0.00018, 0.00016, 0.00018, 0.00019, 0.00016, 0.00019]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [7.32739, 0.12477, 0.12666, 0.128, 0.12835, 0.12967, 0.1275, 0.13153, 0.12112, 0.12816, 0.12128, 0.1203, 0.12267, 0.122, 0.12207, 0.1236, 0.12689, 0.12116, 0.11515, 0.1236, 0.11731, 0.11801, 0.12855, 0.12095, 0.12421, 0.12165, 0.12224, 0.11784, 0.12171, 0.11872, 0.11626, 0.12467, 0.1241, 0.11907, 0.11776, 0.12636, 0.11891, 0.12432, 0.12301, 0.12655, 0.12996, 0.13374, 0.12156, 0.12801, 0.13689, 0.1275, 0.13219, 0.13231, 0.13041, 0.12833, 0.13716, 0.13099, 0.1317, 0.1252, 0.12341, 0.12286, 0.12995, 0.12336, 0.13226, 0.13381, 0.12738, 0.13598, 0.13071, 0.13531, 0.14271, 0.14199, 0.13871, 0.142, 0.14001, 0.14332, 0.13666, 0.13328, 0.14543, 0.14315, 0.13564, 0.15173, 0.14153, 0.15109, 0.14782, 0.14157, 0.14168, 0.14516, 0.13449, 0.13595, 0.13466, 0.13854, 0.13617, 0.13542, 0.13551, 0.13682, 0.13396, 0.13632, 0.12977, 0.13179, 0.13436, 0.12818, 0.1318, 0.15065, 0.14138, 0.14121, 0.12829, 0.1243, 
0.12753, 0.13425, 0.13136, 0.13043, 0.12709, 0.1367, 0.13831, 0.13249, 0.13782, 0.13352, 0.13464, 0.12973, 0.1292, 0.13364, 0.13332, 0.13424, 0.12997, 0.13345, 0.12818, 0.13196, 0.13345, 0.13333, 0.13254, 0.13659, 0.13184, 0.13348, 0.12597, 0.13454, 0.13192, 0.1375, 0.13257, 0.12337, 0.1345, 0.13062, 0.13753, 0.13119, 0.13426, 0.13825, 0.13839, 0.13388, 0.13726, 0.12898, 0.13377, 0.13935, 0.1381, 0.13416, 0.13521, 0.13765, 0.1373, 0.13402, 0.12531, 0.13371, 0.14559, 0.13302, 0.12679, 0.13579, 0.1348, 0.13764, 0.13247, 0.13464, 0.13235, 0.13117, 0.12868, 0.13327, 0.13496, 0.1324, 0.13728, 0.13904, 0.13275, 0.14304, 0.14323, 0.14887, 0.14315, 0.1468, 0.14026, 0.14574, 0.14975, 0.14342, 0.14555, 0.13943, 0.1403, 0.1444, 0.14205, 0.14177, 0.1462, 0.14686, 0.14634, 0.14245, 0.14549, 0.14618, 0.14887, 0.13512, 0.13541, 0.13381, 0.14182, 0.14007, 0.14152, 0.13605, 0.13807, 0.13717, 0.13509, 0.13546, 0.13698, 0.13358, 0.13623, 0.13205, 0.12316, 0.13181, 0.14145, 0.1317, 0.13396, 0.14106, 0.13611, 0.14089, 0.14373, 0.13469, 0.1384, 0.14246, 0.13291, 0.14068, 0.13738, 0.13421, 0.13749, 0.13088, 0.13458, 0.13609, 0.133, 0.14241, 0.13922, 0.13388, 0.14182, 0.13246, 0.13971, 0.14107, 0.13164, 0.13039, 0.13705, 0.12577, 0.13184, 0.13088, 0.13144, 0.13487, 0.13555, 0.12695, 0.23517, 0.1322, 0.13486, 0.16077, 0.13981, 0.23534, 0.13332, 0.13076, 0.13464, 0.12966, 0.13057, 0.13577, 0.13162, 0.12711, 0.13253, 0.13694, 0.13253, 0.1291, 0.13231, 0.13615, 0.13278, 0.13306, 0.13739, 0.13635, 0.12928, 0.12884, 0.13997, 0.13381, 0.13621, 0.14094, 0.1347, 0.13224, 0.13078, 0.1333, 0.14059, 0.13768, 0.13345, 0.1394, 0.13204, 0.13595, 0.14267, 0.13406, 0.13447, 0.13958, 0.13493, 0.13657, 0.13256, 0.13241, 0.14205, 0.13985, 0.13748, 0.14438, 0.14105, 0.13704, 0.14125, 0.13958, 0.1371, 0.13476, 0.13221, 0.14116, 0.1413, 0.13323, 0.13777, 0.13451, 0.13785, 0.13827, 0.13489, 0.13565, 0.13632, 0.14132, 0.13954, 0.13567, 0.13798, 0.1411, 0.13641, 0.1346, 0.13417, 0.13059, 0.14076, 0.14564, 0.14703, 0.14826, 0.14723, 0.14169, 0.14389, 0.14245, 0.14606, 0.1389, 0.14429, 0.14006, 0.13171, 0.13461, 0.13482, 0.14111, 0.13415, 0.14396, 0.15035, 0.14874, 0.1481, 0.14804, 0.13867, 0.14775, 0.13614, 0.13103, 0.13832, 0.13379, 0.15425, 0.1329, 0.22576, 0.13539, 0.12996, 0.16565, 0.12569, 0.12696, 0.12758, 0.13901, 0.13127, 0.13219, 0.13915, 0.13046, 0.12996, 0.1351, 0.13312, 0.13428, 0.13394, 0.13287, 0.13398, 0.13368, 0.12682, 0.13561, 0.13323, 0.1307, 0.13416, 0.13272, 0.13142, 0.136, 0.13057, 0.13073, 0.13345, 0.13692, 0.13433, 0.13536, 0.13216, 0.13483, 0.13431, 0.13132, 0.13241, 0.13481, 0.13004, 0.13405, 0.12911, 0.13104, 0.13208, 0.13389]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [2.85465, 0.00835, 0.00699, 0.00741, 0.00706, 0.00797, 0.0072, 0.00701, 0.00796, 0.0097, 0.00702, 0.00774, 0.00734, 0.00774, 0.0089, 0.00828, 0.00699, 0.00781, 0.00859, 0.00782, 0.00885, 0.00849, 0.00699, 0.00689, 0.00726, 0.00698, 0.00708, 0.00765, 0.00904, 0.00754, 0.00764, 0.00719, 0.00699, 0.00717, 0.00867, 0.00723, 0.00713, 0.00719, 0.00696, 0.00695, 0.0071, 0.00724, 0.00738, 0.00696, 0.00708, 0.00738, 0.00771, 0.00745, 0.00704, 0.00878, 0.00742, 0.00713, 0.00774, 0.00714, 0.00691, 0.01011, 0.00831, 0.00755, 0.00829, 0.00713, 0.00712, 0.00776, 0.00714, 0.00703, 0.00812, 0.00754, 0.00844, 0.00686, 0.00703, 0.00718, 0.00709, 0.00784, 0.00743, 0.00744, 0.00705, 0.00773, 0.0077, 0.00752, 0.00823, 0.00721, 0.00697, 0.00777, 0.00754, 0.00704, 0.00687, 0.00767, 0.00697, 0.00724, 0.0081, 
0.0081, 0.00692, 0.00799, 0.00739, 0.00705, 0.00849, 0.00694, 0.00742, 0.00767, 0.00711, 0.00824, 0.00696, 0.00742, 0.00848, 0.00758, 0.00786, 0.00691, 0.00711, 0.00709, 0.00692, 0.00764, 0.00779, 0.00699, 0.00727, 0.00768, 0.007, 0.0078, 0.00701, 0.00735, 0.00759, 0.00875, 0.00792, 0.00727, 0.00737, 0.00715, 0.00787, 0.00741, 0.00751, 0.00855, 0.00692, 0.00786, 0.00751, 0.00811, 0.00715, 0.00699, 0.00709, 0.00705, 0.00737, 0.0082, 0.00828, 0.00883, 0.00777, 0.00806, 0.00752, 0.0074, 0.00758, 0.00764, 0.00798, 0.00876, 0.0073, 0.00773, 0.00824, 0.00728, 0.00773, 0.00775, 0.00706, 0.00716, 0.00698, 0.00735, 0.00857, 0.00716, 0.00715, 0.00888, 0.00742, 0.00709, 0.00773, 0.00707, 0.00785, 0.00751, 0.00723, 0.00781, 0.00732, 0.00731, 0.00751, 0.00926, 0.00734, 0.00835, 0.00815, 0.00834, 0.00863, 0.00698, 0.00697, 0.00866, 0.00749, 0.00697, 0.00797, 0.00761, 0.00705, 0.00898, 0.00815, 0.00711, 0.00733, 0.00846, 0.00756, 0.00807, 0.00707, 0.00876, 0.00728, 0.00798, 0.00766, 0.00737, 0.00998, 0.00838, 0.0077, 0.00751, 0.00848, 0.00695, 0.00705, 0.00981, 0.00734, 0.00923, 0.0071, 0.00714, 0.00728, 0.00728, 0.0085, 0.00981, 0.00871, 0.00696, 0.00863, 0.00936, 0.01089, 0.00793, 0.00711, 0.00971, 0.00701, 0.00936, 0.00758, 0.00816, 0.00884, 0.00803, 0.00847, 0.01006, 0.00978, 0.00825, 0.0081, 0.00787, 0.00813, 0.00997, 0.00754, 0.00893, 0.00765, 0.00713, 0.0078, 0.0076, 0.00705, 0.00918, 0.11069, 0.00794, 0.00727, 0.07524, 0.00865, 0.00813, 0.007, 0.00696, 0.0071, 0.00698, 0.00706, 0.00709, 0.00901, 0.00738, 0.00798, 0.00783, 0.00755, 0.00757, 0.00792, 0.0078, 0.00758, 0.00842, 0.00991, 0.00945, 0.00712, 0.00835, 0.00735, 0.00734, 0.00709, 0.00708, 0.00953, 0.00709, 0.00704, 0.00922, 0.00937, 0.00856, 0.00712, 0.00846, 0.01121, 0.00908, 0.00701, 0.01037, 0.00813, 0.00814, 0.00709, 0.00791, 0.0074, 0.00756, 0.00813, 0.00849, 0.00705, 0.00877, 0.00705, 0.00702, 0.00784, 0.00699, 0.00862, 0.00977, 0.0078, 0.00851, 0.00917, 0.00814, 0.00962, 0.0071, 0.00832, 0.01014, 0.00711, 0.00716, 0.00781, 0.00825, 0.01002, 0.00758, 0.00695, 0.01037, 0.00713, 0.0097, 0.00977, 0.00754, 0.00863, 0.00703, 0.00781, 0.00826, 0.00731, 0.00742, 0.00778, 0.00814, 0.00835, 0.00713, 0.00837, 0.0071, 0.00718, 0.00856, 0.00694, 0.00858, 0.00741, 0.00763, 0.00727, 0.00894, 0.00892, 0.0078, 0.00875, 0.00972, 0.00704, 0.00701, 0.00812, 0.00733, 0.0694, 0.00715, 0.09935, 0.00722, 0.00697, 0.0823, 0.00708, 0.00762, 0.00706, 0.00717, 0.00712, 0.0071, 0.00708, 0.00694, 0.00712, 0.00717, 0.00703, 0.00723, 0.00767, 0.007, 0.00705, 0.00716, 0.00837, 0.00992, 0.00743, 0.0076, 0.00795, 0.00785, 0.00774, 0.00828, 0.00864, 0.00714, 0.00767, 0.00727, 0.0089, 0.00821, 0.00781, 0.00855, 0.00777, 0.00721, 0.00716, 0.00875, 0.00792, 0.00919, 0.00807, 0.00884, 0.00881, 0.0088]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [4e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 
1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 3e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 1e-05, 2e-05, 2e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00055, 0.00031, 0.00031, 0.00031, 0.00035, 0.00031, 0.00031, 0.00031, 0.00032, 0.00032, 0.00032, 0.00031, 0.00032, 0.00031, 0.00031, 0.00031, 0.00034, 0.00031, 0.00031, 0.00031, 0.00036, 0.00031, 0.00031, 0.00031, 0.00035, 0.00032, 0.00035, 0.00032, 0.00031, 0.00034, 0.00036, 0.00032, 0.00033, 0.00033, 0.00032, 0.00032, 0.00036, 0.00036, 0.00036, 0.00036, 0.00031, 0.00034, 0.00036, 0.00031, 0.00032, 0.00032, 0.00031, 0.00032, 0.00032, 0.00032, 0.00033, 0.00032, 0.00032, 0.00031, 0.00032, 0.00032, 0.00032, 0.00032, 0.00036, 0.00032, 0.00031, 0.00032, 0.00036, 0.00032, 0.00032, 0.00036, 0.00036, 0.00031, 0.00032, 0.00031, 0.00032, 0.00031, 0.00032, 0.00031, 0.00035, 0.00032, 0.00032, 0.00031, 0.00033, 0.00032, 0.00032, 0.00031, 0.00031, 0.00036, 0.00032, 0.00031, 0.00032, 0.00033, 0.00036, 0.00031, 0.00037, 0.00032, 0.00035, 0.00032, 0.00031, 0.00035, 0.00036, 0.00032, 0.00031, 0.00032, 0.00036, 0.00031, 0.00032, 0.00036, 0.00031, 0.00034, 0.00031, 0.00032, 0.00032, 0.00031, 0.00036, 0.00032, 0.00036, 0.00031, 0.00037, 0.00032, 0.00037, 0.0004, 0.00031, 0.00032, 0.00035, 0.00031, 0.00032, 0.00031, 0.00031, 0.00031, 0.00032, 0.00031, 0.00033, 0.00032, 0.00031, 0.00032, 0.00032, 0.00032, 0.00032, 0.00031, 0.00036, 0.00031, 0.00031, 0.00033, 0.00036, 0.00031, 0.00032, 0.00032, 0.00032, 0.00036, 0.00031, 
0.00035, 0.00032, 0.00039, 0.00033, 0.00032, 0.00031, 0.00035, 0.00032, 0.00031, 0.00032, 0.00035, 0.00031, 0.00032, 0.00031, 0.00032, 0.00031, 0.00031, 0.00034, 0.00036, 0.00036, 0.00031, 0.00032, 0.00032, 0.00031, 0.00035, 0.00036, 0.00032, 0.00031, 0.00032, 0.00032, 0.00036, 0.00033, 0.00035, 0.00031, 0.00031, 0.00031, 0.00032, 0.00036, 0.00037, 0.00031, 0.00032, 0.00031, 0.00032, 0.00031, 0.00032, 0.00032, 0.00036, 0.00037, 0.00031, 0.00032, 0.00033, 0.00032, 0.00032, 0.00045, 0.00031, 0.00031, 0.00038, 0.00032, 0.00036, 0.00034, 0.00031, 0.00032, 0.00036, 0.00032, 0.00031, 0.00036, 0.00031, 0.00031, 0.00031, 0.00036, 0.00031, 0.00032, 0.00032, 0.0004, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00031, 0.00037, 0.00031, 0.00031, 0.00032, 0.00031, 0.00032, 0.00032, 0.00036, 0.00032, 0.00035, 0.00032, 0.00036, 0.00038, 0.00036, 0.00036, 0.00032, 0.00036, 0.00033, 0.00032, 0.00032, 0.00031, 0.00036, 0.00031, 0.00033, 0.00033, 0.00032, 0.00037, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00031, 0.00032, 0.00031, 0.00032, 0.00032, 0.00032, 0.00031, 0.00032, 0.00032, 0.00032, 0.00037, 0.00032, 0.00031, 0.00032, 0.00032, 0.00036, 0.00032, 0.00033, 0.00031, 0.00032, 0.00031, 0.00032, 0.00031, 0.00032, 0.00032, 0.00032, 0.00033, 0.00032, 0.00036, 0.00032, 0.00032, 0.00037, 0.00031, 0.00031, 0.00031, 0.00032, 0.00032, 0.00032, 0.00031, 0.00031, 0.00037, 0.00035, 0.00036, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00031, 0.00036, 0.00032, 0.00031, 0.00032, 0.00036, 0.00032, 0.00032, 0.00032, 0.00036, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00031, 0.00032, 0.00033, 0.00038, 0.00034, 0.00036, 0.00032, 0.00033, 0.00032, 0.00032, 0.00035, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00035, 0.00032, 0.00032, 0.00031, 0.00032, 0.00036, 0.00036, 0.00032, 0.00032, 0.00032, 0.00036, 0.00032, 0.00032, 0.00031, 0.00036, 0.00032, 0.00036, 0.00033, 0.00031, 0.00031, 0.00032, 0.00032, 0.00032, 0.00032, 0.00032, 0.00036, 0.00035, 0.00031, 0.00032, 0.00036, 0.00032, 0.00033, 0.00036, 0.00032, 0.00032, 0.00032, 0.00031, 0.00032, 0.00033, 0.00032, 0.00031, 0.00032, 0.00035, 0.00032, 0.00032, 0.00035, 0.00032, 0.00035, 0.00032, 0.00037, 0.00032, 0.00031, 0.00037, 0.00032, 0.00035, 0.00031, 0.00036, 0.00032]}, "all-grads-sync-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [2.11402, 0.00057, 0.00063, 0.00057, 0.00058, 0.00057, 0.00058, 0.00058, 0.00057, 0.00063, 0.00057, 0.00058, 0.00058, 0.00057, 0.00057, 0.00058, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00066, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.0006, 0.00059, 0.00059, 0.00063, 0.00059, 0.00058, 0.00058, 0.00059, 0.00063, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.0006, 0.00058, 0.00058, 0.00058, 0.00057, 0.0007, 0.00059, 0.00064, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.00059, 0.00061, 0.00058, 0.00064, 0.00058, 0.00059, 0.00059, 0.00059, 0.00064, 0.00058, 0.00058, 0.00059, 0.00059, 0.00058, 0.00058, 0.00059, 0.00059, 0.00059, 0.00057, 0.00059, 0.0006, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00058, 0.00065, 0.00058, 0.00059, 0.00058, 0.00064, 0.00059, 0.00059, 0.00059, 0.00062, 0.00059, 0.00064, 0.00059, 0.00059, 0.00059, 0.0006, 0.00058, 0.00058, 0.00059, 0.00059, 0.00059, 0.00059, 0.00059, 0.00058, 0.00059, 0.00059, 
0.00058, 0.00064, 0.00065, 0.00059, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00059, 0.00059, 0.00059, 0.00058, 0.00058, 0.00061, 0.0006, 0.00067, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.00057, 0.00059, 0.00059, 0.00061, 0.00059, 0.0006, 0.00064, 0.00058, 0.00059, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.0006, 0.00058, 0.00059, 0.0006, 0.00059, 0.00059, 0.00057, 0.00058, 0.00058, 0.00058, 0.0006, 0.0006, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00064, 0.00059, 0.00059, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00062, 0.00058, 0.00059, 0.00059, 0.00059, 0.00058, 0.00063, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00064, 0.0006, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.0006, 0.00064, 0.00058, 0.00058, 0.0006, 0.0006, 0.00057, 0.00058, 0.00059, 0.00059, 0.00059, 0.00062, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00059, 0.00059, 0.0006, 0.00059, 0.00059, 0.00058, 0.00058, 0.00064, 0.00059, 0.00064, 0.00058, 0.00058, 0.00059, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00065, 0.0006, 0.00057, 0.00059, 0.00059, 0.00058, 0.00058, 0.00058, 0.0006, 0.00058, 0.00058, 0.00059, 0.00059, 0.00057, 0.00058, 0.00057, 0.00064, 0.00057, 0.00058, 0.00068, 0.00058, 0.00058, 0.00058, 0.00059, 0.00059, 0.00059, 0.00058, 0.00058, 0.00057, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00057, 0.00059, 0.00062, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.0006, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00059, 0.00058, 0.00057, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00057, 0.00059, 0.00058, 0.00059, 0.00059, 0.0006, 0.00058, 0.00058, 0.00059, 0.00058, 0.00071, 0.00058, 0.00064, 0.00059, 0.00059, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.00063, 0.00059, 0.00058, 0.00058, 0.00057, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00065, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00059, 0.00057, 0.00058, 0.00058, 0.00059, 0.00059, 0.00069, 0.00058, 0.0006, 0.00058, 0.00058, 0.00057, 0.00058, 0.00057, 0.00059, 0.00058, 0.00058]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00021, 0.00012, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00014, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00012, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00014, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 
0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00016, 0.00014, 0.00014, 0.00014, 0.0002, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00015, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00013, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00015, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00013, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [4.22691, 0.00055, 0.00056, 0.00056, 0.00056, 0.00057, 0.00056, 0.00056, 0.00055, 0.00056, 0.00056, 0.00056, 0.00056, 0.00055, 0.00057, 0.00057, 0.00056, 0.00056, 0.00054, 0.00056, 0.00056, 0.00055, 0.00055, 0.00056, 0.00056, 0.00055, 0.00061, 0.00058, 0.00058, 0.00056, 0.00056, 0.00056, 0.00057, 0.00061, 0.00059, 0.00057, 0.00058, 0.00056, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00056, 0.00058, 0.00058, 0.00059, 0.00057, 0.00059, 0.00057, 0.00058, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.0006, 0.00057, 0.00058, 0.00058, 0.00056, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.00058, 0.00057, 0.0006, 0.00061, 0.00058, 0.00059, 0.00058, 0.00057, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00056, 0.00057, 0.00058, 0.00059, 0.00058, 0.00057, 0.00057, 0.00058, 0.00057, 0.00058, 0.00058, 0.00056, 0.00057, 0.00049, 0.00057, 0.00057, 0.00057, 0.00048, 0.00057, 0.00058, 
0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00058, 0.00048, 0.00048, 0.0005, 0.00058, 0.0006, 0.00058, 0.00058, 0.00059, 0.00056, 0.00058, 0.00058, 0.00058, 0.00059, 0.00057, 0.00058, 0.00057, 0.00058, 0.00057, 0.00073, 0.00058, 0.00058, 0.00057, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00046, 0.00058, 0.00057, 0.00059, 0.00058, 0.00057, 0.00048, 0.00058, 0.00058, 0.00059, 0.00059, 0.00059, 0.00058, 0.00057, 0.00059, 0.00058, 0.00057, 0.00057, 0.00058, 0.00056, 0.00058, 0.00058, 0.00058, 0.00057, 0.00047, 0.00047, 0.00067, 0.00057, 0.00058, 0.00059, 0.00057, 0.00058, 0.00066, 0.00058, 0.00058, 0.00059, 0.00048, 0.00059, 0.00059, 0.00059, 0.00057, 0.00062, 0.00058, 0.00057, 0.00057, 0.00057, 0.00058, 0.0006, 0.00057, 0.00057, 0.00058, 0.00058, 0.00057, 0.00058, 0.00059, 0.00058, 0.00059, 0.00058, 0.0006, 0.00058, 0.00058, 0.00058, 0.00064, 0.00057, 0.00058, 0.00059, 0.00059, 0.00058, 0.00059, 0.00059, 0.00058, 0.00057, 0.00057, 0.0006, 0.00058, 0.00057, 0.00058, 0.00059, 0.00059, 0.0006, 0.00059, 0.00059, 0.00059, 0.00059, 0.00059, 0.00058, 0.0006, 0.00058, 0.00061, 0.00059, 0.00057, 0.00056, 0.00058, 0.00059, 0.00059, 0.00059, 0.00059, 0.00058, 0.00059, 0.00063, 0.0006, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.00058, 0.00061, 0.00059, 0.0006, 0.00058, 0.0006, 0.0006, 0.00059, 0.00059, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.00057, 0.0006, 0.0006, 0.00059, 0.00059, 0.00059, 0.0006, 0.00059, 0.0006, 0.00059, 0.00058, 0.00058, 0.00057, 0.00058, 0.00061, 0.00058, 0.00061, 0.00058, 0.00058, 0.00057, 0.00057, 0.00059, 0.00058, 0.00057, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00058, 0.0006, 0.00058, 0.0006, 0.00057, 0.0006, 0.00059, 0.00059, 0.00059, 0.0006, 0.0006, 0.00059, 0.00058, 0.0006, 0.00058, 0.0006, 0.0006, 0.00061, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.00061, 0.00062, 0.00062, 0.00058, 0.00057, 0.00058, 0.0006, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00058, 0.00059, 0.00059, 0.00059, 0.00059, 0.00059, 0.00058, 0.00058, 0.00058, 0.00059, 0.00059, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00063, 0.0006, 0.00059, 0.00062, 0.00058, 0.00059, 0.00058, 0.00059, 0.00059, 0.00058, 0.00058, 0.00058, 0.00063, 0.00059, 0.00056, 0.00058, 0.00058, 0.00056, 0.00057, 0.00059, 0.00059, 0.00059, 0.00059, 0.00058, 0.00059, 0.00058, 0.00058, 0.00059, 0.00058, 0.00058, 0.00058, 0.0006, 0.00059, 0.00058, 0.00058, 0.00058, 0.00059, 0.0006, 0.00058, 0.0006, 0.00058, 0.00059, 0.00058, 0.00057, 0.00057, 0.0006, 0.00064, 0.00059, 0.00061, 0.00058, 0.00058, 0.0006, 0.00058, 0.0006, 0.00067, 0.00057, 0.00058, 0.0006, 0.00059]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00354, 0.00262, 0.00261, 0.00266, 0.0026, 0.0026, 0.0026, 0.00261, 0.00259, 0.00259, 0.00261, 0.00261, 0.00261, 0.00262, 0.00262, 0.0026, 0.0026, 0.00258, 0.00264, 0.00259, 0.00269, 0.00267, 0.00262, 0.00291, 0.00262, 0.00271, 0.00259, 0.00259, 0.0026, 0.00261, 0.00261, 0.0026, 0.0026, 0.00257, 0.00262, 0.00261, 0.00262, 0.00265, 0.0026, 0.00261, 0.00261, 0.00259, 0.0026, 0.00265, 0.00262, 0.00261, 0.00265, 0.00258, 0.0026, 0.00263, 0.00261, 0.0026, 0.0026, 0.00258, 0.00258, 0.0026, 0.00261, 0.0026, 0.00261, 0.00261, 0.00263, 0.00259, 0.00262, 0.0026, 0.00261, 0.00258, 0.00261, 0.0026, 0.00267, 0.00261, 0.00258, 0.00265, 0.00259, 0.00261, 0.00258, 0.00258, 0.00261, 0.00261, 0.00261, 0.00259, 0.00258, 0.00262, 0.00261, 0.00261, 0.00261, 0.00259, 0.00262, 0.0026, 0.0026, 0.00259, 
0.0026, 0.00261, 0.0026, 0.00261, 0.0026, 0.00272, 0.00259, 0.00262, 0.00257, 0.0026, 0.00261, 0.00259, 0.00263, 0.00259, 0.00261, 0.00261, 0.00267, 0.00258, 0.0026, 0.00259, 0.00262, 0.00259, 0.00259, 0.00481, 0.00261, 0.00259, 0.00263, 0.0029, 0.00259, 0.00261, 0.00263, 0.0026, 0.0026, 0.00261, 0.00261, 0.00262, 0.00261, 0.00259, 0.0026, 0.00308, 0.00357, 0.00364, 0.0026, 0.00259, 0.00266, 0.00258, 0.0026, 0.00264, 0.00261, 0.0026, 0.0026, 0.0026, 0.00261, 0.00261, 0.0026, 0.00258, 0.00262, 0.00262, 0.00264, 0.00258, 0.00262, 0.0026, 0.00259, 0.00268, 0.0026, 0.00263, 0.00257, 0.0026, 0.00259, 0.00262, 0.00262, 0.00261, 0.00261, 0.00261, 0.0026, 0.0026, 0.00261, 0.0026, 0.00266, 0.00266, 0.00264, 0.0027, 0.00268, 0.00266, 0.00266, 0.00267, 0.00263, 0.00266, 0.00264, 0.00459, 0.00266, 0.00266, 0.00267, 0.00266, 0.00265, 0.00269, 0.00266, 0.00267, 0.00272, 0.00267, 0.00265, 0.00272, 0.00266, 0.00266, 0.0027, 0.00266, 0.00265, 0.00269, 0.00265, 0.00265, 0.00265, 0.00268, 0.00265, 0.00266, 0.00266, 0.00267, 0.00266, 0.00265, 0.00267, 0.00266, 0.0027, 0.00266, 0.00264, 0.00266, 0.00264, 0.00266, 0.00265, 0.00265, 0.00266, 0.00268, 0.00268, 0.00266, 0.00266, 0.00266, 0.00264, 0.00265, 0.00269, 0.00267, 0.00267, 0.00269, 0.00266, 0.00266, 0.00266, 0.00266, 0.00265, 0.00268, 0.0027, 0.00351, 0.00265, 0.00266, 0.00267, 0.00267, 0.00265, 0.00267, 0.00265, 0.00267, 0.00266, 0.00266, 0.00275, 0.00266, 0.00264, 0.00265, 0.00266, 0.0027, 0.00287, 0.00267, 0.00306, 0.00267, 0.00265, 0.00268, 0.00266, 0.00266, 0.00265, 0.00265, 0.00265, 0.00266, 0.00271, 0.00266, 0.00266, 0.00267, 0.00267, 0.00273, 0.00267, 0.00267, 0.00264, 0.00267, 0.00266, 0.00264, 0.00267, 0.00267, 0.00266, 0.00267, 0.00266, 0.00263, 0.00266, 0.00268, 0.00265, 0.00266, 0.00266, 0.00267, 0.00267, 0.00265, 0.00268, 0.00266, 0.00267, 0.00272, 0.00264, 0.00266, 0.00266, 0.00265, 0.00277, 0.00266, 0.00269, 0.00264, 0.00265, 0.00266, 0.00259, 0.00259, 0.0026, 0.00261, 0.0026, 0.00262, 0.0026, 0.00261, 0.00261, 0.00261, 0.00261, 0.00272, 0.00262, 0.00323, 0.0026, 0.00261, 0.00262, 0.00269, 0.00259, 0.00261, 0.00261, 0.00261, 0.00261, 0.0026, 0.00259, 0.00258, 0.0026, 0.00262, 0.00261, 0.00261, 0.00262, 0.0026, 0.0026, 0.00264, 0.00259, 0.00285, 0.0026, 0.00259, 0.00259, 0.0026, 0.00258, 0.00261, 0.00261, 0.00259, 0.0026, 0.00261, 0.0026, 0.00273, 0.0026, 0.00258, 0.00261, 0.0026, 0.00259, 0.0026, 0.00259, 0.00259, 0.00261, 0.00266, 0.00266, 0.00265, 0.00269, 0.00269, 0.00266, 0.00266, 0.00266, 0.00264, 0.00266, 0.00267, 0.00265, 0.00273, 0.00265, 0.00265, 0.0027, 0.00266, 0.00274, 0.00267, 0.00267, 0.00267, 0.00266, 0.00266, 0.00266, 0.00299, 0.00266, 0.00268, 0.00265, 0.00267, 0.00265, 0.00268, 0.00265, 0.00266, 0.00267, 0.00267, 0.00271, 0.00267]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00249, 0.00044, 0.00044, 0.00045, 0.00045, 0.00044, 0.00045, 0.00045, 0.00045, 0.00044, 0.00045, 0.00045, 0.00044, 0.00044, 0.00044, 0.00044, 0.00045, 0.00044, 0.00044, 0.00045, 0.00044, 0.00044, 0.00045, 0.00046, 0.00044, 0.00045, 0.00045, 0.00045, 0.00045, 0.00045, 0.00044, 0.00045, 0.00045, 0.00047, 0.00045, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00045, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00047, 0.00046, 0.00046, 
0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00045, 0.00045, 0.00047, 0.00046, 0.00046, 0.00045, 0.00045, 0.00046, 0.00046, 0.00045, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00048, 0.00056, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00047, 0.00046, 0.00049, 0.00051, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00047, 0.00047, 0.00045, 0.00047, 0.00045, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00047, 0.00047, 0.00046, 0.00047, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00047, 0.00049, 0.00048, 0.00046, 0.00045, 0.00046, 0.00046, 0.00045, 0.00046, 0.00045, 0.00045, 0.00047, 0.00045, 0.00045, 0.00046, 0.00046, 0.00045, 0.00047, 0.00046, 0.00047, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00047, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00045, 0.00047, 0.00047, 0.00046, 0.00047, 0.00046, 0.00047, 0.00047, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00045, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00048, 0.00046, 0.00046, 0.00047, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00047, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00045, 0.00045, 0.00046, 0.00045, 0.00046, 0.0005, 0.00046, 0.00046, 0.00047, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00047, 0.00046, 0.00047, 0.00046, 0.00047, 0.00047, 0.00046, 0.00046, 0.00045, 0.00045, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00045, 0.00046, 0.00047, 0.00045, 0.00046, 0.00046, 0.00046, 0.00047, 0.00047, 0.00047, 0.00046, 0.00057, 0.00046, 0.00046, 0.00047, 0.00045, 0.00046, 0.00045, 0.00045, 0.00045, 0.00047, 0.00047, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00045, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00046, 0.00047, 0.00044, 0.00046, 0.00046, 0.00047, 0.00046, 0.00045, 0.00045, 0.00045, 0.00046, 0.00047, 0.00046, 0.00047, 0.00046, 0.00047, 0.00047, 0.00045, 0.00045, 0.00045, 0.00045, 0.00047, 0.00046, 0.00046]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.00056, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00048, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00048, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00051, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00048, 0.00049, 0.00049, 
0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00051, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00069, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00051, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00053, 0.00064, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00051, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00052, 0.00049, 0.00049, 0.00051, 0.00049, 0.0005, 0.00051, 0.00049, 0.00049, 0.00053, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00051, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.0005, 0.00049, 0.00049, 0.0005, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00051, 0.00049, 0.00049, 0.00059, 0.00051, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.0005, 0.00049, 0.00049, 0.00068, 0.0005, 0.00049, 0.00049, 0.00049, 0.00077, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00062, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.0005, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.0005, 0.0005, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.0005, 0.00049, 0.0005, 0.00049, 0.0005, 0.00064, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00061, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00052, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.0005, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049, 0.00049, 0.0005, 0.00049, 0.00049, 0.00049]}, "optimizer-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [4.23567, 0.00458, 0.00457, 0.00463, 0.00456, 0.00458, 0.00456, 0.00457, 0.00457, 0.00456, 0.00457, 0.00457, 0.00457, 0.00456, 0.00459, 0.00457, 0.00455, 0.00458, 0.00456, 0.00456, 0.00465, 0.00463, 0.00457, 0.005, 0.00457, 0.00468, 0.0046, 0.00458, 0.00461, 0.0046, 0.00456, 0.00456, 0.00462, 0.00463, 0.00464, 0.0046, 0.00464, 0.00464, 0.00461, 0.00462, 0.00462, 0.00459, 0.00465, 0.00464, 0.00462, 0.00462, 0.00467, 0.00457, 0.00462, 0.00465, 0.00462, 0.00462, 
0.00473, 0.00459, 0.0046, 0.00464, 0.00463, 0.00458, 0.00462, 0.00462, 0.00462, 0.00459, 0.00465, 0.00461, 0.00463, 0.00459, 0.0046, 0.00462, 0.00469, 0.00466, 0.00461, 0.00468, 0.0046, 0.00461, 0.0046, 0.00464, 0.00463, 0.00465, 0.00465, 0.00462, 0.00459, 0.00459, 0.00461, 0.00461, 0.00462, 0.00461, 0.00463, 0.00459, 0.00461, 0.00458, 0.00461, 0.00463, 0.00459, 0.0046, 0.00456, 0.00476, 0.00459, 0.00465, 0.00449, 0.00462, 0.00463, 0.0046, 0.00465, 0.0046, 0.00462, 0.00462, 0.00468, 0.00461, 0.00462, 0.00462, 0.00464, 0.0045, 0.00453, 0.00715, 0.00463, 0.00463, 0.00466, 0.00492, 0.00461, 0.00459, 0.00464, 0.00466, 0.00461, 0.00462, 0.00461, 0.00464, 0.00462, 0.00461, 0.0046, 0.00561, 0.00589, 0.00578, 0.0046, 0.0046, 0.00467, 0.0046, 0.00462, 0.00468, 0.00449, 0.00462, 0.00461, 0.00464, 0.00463, 0.00464, 0.0045, 0.0046, 0.00464, 0.00464, 0.00466, 0.00463, 0.00464, 0.00464, 0.00462, 0.00469, 0.00461, 0.00467, 0.00459, 0.00458, 0.00465, 0.00466, 0.00462, 0.00464, 0.00454, 0.00452, 0.00487, 0.00461, 0.00461, 0.00463, 0.00466, 0.00467, 0.00477, 0.00473, 0.00469, 0.00473, 0.00459, 0.00473, 0.00467, 0.00467, 0.00466, 0.0068, 0.00467, 0.00466, 0.00467, 0.00465, 0.00466, 0.00472, 0.00467, 0.00466, 0.00474, 0.00468, 0.00464, 0.00474, 0.00468, 0.00473, 0.00472, 0.00468, 0.0047, 0.00472, 0.00465, 0.00466, 0.00496, 0.00468, 0.00467, 0.00471, 0.0047, 0.00468, 0.00472, 0.00467, 0.00467, 0.00466, 0.00472, 0.00469, 0.00466, 0.00464, 0.00467, 0.00469, 0.00466, 0.00468, 0.00469, 0.00474, 0.00473, 0.00468, 0.0047, 0.00468, 0.00467, 0.00469, 0.00477, 0.00469, 0.00464, 0.00465, 0.0047, 0.0047, 0.00469, 0.00468, 0.00472, 0.00469, 0.00472, 0.00563, 0.00469, 0.00469, 0.00469, 0.0047, 0.00467, 0.0047, 0.00467, 0.00467, 0.00472, 0.00469, 0.00478, 0.00471, 0.00475, 0.00469, 0.00469, 0.00472, 0.00495, 0.00468, 0.0051, 0.00473, 0.0047, 0.00468, 0.00485, 0.00471, 0.00466, 0.0047, 0.00468, 0.00471, 0.00473, 0.00471, 0.0047, 0.00469, 0.00469, 0.00472, 0.00468, 0.00471, 0.00464, 0.00469, 0.00465, 0.00469, 0.00468, 0.00465, 0.00471, 0.00469, 0.0047, 0.00498, 0.00469, 0.00468, 0.00467, 0.00468, 0.00506, 0.0047, 0.00468, 0.00467, 0.00466, 0.00468, 0.0047, 0.00474, 0.00468, 0.00469, 0.0047, 0.00467, 0.00478, 0.00468, 0.00471, 0.0047, 0.00469, 0.00471, 0.00461, 0.00466, 0.00461, 0.00462, 0.0046, 0.00465, 0.00463, 0.00465, 0.00465, 0.00468, 0.00461, 0.00471, 0.00465, 0.00542, 0.00464, 0.00463, 0.00463, 0.00472, 0.0046, 0.00464, 0.00463, 0.0048, 0.00465, 0.00463, 0.00461, 0.00463, 0.0046, 0.00463, 0.00465, 0.00464, 0.00463, 0.00463, 0.00465, 0.00469, 0.00459, 0.00495, 0.00468, 0.00461, 0.00465, 0.00461, 0.00464, 0.00464, 0.00466, 0.00462, 0.00464, 0.00508, 0.00461, 0.0048, 0.00463, 0.00454, 0.00463, 0.00461, 0.00456, 0.0046, 0.00466, 0.00462, 0.00465, 0.00468, 0.00486, 0.00469, 0.00471, 0.00469, 0.00468, 0.00468, 0.00467, 0.00468, 0.00468, 0.00471, 0.00469, 0.00474, 0.00469, 0.00467, 0.00472, 0.00467, 0.00477, 0.00472, 0.00471, 0.00468, 0.00467, 0.00465, 0.00469, 0.00513, 0.00471, 0.00489, 0.00466, 0.00469, 0.00468, 0.00474, 0.00467, 0.00475, 0.00467, 0.00469, 0.00476, 0.0047]}, "learning-rate": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 
1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 1e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 
4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 6e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 7e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05, 9e-05]}, "batch-size": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 
128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 
128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0]}, "lm loss": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [10.84424, 10.87342, 10.85055, 10.81078, 10.64469, 10.6386, 10.4283, 10.13518, 9.93546, 9.83538, 9.5857, 9.84804, 9.88588, 9.63127, 9.79022, 9.5114, 9.4597, 9.65546, 9.38988, 9.33928, 9.24947, 9.15126, 9.18199, 9.00445, 9.19836, 9.06663, 9.16101, 9.1698, 9.30057, 8.98927, 8.92967, 9.05035, 9.04657, 8.66029, 8.72527, 8.75664, 8.69468, 8.74328, 8.66681, 8.77286, 8.67044, 8.86119, 8.84295, 8.50873, 8.39852, 8.43801, 8.49532, 8.39321, 8.44017, 8.59221, 8.37564, 8.19958, 8.2329, 8.22974, 8.27495, 7.92044, 8.0993, 7.89755, 8.2517, 8.23397, 8.00952, 7.97507, 7.92567, 7.74377, 7.74735, 7.64935, 7.51967, 7.91031, 7.70174, 7.45536, 7.74632, 7.77446, 7.54372, 7.30243, 7.45569, 7.34305, 7.4658, 7.22841, 7.63683, 7.28242, 7.34884, 7.21343, 7.21124, 7.41956, 7.17365, 7.2819, 6.99462, 7.00325, 7.04012, 7.13712, 6.82214, 6.98588, 7.08949, 6.99872, 6.87479, 6.75655, 6.99059, 7.06011, 6.70413, 6.58421, 6.72746, 6.74527, 6.73409, 6.73823, 6.65852, 6.40615, 6.63686, 6.6194, 6.44648, 6.62844, 6.74357, 6.61132, 6.72657, 6.69405, 6.62733, 6.50769, 6.59795, 6.40666, 6.66519, 6.24881, 6.25106, 6.30401, 6.39198, 6.34989, 6.45173, 6.29422, 6.33969, 6.23719, 6.20153, 6.39655, 6.32455, 6.32086, 6.16315, 6.15667, 6.23617, 6.38123, 6.19858, 6.14609, 6.17459, 6.11003, 6.05359, 6.06531, 6.24848, 6.39923, 6.24762, 6.28436, 6.08885, 6.1659, 5.99117, 6.01964, 5.94446, 6.23937, 6.17942, 5.95871, 5.7764, 6.11339, 5.84425, 6.10156, 5.77953, 6.15415, 6.13822, 6.07746, 5.92004, 6.10968, 5.93741, 6.19122, 5.88685, 5.78306, 5.77148, 5.68041, 6.00813, 5.99187, 6.05986, 5.88016, 6.03137, 5.96131, 5.99374, 5.98716, 5.94573, 5.83722, 5.94198, 5.61328, 5.69729, 5.88553, 5.83625, 5.85543, 5.75718, 5.83246, 5.71985, 5.55522, 5.71497, 5.61505, 5.82338, 5.59492, 5.70181, 5.69956, 5.89291, 5.6334, 5.84186, 5.73328, 5.86061, 5.32413, 5.89063, 5.86923, 5.84806, 5.40969, 5.40238, 5.62094, 5.5916, 5.47979, 5.57337, 5.67122, 5.47407, 5.73944, 5.51167, 5.59101, 5.62347, 5.61736, 5.50921, 5.61182, 5.67274, 5.68001, 5.58479, 5.65971, 5.37206, 5.67757, 5.62674, 5.42131, 5.58249, 5.62904, 5.55375, 5.34106, 5.53431, 5.48176, 5.48104, 5.38026, 5.55107, 5.59981, 5.38504, 5.51817, 5.48713, 5.33135, 5.50212, 5.40894, 5.44244, 5.31335, 5.06368, 5.47625, 5.56822, 5.71202, 5.40926, 5.59783, 5.63205, 5.23113, 5.2684, 5.39256, 5.39509, 5.32651, 5.49543, 5.18174, 5.2944, 5.24351, 5.3743, 5.25187, 5.4403, 5.53394, 5.30526, 5.42762, 5.33573, 5.07536, 5.30828, 5.24915, 5.30097, 5.10794, 5.27462, 5.25882, 5.46931, 5.15605, 5.26147, 5.20567, 5.34991, 4.9789, 4.90972, 5.32269, 5.39016, 5.22419, 5.31593, 5.10145, 5.16054, 5.25953, 5.0667, 5.26007, 5.06659, 5.33924, 5.2437, 5.14669, 5.24181, 5.03908, 5.31189, 5.0508, 5.02718, 5.13824, 5.11134, 5.26999, 5.14813, 5.27491, 5.09204, 5.0944, 5.24441, 5.32532, 5.25266, 5.18964, 5.14218, 5.28959, 4.95048, 5.2045, 5.09444, 5.30302, 5.17003, 5.18518, 5.11668, 4.98204, 4.99495, 5.222, 5.30847, 5.098, 5.05553, 4.91636, 5.12137, 5.11611, 4.9291, 5.33462, 5.02406, 5.09871, 5.16424, 5.00257, 5.06588, 5.06465, 4.99336, 5.07822, 5.15996, 4.97519, 5.18105, 4.9261, 4.91748, 5.06072, 4.99116, 4.90494, 4.77574, 
4.94081, 5.11232, 5.01149, 5.01672, 5.32706, 4.95549, 4.99178, 5.04351, 4.80691, 4.73281, 4.99471, 5.04386, 4.87342, 4.9541, 5.04639, 5.02142, 4.81154, 4.89155, 4.90243, 4.82954, 4.73696, 5.00591, 4.75497, 5.20346, 4.791, 4.99509, 4.73426, 4.7815, 4.81632, 4.64705, 4.65335, 4.84192, 4.80637, 4.79718, 4.91906, 4.87982, 4.9259, 4.76993, 4.87999, 4.73114, 4.91345, 4.95513, 4.87047, 4.70341, 4.77964, 4.89818, 4.70591, 4.85482, 4.68983, 4.68887, 4.64189]}, "lm loss vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [10.84424, 10.87342, 10.85055, 10.81078, 10.64469, 10.6386, 10.4283, 10.13518, 9.93546, 9.83538, 9.5857, 9.84804, 9.88588, 9.63127, 9.79022, 9.5114, 9.4597, 9.65546, 9.38988, 9.33928, 9.24947, 9.15126, 9.18199, 9.00445, 9.19836, 9.06663, 9.16101, 9.1698, 9.30057, 8.98927, 8.92967, 9.05035, 9.04657, 8.66029, 8.72527, 8.75664, 8.69468, 8.74328, 8.66681, 8.77286, 8.67044, 8.86119, 8.84295, 8.50873, 8.39852, 8.43801, 8.49532, 8.39321, 8.44017, 8.59221, 8.37564, 8.19958, 8.2329, 8.22974, 8.27495, 7.92044, 8.0993, 7.89755, 8.2517, 8.23397, 8.00952, 7.97507, 7.92567, 7.74377, 7.74735, 7.64935, 7.51967, 7.91031, 7.70174, 7.45536, 7.74632, 7.77446, 7.54372, 7.30243, 7.45569, 7.34305, 7.4658, 7.22841, 7.63683, 7.28242, 7.34884, 7.21343, 7.21124, 7.41956, 7.17365, 7.2819, 6.99462, 7.00325, 7.04012, 7.13712, 6.82214, 6.98588, 7.08949, 6.99872, 6.87479, 6.75655, 6.99059, 7.06011, 6.70413, 6.58421, 6.72746, 6.74527, 6.73409, 6.73823, 6.65852, 6.40615, 6.63686, 6.6194, 6.44648, 6.62844, 6.74357, 6.61132, 6.72657, 6.69405, 6.62733, 6.50769, 6.59795, 6.40666, 6.66519, 6.24881, 6.25106, 6.30401, 6.39198, 6.34989, 6.45173, 6.29422, 6.33969, 6.23719, 6.20153, 6.39655, 6.32455, 6.32086, 6.16315, 6.15667, 6.23617, 6.38123, 6.19858, 6.14609, 6.17459, 6.11003, 6.05359, 6.06531, 6.24848, 6.39923, 6.24762, 6.28436, 6.08885, 6.1659, 5.99117, 6.01964, 5.94446, 6.23937, 6.17942, 5.95871, 5.7764, 6.11339, 5.84425, 6.10156, 5.77953, 6.15415, 6.13822, 6.07746, 5.92004, 6.10968, 5.93741, 6.19122, 5.88685, 5.78306, 5.77148, 5.68041, 6.00813, 5.99187, 6.05986, 5.88016, 6.03137, 5.96131, 5.99374, 5.98716, 5.94573, 5.83722, 5.94198, 5.61328, 5.69729, 5.88553, 5.83625, 5.85543, 5.75718, 5.83246, 5.71985, 5.55522, 5.71497, 5.61505, 5.82338, 5.59492, 5.70181, 5.69956, 5.89291, 5.6334, 5.84186, 5.73328, 5.86061, 5.32413, 5.89063, 5.86923, 5.84806, 5.40969, 5.40238, 5.62094, 5.5916, 5.47979, 5.57337, 5.67122, 5.47407, 5.73944, 5.51167, 5.59101, 5.62347, 5.61736, 5.50921, 5.61182, 5.67274, 5.68001, 5.58479, 5.65971, 5.37206, 5.67757, 5.62674, 5.42131, 5.58249, 5.62904, 5.55375, 5.34106, 5.53431, 5.48176, 5.48104, 5.38026, 5.55107, 5.59981, 5.38504, 5.51817, 5.48713, 5.33135, 5.50212, 5.40894, 5.44244, 5.31335, 5.06368, 5.47625, 5.56822, 5.71202, 5.40926, 5.59783, 5.63205, 5.23113, 5.2684, 5.39256, 5.39509, 5.32651, 5.49543, 5.18174, 5.2944, 5.24351, 5.3743, 5.25187, 5.4403, 5.53394, 5.30526, 5.42762, 5.33573, 5.07536, 5.30828, 5.24915, 5.30097, 5.10794, 5.27462, 5.25882, 5.46931, 5.15605, 5.26147, 5.20567, 5.34991, 4.9789, 4.90972, 5.32269, 5.39016, 5.22419, 5.31593, 5.10145, 5.16054, 5.25953, 5.0667, 5.26007, 5.06659, 5.33924, 5.2437, 5.14669, 5.24181, 5.03908, 5.31189, 5.0508, 5.02718, 5.13824, 5.11134, 5.26999, 5.14813, 5.27491, 5.09204, 5.0944, 5.24441, 5.32532, 5.25266, 5.18964, 5.14218, 5.28959, 4.95048, 5.2045, 5.09444, 5.30302, 5.17003, 5.18518, 5.11668, 4.98204, 4.99495, 5.222, 5.30847, 5.098, 5.05553, 4.91636, 5.12137, 5.11611, 4.9291, 5.33462, 5.02406, 5.09871, 5.16424, 5.00257, 5.06588, 
5.06465, 4.99336, 5.07822, 5.15996, 4.97519, 5.18105, 4.9261, 4.91748, 5.06072, 4.99116, 4.90494, 4.77574, 4.94081, 5.11232, 5.01149, 5.01672, 5.32706, 4.95549, 4.99178, 5.04351, 4.80691, 4.73281, 4.99471, 5.04386, 4.87342, 4.9541, 5.04639, 5.02142, 4.81154, 4.89155, 4.90243, 4.82954, 4.73696, 5.00591, 4.75497, 5.20346, 4.791, 4.99509, 4.73426, 4.7815, 4.81632, 4.64705, 4.65335, 4.84192, 4.80637, 4.79718, 4.91906, 4.87982, 4.9259, 4.76993, 4.87999, 4.73114, 4.91345, 4.95513, 4.87047, 4.70341, 4.77964, 4.89818, 4.70591, 4.85482, 4.68983, 4.68887, 4.64189]}, "loss-scale": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [13.93626, 13.32689, 13.8137, 12.62172, 11.96992, 9.43513, 6.80799, 6.88665, 5.95498, 4.54619, 4.13053, 2.82596, 2.39543, 2.34537, 2.05773, 2.21996, 2.14537, 1.88392, 2.17069, 2.06105, 2.12373, 2.16615, 2.00976, 2.20876, 1.97308, 2.09194, 1.90863, 1.88776, 1.95054, 2.15308, 2.08778, 2.10616, 1.95646, 2.17094, 2.31724, 2.02642, 2.04764, 1.84545, 1.93704, 1.75657, 2.13069, 1.75993, 1.70876, 1.86665, 1.92331, 1.79127, 1.74297, 1.74426, 1.75161, 1.53485, 1.75292, 1.73299, 1.79809, 1.83477, 1.59059, 1.79085, 1.74313, 1.81505, 1.54888, 1.47615, 1.68285, 1.4812, 1.79315, 1.92171, 1.63149, 1.63813, 1.6586, 1.59744, 1.47545, 1.65909, 1.42464, 1.41939, 1.49901, 1.42049, 1.40172, 1.46225, 1.44185, 1.3706, 1.36838, 1.26055, 1.34627, 1.29904, 1.25687, 1.20642, 1.27731, 1.27576, 1.4537, 1.34738, 1.41703, 1.10279, 1.09805, 1.25584, 1.13228, 1.20775, 0.93229, 1.32305, 1.10083, 1.31134, 0.99675, 1.32116, 1.31807, 1.20377, 1.14298, 1.25982, 1.11587, 1.06268, 1.1383, 1.13456, 1.18344, 1.01042, 1.19822, 0.96542, 0.98282, 0.98083, 1.21915, 1.08304, 1.00478, 1.26788, 1.10619, 1.30807, 1.1248, 1.36119, 1.37901, 1.4392, 1.56444, 1.29037, 1.19911, 1.00927, 1.14759, 1.2293, 1.07062, 1.374, 1.0323, 1.06393, 1.18259, 1.20195, 1.16586, 1.44753, 0.94529, 1.13538, 1.05269, 1.34467, 1.18959, 1.01819, 0.86119, 1.06946, 1.34129, 1.684, 1.13519, 1.32985, 1.38775, 1.34761, 1.74434, 1.43622, 1.39335, 1.37538, 1.86703, 2.00418, 1.35288, 1.23486, 1.3698, 1.32764, 0.9773, 0.96112, 1.19304, 1.38421, 1.30281, 1.24815, 1.29487, 1.60508, 1.50397, 1.88527, 1.44501, 1.35752, 0.94887, 1.377, 2.16776, 1.36769, 1.5918, 1.53974, 1.46219, 1.57752, 1.18503, 1.28159, 1.42022, 1.06676, 1.57312, 1.38623, 1.21566, 1.67634, 1.0445, 1.27733, 1.33704, 1.42129, 1.46397, 1.28187, 1.4299, 1.30773, 1.5098, 1.44392, 1.45291, 1.64364, 1.49176, 1.37459, 1.51541, 1.63213, 1.48678, 1.52484, 1.4594, 1.29967, 1.2736, 1.3991, 1.32876, 1.30752, 2.30271, 1.55904, 1.8449, 1.46033, 1.24296, 1.20709, 1.62628, 1.5864, 1.26763, 1.43759, 1.47487, 1.37697, 1.3542, 1.33151, 1.73529, 1.34567, 1.25198, 1.32539, 1.47482, 1.18237, 1.36743, 1.49708, 1.35135, 1.39444, 1.32979, 1.17935, 1.87393, 1.4264, 1.47427, 1.49289, 1.23046, 1.40513, 1.22641, 1.41026, 1.60243, 1.3143, 1.19178, 1.29275, 1.40778, 1.27321, 1.41008, 1.70248, 1.64394, 
1.51805, 1.52213, 1.56958, 1.37322, 1.23197, 1.2534, 1.33391, 1.27155, 1.71409, 1.36328, 1.34111, 1.56216, 1.69178, 1.34859, 1.23125, 1.30141, 1.35618, 1.71086, 1.21378, 1.62762, 1.35769, 1.32471, 1.3449, 1.37393, 1.16861, 1.52125, 1.65464, 1.84529, 1.4419, 1.39298, 1.45439, 1.43606, 1.60436, 1.56537, 1.49466, 1.35372, 1.44924, 1.44717, 1.59557, 1.51747, 1.64905, 1.33058, 1.31553, 1.61355, 1.23394, 1.40751, 1.24118, 1.39003, 1.46524, 1.46231, 1.5848, 1.30142, 1.49751, 1.49494, 1.35146, 1.32779, 1.48392, 1.42067, 1.43745, 1.57573, 1.52413, 1.22763, 1.19418, 1.89055, 1.53347, 1.40105, 1.60967, 1.38946, 1.31243, 1.45306, 1.42686, 1.36629, 1.4597, 1.59178, 1.37262, 1.28569, 1.49855, 1.29513, 1.26508, 1.32564, 1.18627, 1.52963, 1.41157, 1.22284, 1.09058, 1.41662, 1.39267, 1.29437, 1.39958, 1.3399, 1.36221, 1.4319, 1.07457, 1.45594, 1.29022, 1.47328, 1.63456, 1.35731, 1.53342, 1.23853, 1.30778, 1.37885, 1.39437, 1.58806, 1.41021, 1.41084, 1.3741, 1.18704, 1.36438, 1.50507, 1.3615, 1.43368, 1.39267, 1.48306, 1.60864, 1.92464, 1.65072, 1.54144, 1.35616, 1.29657, 1.5044, 1.29558, 1.3191, 1.41541, 1.44176, 1.48919, 1.28271, 1.18322, 1.31948, 1.34975, 1.36515, 1.26883, 1.48957, 1.40195, 1.45318, 1.67399, 1.47474, 1.53573, 1.49973, 1.39375, 1.51272, 1.36339, 1.21633]}, "grad-norm vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [13.93626, 13.32689, 13.8137, 12.62172, 11.96992, 9.43513, 6.80799, 6.88665, 5.95498, 4.54619, 4.13053, 2.82596, 2.39543, 2.34537, 2.05773, 2.21996, 2.14537, 1.88392, 2.17069, 2.06105, 2.12373, 2.16615, 2.00976, 2.20876, 1.97308, 2.09194, 1.90863, 1.88776, 1.95054, 2.15308, 2.08778, 2.10616, 1.95646, 2.17094, 2.31724, 2.02642, 2.04764, 1.84545, 1.93704, 1.75657, 2.13069, 1.75993, 1.70876, 1.86665, 1.92331, 1.79127, 1.74297, 1.74426, 1.75161, 1.53485, 1.75292, 1.73299, 1.79809, 1.83477, 1.59059, 1.79085, 1.74313, 1.81505, 1.54888, 1.47615, 1.68285, 1.4812, 1.79315, 1.92171, 1.63149, 1.63813, 1.6586, 1.59744, 1.47545, 1.65909, 1.42464, 1.41939, 1.49901, 1.42049, 1.40172, 1.46225, 1.44185, 1.3706, 1.36838, 1.26055, 1.34627, 1.29904, 1.25687, 1.20642, 1.27731, 1.27576, 1.4537, 1.34738, 1.41703, 1.10279, 1.09805, 1.25584, 1.13228, 1.20775, 0.93229, 1.32305, 1.10083, 1.31134, 0.99675, 1.32116, 1.31807, 1.20377, 1.14298, 1.25982, 1.11587, 1.06268, 1.1383, 1.13456, 1.18344, 1.01042, 1.19822, 0.96542, 0.98282, 0.98083, 1.21915, 1.08304, 1.00478, 1.26788, 1.10619, 1.30807, 1.1248, 1.36119, 1.37901, 1.4392, 1.56444, 1.29037, 1.19911, 1.00927, 1.14759, 1.2293, 1.07062, 1.374, 1.0323, 1.06393, 1.18259, 1.20195, 1.16586, 1.44753, 0.94529, 1.13538, 1.05269, 1.34467, 1.18959, 1.01819, 0.86119, 1.06946, 1.34129, 1.684, 1.13519, 1.32985, 1.38775, 1.34761, 1.74434, 1.43622, 1.39335, 1.37538, 1.86703, 2.00418, 1.35288, 1.23486, 1.3698, 1.32764, 0.9773, 0.96112, 1.19304, 1.38421, 1.30281, 1.24815, 1.29487, 1.60508, 1.50397, 1.88527, 1.44501, 1.35752, 0.94887, 1.377, 2.16776, 1.36769, 1.5918, 1.53974, 1.46219, 1.57752, 1.18503, 1.28159, 1.42022, 1.06676, 1.57312, 1.38623, 1.21566, 1.67634, 1.0445, 1.27733, 1.33704, 1.42129, 1.46397, 1.28187, 1.4299, 1.30773, 1.5098, 1.44392, 1.45291, 1.64364, 1.49176, 1.37459, 1.51541, 1.63213, 1.48678, 1.52484, 1.4594, 1.29967, 1.2736, 1.3991, 1.32876, 1.30752, 2.30271, 1.55904, 1.8449, 1.46033, 1.24296, 1.20709, 1.62628, 1.5864, 1.26763, 1.43759, 1.47487, 1.37697, 1.3542, 1.33151, 1.73529, 1.34567, 1.25198, 1.32539, 1.47482, 1.18237, 1.36743, 1.49708, 1.35135, 1.39444, 1.32979, 1.17935, 1.87393, 1.4264, 1.47427, 1.49289, 1.23046, 
1.40513, 1.22641, 1.41026, 1.60243, 1.3143, 1.19178, 1.29275, 1.40778, 1.27321, 1.41008, 1.70248, 1.64394, 1.51805, 1.52213, 1.56958, 1.37322, 1.23197, 1.2534, 1.33391, 1.27155, 1.71409, 1.36328, 1.34111, 1.56216, 1.69178, 1.34859, 1.23125, 1.30141, 1.35618, 1.71086, 1.21378, 1.62762, 1.35769, 1.32471, 1.3449, 1.37393, 1.16861, 1.52125, 1.65464, 1.84529, 1.4419, 1.39298, 1.45439, 1.43606, 1.60436, 1.56537, 1.49466, 1.35372, 1.44924, 1.44717, 1.59557, 1.51747, 1.64905, 1.33058, 1.31553, 1.61355, 1.23394, 1.40751, 1.24118, 1.39003, 1.46524, 1.46231, 1.5848, 1.30142, 1.49751, 1.49494, 1.35146, 1.32779, 1.48392, 1.42067, 1.43745, 1.57573, 1.52413, 1.22763, 1.19418, 1.89055, 1.53347, 1.40105, 1.60967, 1.38946, 1.31243, 1.45306, 1.42686, 1.36629, 1.4597, 1.59178, 1.37262, 1.28569, 1.49855, 1.29513, 1.26508, 1.32564, 1.18627, 1.52963, 1.41157, 1.22284, 1.09058, 1.41662, 1.39267, 1.29437, 1.39958, 1.3399, 1.36221, 1.4319, 1.07457, 1.45594, 1.29022, 1.47328, 1.63456, 1.35731, 1.53342, 1.23853, 1.30778, 1.37885, 1.39437, 1.58806, 1.41021, 1.41084, 1.3741, 1.18704, 1.36438, 1.50507, 1.3615, 1.43368, 1.39267, 1.48306, 1.60864, 1.92464, 1.65072, 1.54144, 1.35616, 1.29657, 1.5044, 1.29558, 1.3191, 1.41541, 1.44176, 1.48919, 1.28271, 1.18322, 1.31948, 1.34975, 1.36515, 1.26883, 1.48957, 1.40195, 1.45318, 1.67399, 1.47474, 1.53573, 1.49973, 1.39375, 1.51272, 1.36339, 1.21633]}, "num-zeros": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [69.0, 86.0, 77.0, 73.0, 78.0, 81.0, 100.0, 105.0, 134.0, 134.0, 122.0, 173.0, 158.0, 179.0, 178.0, 172.0, 173.0, 192.0, 186.0, 185.0, 155.0, 157.0, 183.0, 172.0, 179.0, 162.0, 166.0, 176.0, 162.0, 177.0, 178.0, 149.0, 163.0, 200.0, 122.0, 151.0, 160.0, 216.0, 173.0, 192.0, 163.0, 174.0, 167.0, 195.0, 177.0, 181.0, 195.0, 201.0, 171.0, 240.0, 190.0, 187.0, 177.0, 159.0, 167.0, 211.0, 151.0, 167.0, 226.0, 215.0, 184.0, 206.0, 174.0, 166.0, 203.0, 236.0, 215.0, 192.0, 197.0, 197.0, 250.0, 225.0, 178.0, 210.0, 205.0, 223.0, 233.0, 196.0, 258.0, 221.0, 228.0, 237.0, 226.0, 223.0, 188.0, 182.0, 179.0, 198.0, 147.0, 189.0, 211.0, 214.0, 206.0, 216.0, 245.0, 156.0, 216.0, 214.0, 192.0, 170.0, 167.0, 167.0, 171.0, 168.0, 164.0, 141.0, 174.0, 143.0, 140.0, 184.0, 153.0, 162.0, 175.0, 144.0, 145.0, 144.0, 166.0, 110.0, 159.0, 132.0, 128.0, 137.0, 112.0, 132.0, 126.0, 136.0, 128.0, 172.0, 158.0, 131.0, 135.0, 133.0, 133.0, 144.0, 114.0, 123.0, 127.0, 129.0, 121.0, 139.0, 118.0, 107.0, 135.0, 149.0, 155.0, 123.0, 118.0, 109.0, 109.0, 111.0, 101.0, 119.0, 87.0, 118.0, 99.0, 104.0, 99.0, 88.0, 112.0, 112.0, 136.0, 110.0, 122.0, 128.0, 102.0, 105.0, 114.0, 106.0, 103.0, 119.0, 109.0, 83.0, 87.0, 99.0, 136.0, 116.0, 91.0, 112.0, 94.0, 98.0, 128.0, 100.0, 108.0, 115.0, 104.0, 128.0, 109.0, 99.0, 112.0, 96.0, 123.0, 103.0, 109.0, 84.0, 117.0, 105.0, 92.0, 104.0, 83.0, 96.0, 128.0, 71.0, 107.0, 110.0, 99.0, 96.0, 100.0, 100.0, 99.0, 122.0, 94.0, 98.0, 121.0, 118.0, 83.0, 96.0, 99.0, 123.0, 108.0, 107.0, 108.0, 93.0, 89.0, 101.0, 121.0, 121.0, 113.0, 108.0, 83.0, 123.0, 89.0, 105.0, 99.0, 100.0, 108.0, 105.0, 95.0, 112.0, 101.0, 110.0, 93.0, 108.0, 94.0, 120.0, 118.0, 107.0, 98.0, 121.0, 102.0, 97.0, 111.0, 126.0, 102.0, 108.0, 107.0, 108.0, 95.0, 97.0, 96.0, 118.0, 100.0, 111.0, 103.0, 92.0, 100.0, 101.0, 100.0, 103.0, 112.0, 87.0, 86.0, 119.0, 97.0, 101.0, 119.0, 120.0, 124.0, 114.0, 108.0, 105.0, 101.0, 104.0, 103.0, 98.0, 86.0, 101.0, 115.0, 98.0, 90.0, 108.0, 102.0, 102.0, 108.0, 125.0, 109.0, 90.0, 115.0, 94.0, 114.0, 113.0, 98.0, 113.0, 122.0, 101.0, 97.0, 109.0, 
106.0, 105.0, 115.0, 95.0, 117.0, 118.0, 95.0, 111.0, 88.0, 121.0, 121.0, 117.0, 138.0, 134.0, 89.0, 99.0, 117.0, 93.0, 106.0, 123.0, 117.0, 107.0, 117.0, 108.0, 86.0, 121.0, 125.0, 105.0, 114.0, 107.0, 129.0, 114.0, 114.0, 107.0, 120.0, 118.0, 101.0, 109.0, 107.0, 124.0, 120.0, 116.0, 103.0, 127.0, 126.0, 90.0, 102.0, 114.0, 111.0, 108.0, 136.0, 107.0, 112.0, 104.0, 113.0, 117.0, 133.0, 104.0, 125.0, 119.0, 111.0, 122.0, 100.0, 118.0, 119.0, 104.0, 85.0, 133.0, 104.0, 119.0, 118.0, 95.0, 117.0, 123.0, 101.0, 132.0, 121.0, 110.0, 116.0, 116.0, 111.0, 91.0, 104.0, 104.0, 115.0, 124.0, 105.0, 104.0, 105.0, 101.0, 99.0, 112.0, 126.0, 139.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [69.0, 86.0, 77.0, 73.0, 78.0, 81.0, 100.0, 105.0, 134.0, 134.0, 122.0, 173.0, 158.0, 179.0, 178.0, 172.0, 173.0, 192.0, 186.0, 185.0, 155.0, 157.0, 183.0, 172.0, 179.0, 162.0, 166.0, 176.0, 162.0, 177.0, 178.0, 149.0, 163.0, 200.0, 122.0, 151.0, 160.0, 216.0, 173.0, 192.0, 163.0, 174.0, 167.0, 195.0, 177.0, 181.0, 195.0, 201.0, 171.0, 240.0, 190.0, 187.0, 177.0, 159.0, 167.0, 211.0, 151.0, 167.0, 226.0, 215.0, 184.0, 206.0, 174.0, 166.0, 203.0, 236.0, 215.0, 192.0, 197.0, 197.0, 250.0, 225.0, 178.0, 210.0, 205.0, 223.0, 233.0, 196.0, 258.0, 221.0, 228.0, 237.0, 226.0, 223.0, 188.0, 182.0, 179.0, 198.0, 147.0, 189.0, 211.0, 214.0, 206.0, 216.0, 245.0, 156.0, 216.0, 214.0, 192.0, 170.0, 167.0, 167.0, 171.0, 168.0, 164.0, 141.0, 174.0, 143.0, 140.0, 184.0, 153.0, 162.0, 175.0, 144.0, 145.0, 144.0, 166.0, 110.0, 159.0, 132.0, 128.0, 137.0, 112.0, 132.0, 126.0, 136.0, 128.0, 172.0, 158.0, 131.0, 135.0, 133.0, 133.0, 144.0, 114.0, 123.0, 127.0, 129.0, 121.0, 139.0, 118.0, 107.0, 135.0, 149.0, 155.0, 123.0, 118.0, 109.0, 109.0, 111.0, 101.0, 119.0, 87.0, 118.0, 99.0, 104.0, 99.0, 88.0, 112.0, 112.0, 136.0, 110.0, 122.0, 128.0, 102.0, 105.0, 114.0, 106.0, 103.0, 119.0, 109.0, 83.0, 87.0, 99.0, 136.0, 116.0, 91.0, 112.0, 94.0, 98.0, 128.0, 100.0, 108.0, 115.0, 104.0, 128.0, 109.0, 99.0, 112.0, 96.0, 123.0, 103.0, 109.0, 84.0, 117.0, 105.0, 92.0, 104.0, 83.0, 96.0, 128.0, 71.0, 107.0, 110.0, 99.0, 96.0, 100.0, 100.0, 99.0, 122.0, 94.0, 98.0, 121.0, 118.0, 83.0, 96.0, 99.0, 123.0, 108.0, 107.0, 108.0, 93.0, 89.0, 101.0, 121.0, 121.0, 113.0, 108.0, 83.0, 123.0, 89.0, 105.0, 99.0, 100.0, 108.0, 105.0, 95.0, 112.0, 101.0, 110.0, 93.0, 108.0, 94.0, 120.0, 118.0, 107.0, 98.0, 121.0, 102.0, 97.0, 111.0, 126.0, 102.0, 108.0, 107.0, 108.0, 95.0, 97.0, 96.0, 118.0, 100.0, 111.0, 103.0, 92.0, 100.0, 101.0, 100.0, 103.0, 112.0, 87.0, 86.0, 119.0, 97.0, 101.0, 119.0, 120.0, 124.0, 114.0, 108.0, 105.0, 101.0, 104.0, 103.0, 98.0, 86.0, 101.0, 115.0, 98.0, 90.0, 108.0, 102.0, 102.0, 108.0, 125.0, 109.0, 90.0, 115.0, 94.0, 114.0, 113.0, 98.0, 113.0, 122.0, 101.0, 97.0, 109.0, 106.0, 105.0, 115.0, 95.0, 117.0, 118.0, 95.0, 111.0, 88.0, 121.0, 121.0, 117.0, 138.0, 134.0, 89.0, 99.0, 117.0, 93.0, 106.0, 123.0, 117.0, 107.0, 117.0, 108.0, 86.0, 121.0, 125.0, 105.0, 114.0, 107.0, 129.0, 114.0, 114.0, 107.0, 120.0, 118.0, 101.0, 109.0, 107.0, 124.0, 120.0, 116.0, 103.0, 127.0, 126.0, 90.0, 102.0, 114.0, 111.0, 108.0, 136.0, 107.0, 112.0, 104.0, 113.0, 117.0, 133.0, 104.0, 125.0, 119.0, 111.0, 122.0, 100.0, 118.0, 119.0, 104.0, 85.0, 133.0, 104.0, 119.0, 118.0, 95.0, 117.0, 123.0, 101.0, 132.0, 121.0, 110.0, 116.0, 116.0, 111.0, 91.0, 104.0, 104.0, 115.0, 124.0, 105.0, 104.0, 105.0, 101.0, 99.0, 112.0, 126.0, 139.0]}, "params-norm": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": 
[180.0215, 180.0215, 180.0215, 180.02148, 180.02148, 180.02147, 180.02145, 180.02135, 180.02115, 180.02094, 180.01961, 180.01897, 180.01846, 180.01622, 180.01544, 180.01474, 180.01422, 180.01404, 180.01433, 180.01431, 180.01425, 180.01393, 180.01378, 180.01398, 180.01486, 180.01613, 180.01736, 180.01823, 180.01926, 180.02052, 180.02249, 180.0247, 180.0269, 180.02905, 180.03157, 180.03452, 180.03809, 180.04181, 180.04561, 180.04982, 180.05472, 180.06001, 180.06567, 180.07184, 180.0788, 180.08618, 180.09402, 180.10249, 180.11177, 180.12202, 180.13301, 180.14465, 180.15689, 180.16972, 180.18321, 180.19737, 180.21191, 180.22699, 180.24295, 180.26004, 180.27771, 180.29611, 180.31612, 180.33702, 180.35811, 180.38084, 180.40419, 180.4287, 180.45442, 180.48056, 180.50702, 180.53406, 180.56171, 180.58975, 180.61829, 180.64751, 180.67677, 180.70682, 180.73743, 180.76886, 180.80061, 180.83215, 180.86478, 180.89844, 180.93239, 180.96716, 181.00246, 181.03769, 181.07275, 181.10832, 181.14499, 181.18263, 181.21957, 181.25639, 181.29378, 181.33115, 181.36745, 181.40192, 181.43672, 181.47206, 181.50702, 181.54108, 181.57564, 181.61107, 181.64665, 181.68359, 181.72212, 181.76016, 181.79727, 181.83466, 181.87212, 181.91078, 181.94928, 181.98863, 182.02866, 182.0679, 182.10756, 182.14766, 182.18661, 182.22534, 182.26395, 182.30188, 182.33997, 182.3786, 182.41617, 182.45273, 182.48906, 182.52652, 182.56755, 182.60834, 182.64743, 182.68629, 182.72655, 182.76643, 182.80617, 182.84549, 182.8847, 182.92358, 182.96255, 183.00255, 183.04317, 183.08311, 183.12239, 183.16113, 183.20087, 183.24062, 183.27989, 183.31709, 183.35413, 183.39204, 183.42976, 183.46664, 183.50266, 183.5378, 183.57317, 183.60986, 183.64481, 183.67638, 183.7079, 183.74036, 183.77179, 183.80507, 183.8432, 183.8837, 183.92522, 183.96664, 184.00832, 184.04984, 184.09091, 184.13011, 184.16745, 184.20192, 184.2364, 184.27042, 184.30766, 184.34671, 184.38367, 184.41844, 184.45454, 184.49117, 184.52921, 184.56746, 184.60696, 184.64819, 184.69025, 184.73074, 184.77034, 184.80975, 184.84845, 184.88777, 184.92712, 184.96806, 185.00996, 185.0508, 185.09145, 185.13165, 185.17198, 185.21196, 185.25362, 185.29736, 185.33859, 185.37759, 185.41449, 185.45093, 185.48775, 185.52527, 185.56303, 185.60017, 185.63844, 185.67694, 185.717, 185.75711, 185.79745, 185.83626, 185.87444, 185.91074, 185.94763, 185.98566, 186.02451, 186.06494, 186.10443, 186.14497, 186.18584, 186.22533, 186.26512, 186.30524, 186.34587, 186.38719, 186.42752, 186.46732, 186.5069, 186.54416, 186.58186, 186.62146, 186.66272, 186.7025, 186.74118, 186.78197, 186.82381, 186.86591, 186.90703, 186.94699, 186.98782, 187.02896, 187.07161, 187.11592, 187.16006, 187.20297, 187.24727, 187.29167, 187.33688, 187.38315, 187.43051, 187.47704, 187.52306, 187.56926, 187.61435, 187.65848, 187.70207, 187.74612, 187.791, 187.83688, 187.88379, 187.93002, 187.97664, 188.02202, 188.06602, 188.10904, 188.15352, 188.19698, 188.23994, 188.28452, 188.3309, 188.37823, 188.4254, 188.47156, 188.51752, 188.5639, 188.60988, 188.65466, 188.69901, 188.74353, 188.78758, 188.82999, 188.87415, 188.91789, 188.9626, 189.00793, 189.05475, 189.10188, 189.14818, 189.1933, 189.23761, 189.28363, 189.33023, 189.37675, 189.42268, 189.46941, 189.51593, 189.56395, 189.61171, 189.65927, 189.70778, 189.75581, 189.80321, 189.8503, 189.89809, 189.9472, 189.9967, 190.04593, 190.09396, 190.14343, 190.1933, 190.24219, 190.29274, 190.34343, 190.39359, 190.44443, 190.49617, 190.54893, 190.60107, 190.65158, 190.70294, 190.75449, 190.80663, 
190.86197, 190.91545, 190.96892, 191.02086, 191.07315, 191.12288, 191.17188, 191.22237, 191.27545, 191.32816, 191.38139, 191.43503, 191.48665, 191.53937, 191.58943, 191.64163, 191.69427, 191.74928, 191.8026, 191.85596, 191.90891, 191.96182, 192.01491, 192.06815, 192.12227, 192.17641, 192.23074, 192.28561, 192.34024, 192.39484, 192.44731, 192.50171, 192.55782, 192.61383, 192.67009, 192.72624, 192.78252, 192.83763, 192.89287, 192.94981, 193.00703, 193.06404, 193.12177, 193.17989, 193.23723, 193.29391, 193.34985, 193.40605, 193.45912, 193.51132, 193.56346, 193.61696, 193.67215, 193.72841, 193.78329, 193.83797, 193.89262, 193.94887, 194.00604, 194.064, 194.12062, 194.17807, 194.23741, 194.29666, 194.35547, 194.41553, 194.47499, 194.53378, 194.59259, 194.65202, 194.70923, 194.76607, 194.82375, 194.88065, 194.93935]}, "params-norm vs samples": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [180.0215, 180.0215, 180.0215, 180.02148, 180.02148, 180.02147, 180.02145, 180.02135, 180.02115, 180.02094, 180.01961, 180.01897, 180.01846, 180.01622, 180.01544, 180.01474, 180.01422, 180.01404, 180.01433, 180.01431, 180.01425, 180.01393, 180.01378, 180.01398, 180.01486, 180.01613, 180.01736, 180.01823, 180.01926, 180.02052, 180.02249, 180.0247, 180.0269, 180.02905, 180.03157, 180.03452, 180.03809, 180.04181, 180.04561, 180.04982, 180.05472, 180.06001, 180.06567, 180.07184, 180.0788, 180.08618, 180.09402, 180.10249, 180.11177, 180.12202, 180.13301, 180.14465, 180.15689, 180.16972, 180.18321, 180.19737, 180.21191, 180.22699, 180.24295, 180.26004, 180.27771, 180.29611, 180.31612, 180.33702, 180.35811, 180.38084, 180.40419, 180.4287, 180.45442, 180.48056, 180.50702, 180.53406, 180.56171, 180.58975, 180.61829, 180.64751, 180.67677, 180.70682, 180.73743, 180.76886, 180.80061, 180.83215, 180.86478, 180.89844, 180.93239, 180.96716, 181.00246, 181.03769, 181.07275, 181.10832, 181.14499, 181.18263, 181.21957, 181.25639, 181.29378, 181.33115, 181.36745, 181.40192, 181.43672, 181.47206, 181.50702, 181.54108, 181.57564, 181.61107, 181.64665, 181.68359, 181.72212, 181.76016, 181.79727, 181.83466, 181.87212, 181.91078, 181.94928, 181.98863, 182.02866, 182.0679, 182.10756, 182.14766, 182.18661, 182.22534, 182.26395, 182.30188, 182.33997, 182.3786, 182.41617, 182.45273, 182.48906, 182.52652, 182.56755, 182.60834, 182.64743, 182.68629, 182.72655, 182.76643, 182.80617, 182.84549, 182.8847, 182.92358, 182.96255, 183.00255, 183.04317, 183.08311, 183.12239, 183.16113, 183.20087, 183.24062, 183.27989, 183.31709, 183.35413, 183.39204, 183.42976, 183.46664, 183.50266, 183.5378, 183.57317, 183.60986, 183.64481, 183.67638, 183.7079, 183.74036, 183.77179, 183.80507, 183.8432, 183.8837, 183.92522, 183.96664, 184.00832, 184.04984, 184.09091, 184.13011, 184.16745, 184.20192, 184.2364, 184.27042, 184.30766, 184.34671, 184.38367, 184.41844, 184.45454, 184.49117, 184.52921, 184.56746, 184.60696, 184.64819, 184.69025, 184.73074, 184.77034, 184.80975, 184.84845, 184.88777, 184.92712, 184.96806, 185.00996, 185.0508, 185.09145, 185.13165, 185.17198, 185.21196, 185.25362, 185.29736, 185.33859, 185.37759, 185.41449, 185.45093, 185.48775, 185.52527, 185.56303, 185.60017, 185.63844, 185.67694, 185.717, 185.75711, 185.79745, 185.83626, 185.87444, 185.91074, 185.94763, 185.98566, 186.02451, 186.06494, 186.10443, 186.14497, 186.18584, 186.22533, 186.26512, 186.30524, 186.34587, 186.38719, 186.42752, 186.46732, 186.5069, 186.54416, 186.58186, 186.62146, 186.66272, 186.7025, 186.74118, 186.78197, 186.82381, 186.86591, 186.90703, 
186.94699, 186.98782, 187.02896, 187.07161, 187.11592, 187.16006, 187.20297, 187.24727, 187.29167, 187.33688, 187.38315, 187.43051, 187.47704, 187.52306, 187.56926, 187.61435, 187.65848, 187.70207, 187.74612, 187.791, 187.83688, 187.88379, 187.93002, 187.97664, 188.02202, 188.06602, 188.10904, 188.15352, 188.19698, 188.23994, 188.28452, 188.3309, 188.37823, 188.4254, 188.47156, 188.51752, 188.5639, 188.60988, 188.65466, 188.69901, 188.74353, 188.78758, 188.82999, 188.87415, 188.91789, 188.9626, 189.00793, 189.05475, 189.10188, 189.14818, 189.1933, 189.23761, 189.28363, 189.33023, 189.37675, 189.42268, 189.46941, 189.51593, 189.56395, 189.61171, 189.65927, 189.70778, 189.75581, 189.80321, 189.8503, 189.89809, 189.9472, 189.9967, 190.04593, 190.09396, 190.14343, 190.1933, 190.24219, 190.29274, 190.34343, 190.39359, 190.44443, 190.49617, 190.54893, 190.60107, 190.65158, 190.70294, 190.75449, 190.80663, 190.86197, 190.91545, 190.96892, 191.02086, 191.07315, 191.12288, 191.17188, 191.22237, 191.27545, 191.32816, 191.38139, 191.43503, 191.48665, 191.53937, 191.58943, 191.64163, 191.69427, 191.74928, 191.8026, 191.85596, 191.90891, 191.96182, 192.01491, 192.06815, 192.12227, 192.17641, 192.23074, 192.28561, 192.34024, 192.39484, 192.44731, 192.50171, 192.55782, 192.61383, 192.67009, 192.72624, 192.78252, 192.83763, 192.89287, 192.94981, 193.00703, 193.06404, 193.12177, 193.17989, 193.23723, 193.29391, 193.34985, 193.40605, 193.45912, 193.51132, 193.56346, 193.61696, 193.67215, 193.72841, 193.78329, 193.83797, 193.89262, 193.94887, 194.00604, 194.064, 194.12062, 194.17807, 194.23741, 194.29666, 194.35547, 194.41553, 194.47499, 194.53378, 194.59259, 194.65202, 194.70923, 194.76607, 194.82375, 194.88065, 194.93935]}, "iteration-time": {"start_step": 0, "end_step": 2000, "step_interval": 5, "values": [25.13033, 1.48166, 1.46987, 1.47023, 1.48503, 1.46592, 1.47336, 1.47508, 1.47402, 1.4685, 1.46594, 1.46551, 1.47349, 1.47267, 1.46624, 1.4694, 1.46787, 1.46277, 1.47132, 1.47851, 1.46741, 1.46542, 1.4696, 1.47275, 1.46461, 1.47691, 1.4675, 1.4656, 1.47118, 1.46861, 1.46276, 1.46336, 1.46191, 1.46454, 1.46661, 1.45397, 1.45433, 1.45318, 1.47248, 1.45987, 1.4605, 1.47021, 1.46471, 1.46712, 1.47916, 1.46564, 1.46806, 1.48231, 1.47331, 1.47647, 1.4749, 1.47736, 1.47088, 1.48046, 1.47029, 1.4749, 1.47423, 1.4743, 1.47451, 1.47312, 1.46669, 1.48162, 1.47248, 1.47813, 1.47924, 1.47693, 1.4857, 1.47407, 1.47761, 1.47904, 1.47169, 1.46697, 1.48901, 1.47837, 1.47292, 1.48078, 1.49273, 1.48823, 1.48311, 1.48576, 1.48783, 1.48617, 1.47144, 1.46991, 1.46885, 1.47351, 1.47373, 1.46882, 1.46809, 1.46714, 1.4672, 1.47772, 1.46612, 1.46651, 1.47094, 1.47578, 1.46913, 1.48331, 1.4865, 1.48787, 1.47171, 1.46821, 1.4802, 1.46723, 1.47379, 1.46841, 1.46785, 1.47559, 1.47509, 1.46854, 1.47345, 1.47159, 1.46793, 1.47819, 1.48813, 1.4716, 1.47495, 1.46872, 1.47829, 1.47064, 1.47018, 1.47559, 1.47576, 1.47037, 1.47433, 1.47533, 1.47013, 1.47921, 1.47494, 1.4767, 1.47607, 1.47345, 1.47128, 1.47431, 1.46759, 1.46948, 1.46669, 1.47222, 1.46674, 1.47388, 1.47388, 1.46524, 1.47407, 1.47207, 1.46963, 1.47611, 1.47057, 1.47046, 1.47507, 1.4718, 1.47093, 1.46875, 1.47966, 1.47691, 1.47958, 1.46848, 1.47659, 1.47233, 1.46829, 1.47134, 1.47162, 1.47084, 1.46812, 1.46169, 1.47005, 1.47196, 1.47131, 1.4779, 1.47053, 1.46873, 1.47177, 1.47562, 1.47441, 1.47279, 1.4738, 1.47473, 1.47647, 1.4711, 1.47612, 1.47591, 1.48126, 1.47512, 1.47351, 1.47769, 1.46263, 1.47234, 1.47526, 1.47224, 1.47085, 1.46942, 1.46803, 1.4759, 1.47343, 1.46362, 
1.4685, 1.47079, 1.47101, 1.47158, 1.47044, 1.46992, 1.46298, 1.47836, 1.46169, 1.46751, 1.47839, 1.47255, 1.47103, 1.47052, 1.46863, 1.4668, 1.4769, 1.47204, 1.4723, 1.47157, 1.4667, 1.47441, 1.48003, 1.47181, 1.48009, 1.48373, 1.47652, 1.4796, 1.47353, 1.47567, 1.47796, 1.47632, 1.48009, 1.4717, 1.47188, 1.48104, 1.47363, 1.47129, 1.47793, 1.47574, 1.47484, 1.47619, 1.47177, 1.47614, 1.47933, 1.47156, 1.46844, 1.4802, 1.47829, 1.47093, 1.4754, 1.47276, 1.57859, 1.4684, 1.47537, 1.54583, 1.47639, 1.57948, 1.47918, 1.48066, 1.48212, 1.4774, 1.47852, 1.47639, 1.47826, 1.48039, 1.4739, 1.4819, 1.48028, 1.47407, 1.47624, 1.48205, 1.47628, 1.48393, 1.48589, 1.47517, 1.47758, 1.47729, 1.48745, 1.47685, 1.48033, 1.47602, 1.47812, 1.48054, 1.47432, 1.47337, 1.47804, 1.47123, 1.47425, 1.47715, 1.47794, 1.47273, 1.47454, 1.47875, 1.4782, 1.47577, 1.47167, 1.47763, 1.4744, 1.47683, 1.48168, 1.47497, 1.47434, 1.4796, 1.4776, 1.47214, 1.47435, 1.47766, 1.4835, 1.48072, 1.4744, 1.48392, 1.47533, 1.47683, 1.47742, 1.48516, 1.47634, 1.478, 1.47244, 1.48265, 1.47422, 1.48296, 1.48311, 1.47628, 1.47751, 1.48129, 1.47507, 1.48075, 1.47775, 1.47657, 1.48203, 1.48345, 1.48818, 1.48194, 1.48374, 1.482, 1.48749, 1.48551, 1.48527, 1.4871, 1.49114, 1.48723, 1.47874, 1.47877, 1.48314, 1.47745, 1.47138, 1.4823, 1.4909, 1.48278, 1.48582, 1.48063, 1.47195, 1.47501, 1.47117, 1.47685, 1.47555, 1.47306, 1.54386, 1.47358, 1.57973, 1.47563, 1.47575, 1.56224, 1.47774, 1.4817, 1.48012, 1.48778, 1.47737, 1.47738, 1.48069, 1.47712, 1.47909, 1.47385, 1.47532, 1.47459, 1.47167, 1.47808, 1.48123, 1.47993, 1.46614, 1.46983, 1.47318, 1.47539, 1.47425, 1.47523, 1.47895, 1.47481, 1.4698, 1.46941, 1.47466, 1.47011, 1.46611, 1.47663, 1.47626, 1.4741, 1.47847, 1.46407, 1.47268, 1.47738, 1.46488, 1.48113, 1.47284, 1.46934, 1.47784, 1.4777]}, "lm loss validation": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [5.6001]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [5.6001]}, "lm loss validation ppl": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [270.45398]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 4, "step_interval": 5, "values": [270.45398]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_dev.json new file mode 100644 index 000000000..178565f51 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_dev.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.84434, + 10.87343, + 10.85057, + 10.81084, + 10.64478, + 10.63856, + 10.42829, + 10.13529, + 9.9354, + 9.83536, + 9.58562, + 9.84798, + 9.88582, + 9.63128, + 9.79015, + 9.51139, + 9.45969, + 9.65541, + 9.38989, + 9.33926, + 9.24938, + 9.15128, + 9.18196, + 9.0045, + 9.19833, + 9.06658, + 9.16104, + 9.16968, + 9.30055, + 8.98918, + 8.92952, + 9.05033, + 9.04653, + 8.66027, + 8.72522, + 8.75656, + 8.69485, + 8.74326, + 8.66685, + 8.7728, + 8.67074, + 8.86153, + 8.8433, + 8.50914, + 8.39911, + 8.43859, + 8.49596, + 8.39384, + 8.44083, + 8.59281, + 8.37629, + 8.2001, + 8.23362, + 8.23015, + 8.27548, + 7.92086, + 8.10003, + 7.89799, + 8.25216, + 8.23462, + 8.01021, + 7.97597, + 7.9264, + 7.74459, + 7.748, + 7.65018, + 7.52046, 
+ 7.91112, + 7.70254, + 7.456, + 7.74697, + 7.77483, + 7.54415, + 7.3027, + 7.45591, + 7.34318, + 7.46577, + 7.22819, + 7.63648, + 7.28207, + 7.34835, + 7.21309, + 7.21075, + 7.41924, + 7.17318, + 7.28141, + 6.99426, + 7.00286, + 7.03961, + 7.13676, + 6.822, + 6.9855, + 7.08945, + 6.99871, + 6.87487, + 6.75719, + 6.99117, + 7.06005, + 6.70456, + 6.58452, + 6.72787, + 6.74473, + 6.73373, + 6.7382, + 6.6584, + 6.40648, + 6.63688, + 6.61955, + 6.44576, + 6.62788, + 6.74244, + 6.61006, + 6.72544, + 6.69264, + 6.62569, + 6.50572, + 6.59635, + 6.40504, + 6.66311, + 6.24639, + 6.25134, + 6.30293, + 6.39011, + 6.3472, + 6.45168, + 6.29229, + 6.33985, + 6.23688, + 6.20384, + 6.40017, + 6.32742, + 6.32422, + 6.16691, + 6.16021, + 6.24067, + 6.38468, + 6.20364, + 6.15286, + 6.18196, + 6.11784, + 6.06616, + 6.07804, + 6.26273, + 6.41356, + 6.26419, + 6.30289, + 6.10616, + 6.18152, + 6.00825, + 6.03597, + 5.96121, + 6.25362, + 6.19475, + 5.97105, + 5.78892, + 6.1312, + 5.85287, + 6.10817, + 5.79121, + 6.16545, + 6.14698, + 6.08542, + 5.92808, + 6.11875, + 5.94753, + 6.19922, + 5.89541, + 5.79008, + 5.78091, + 5.68691, + 6.01341, + 6.00102, + 6.06828, + 5.89084, + 6.04196, + 5.96792, + 5.99841, + 5.99525, + 5.95169, + 5.84243, + 5.95132, + 5.61796, + 5.70314, + 5.88856, + 5.84026, + 5.86305, + 5.76304, + 5.83656, + 5.72719, + 5.56214, + 5.72112, + 5.62344, + 5.83074, + 5.60385, + 5.7076, + 5.70851, + 5.89941, + 5.64331, + 5.84777, + 5.74091, + 5.86663, + 5.32913, + 5.89635, + 5.87437, + 5.85388, + 5.41178, + 5.40838, + 5.62884, + 5.59534, + 5.48296, + 5.57705, + 5.67454, + 5.47707, + 5.74309, + 5.50833, + 5.59207, + 5.62207, + 5.61979, + 5.51213, + 5.61257, + 5.67073, + 5.67911, + 5.58501, + 5.66043, + 5.37203, + 5.67588, + 5.62767, + 5.42011, + 5.58178, + 5.62963, + 5.55361, + 5.3406, + 5.53513, + 5.48634, + 5.48134, + 5.38001, + 5.55335, + 5.60291, + 5.3855, + 5.51982, + 5.4869, + 5.33392, + 5.50985, + 5.4109, + 5.44586, + 5.31905, + 5.06585, + 5.47792, + 5.56891, + 5.71472, + 5.4116, + 5.6004, + 5.63428, + 5.23158, + 5.26784, + 5.39219, + 5.39546, + 5.32677, + 5.49847, + 5.18449, + 5.2968, + 5.24785, + 5.37475, + 5.25356, + 5.4427, + 5.53544, + 5.30755, + 5.43162, + 5.34057, + 5.07742, + 5.3105, + 5.2513, + 5.30299, + 5.10864, + 5.27348, + 5.26261, + 5.47314, + 5.15993, + 5.26482, + 5.20655, + 5.3524, + 4.98067, + 4.91136, + 5.32265, + 5.39056, + 5.22683, + 5.32037, + 5.10162, + 5.16075, + 5.26068, + 5.07477, + 5.2665, + 5.06803, + 5.34087, + 5.24754, + 5.14536, + 5.2427, + 5.03942, + 5.31639, + 5.05259, + 5.028, + 5.13985, + 5.10959, + 5.2711, + 5.15231, + 5.27332, + 5.09281, + 5.09413, + 5.24576, + 5.32664, + 5.25301, + 5.19004, + 5.14196, + 5.29006, + 4.9529, + 5.20696, + 5.09518, + 5.30439, + 5.17088, + 5.18705, + 5.11541, + 4.98195, + 4.99339, + 5.2219, + 5.30712, + 5.09994, + 5.05467, + 4.91696, + 5.12387, + 5.1162, + 4.92675, + 5.33512, + 5.02297, + 5.09855, + 5.1647, + 5.00177, + 5.06604, + 5.06519, + 4.9938, + 5.07915, + 5.16172, + 4.97704, + 5.18061, + 4.92631, + 4.92011, + 5.06494, + 4.98947, + 4.90622, + 4.7743, + 4.94211, + 5.11143, + 5.01084, + 5.0159, + 5.3267, + 4.95652, + 4.98832, + 5.04364, + 4.80948, + 4.72945, + 4.99165, + 5.0429, + 4.87065, + 4.95272, + 5.04422, + 5.02216, + 4.81261, + 4.89101, + 4.90203, + 4.82648, + 4.73442, + 5.00558, + 4.75484, + 5.20509, + 4.78834, + 4.99179, + 4.73272, + 4.78083, + 4.81532, + 4.64586, + 4.65217, + 4.83878, + 4.8041, + 4.79376, + 4.91789, + 4.88008, + 4.92551, + 4.76829, + 4.87736, + 4.72836, + 4.9114, + 4.95389, + 4.87038, + 4.70453, + 
4.77938, + 4.89906, + 4.70579, + 4.85315, + 4.68969, + 4.68533, + 4.6408 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 65.0, + 72.0, + 81.0, + 76.0, + 70.0, + 86.0, + 92.0, + 100.0, + 95.0, + 121.0, + 118.0, + 150.0, + 126.0, + 174.0, + 178.0, + 176.0, + 175.0, + 175.0, + 160.0, + 180.0, + 172.0, + 163.0, + 172.0, + 175.0, + 186.0, + 162.0, + 218.0, + 187.0, + 173.0, + 157.0, + 155.0, + 146.0, + 159.0, + 193.0, + 130.0, + 155.0, + 129.0, + 199.0, + 160.0, + 180.0, + 150.0, + 169.0, + 170.0, + 198.0, + 157.0, + 171.0, + 158.0, + 193.0, + 206.0, + 230.0, + 179.0, + 203.0, + 193.0, + 154.0, + 162.0, + 189.0, + 160.0, + 154.0, + 194.0, + 223.0, + 184.0, + 182.0, + 174.0, + 151.0, + 198.0, + 237.0, + 186.0, + 168.0, + 179.0, + 178.0, + 237.0, + 233.0, + 164.0, + 208.0, + 216.0, + 192.0, + 228.0, + 205.0, + 225.0, + 214.0, + 206.0, + 237.0, + 234.0, + 263.0, + 225.0, + 192.0, + 197.0, + 207.0, + 156.0, + 211.0, + 177.0, + 199.0, + 215.0, + 208.0, + 212.0, + 170.0, + 214.0, + 204.0, + 209.0, + 186.0, + 187.0, + 180.0, + 166.0, + 145.0, + 154.0, + 169.0, + 145.0, + 162.0, + 152.0, + 192.0, + 162.0, + 175.0, + 167.0, + 161.0, + 136.0, + 135.0, + 140.0, + 121.0, + 164.0, + 128.0, + 137.0, + 114.0, + 120.0, + 142.0, + 116.0, + 128.0, + 97.0, + 132.0, + 132.0, + 105.0, + 157.0, + 143.0, + 145.0, + 130.0, + 135.0, + 126.0, + 122.0, + 102.0, + 137.0, + 107.0, + 127.0, + 87.0, + 99.0, + 136.0, + 96.0, + 119.0, + 96.0, + 121.0, + 127.0, + 141.0, + 120.0, + 132.0, + 97.0, + 117.0, + 97.0, + 102.0, + 118.0, + 127.0, + 104.0, + 100.0, + 128.0, + 104.0, + 107.0, + 103.0, + 110.0, + 97.0, + 108.0, + 126.0, + 102.0, + 126.0, + 127.0, + 100.0, + 108.0, + 111.0, + 106.0, + 112.0, + 94.0, + 105.0, + 116.0, + 106.0, + 96.0, + 114.0, + 116.0, + 149.0, + 120.0, + 102.0, + 111.0, + 117.0, + 94.0, + 103.0, + 114.0, + 101.0, + 112.0, + 110.0, + 112.0, + 87.0, + 116.0, + 95.0, + 119.0, + 116.0, + 116.0, + 93.0, + 103.0, + 99.0, + 93.0, + 115.0, + 115.0, + 92.0, + 99.0, + 125.0, + 114.0, + 102.0, + 102.0, + 100.0, + 115.0, + 107.0, + 118.0, + 113.0, + 109.0, + 110.0, + 97.0, + 103.0, + 96.0, + 99.0, + 115.0, + 118.0, + 105.0, + 117.0, + 104.0, + 105.0, + 113.0, + 97.0, + 97.0, + 114.0, + 97.0, + 99.0, + 96.0, + 98.0, + 94.0, + 126.0, + 101.0, + 98.0, + 99.0, + 79.0, + 99.0, + 80.0, + 105.0, + 104.0, + 106.0, + 107.0, + 123.0, + 109.0, + 104.0, + 122.0, + 122.0, + 107.0, + 102.0, + 103.0, + 92.0, + 111.0, + 112.0, + 102.0, + 127.0, + 96.0, + 112.0, + 106.0, + 104.0, + 90.0, + 86.0, + 96.0, + 112.0, + 115.0, + 100.0, + 128.0, + 109.0, + 107.0, + 109.0, + 101.0, + 99.0, + 95.0, + 99.0, + 127.0, + 102.0, + 118.0, + 107.0, + 94.0, + 130.0, + 89.0, + 101.0, + 103.0, + 81.0, + 92.0, + 105.0, + 102.0, + 95.0, + 99.0, + 122.0, + 110.0, + 97.0, + 107.0, + 114.0, + 105.0, + 125.0, + 91.0, + 111.0, + 108.0, + 85.0, + 105.0, + 118.0, + 113.0, + 100.0, + 101.0, + 120.0, + 98.0, + 98.0, + 92.0, + 93.0, + 107.0, + 119.0, + 132.0, + 132.0, + 100.0, + 120.0, + 112.0, + 114.0, + 92.0, + 88.0, + 104.0, + 120.0, + 125.0, + 106.0, + 99.0, + 125.0, + 106.0, + 94.0, + 138.0, + 104.0, + 106.0, + 111.0, + 95.0, + 109.0, + 116.0, + 108.0, + 114.0, + 110.0, + 106.0, + 123.0, + 102.0, + 134.0, + 125.0, + 112.0, + 102.0, + 119.0, + 111.0, + 102.0, + 120.0, + 110.0, + 102.0, + 124.0, + 106.0, + 115.0, + 112.0, + 100.0, + 127.0, + 123.0, + 112.0, + 118.0, + 113.0, + 112.0, + 92.0, + 111.0, + 112.0, + 85.0, + 87.0, + 132.0, + 118.0, + 100.0, + 99.0, + 87.0, + 114.0, + 108.0, + 131.0, + 120.0, + 
127.0, + 113.0, + 111.0, + 102.0, + 126.0, + 117.0, + 132.0, + 103.0, + 120.0, + 114.0, + 120.0, + 101.0, + 107.0, + 106.0, + 124.0, + 137.0, + 117.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 18.53864, + 0.95588, + 0.98728, + 0.9479, + 0.9533, + 0.94063, + 0.94265, + 0.94346, + 0.94, + 0.94193, + 0.94448, + 0.94, + 0.94178, + 0.95318, + 0.94344, + 0.94282, + 0.93703, + 0.9594, + 0.93761, + 0.93676, + 0.94059, + 0.94063, + 0.94496, + 0.93892, + 0.9449, + 0.95488, + 0.94465, + 0.95353, + 0.94176, + 0.95336, + 0.95058, + 0.98447, + 0.94686, + 0.98878, + 0.95268, + 0.94258, + 0.94399, + 0.93889, + 0.94158, + 0.94559, + 0.97363, + 0.95633, + 0.95485, + 0.96508, + 0.94859, + 0.94248, + 0.94135, + 0.93696, + 0.946, + 0.93538, + 0.94544, + 0.9507, + 0.94314, + 0.94298, + 0.93954, + 0.93721, + 0.94889, + 0.93927, + 0.93203, + 0.93941, + 0.94011, + 0.94392, + 0.94659, + 0.94179, + 0.94991, + 0.94921, + 0.94542, + 0.94419, + 0.95155, + 0.94371, + 0.95683, + 0.93985, + 0.94159, + 0.95114, + 0.94329, + 0.93652, + 0.94172, + 0.94478, + 0.94508, + 0.9586, + 0.94289, + 0.94346, + 0.9572, + 0.94962, + 0.95027, + 0.94705, + 0.94819, + 0.94109, + 0.94809, + 0.95085, + 0.95144, + 0.94471, + 0.94746, + 0.96865, + 0.96892, + 0.94386, + 0.96563, + 0.9431, + 0.94067, + 0.94592, + 0.95403, + 0.96047, + 0.95154, + 0.94462, + 0.94607, + 0.95516, + 0.94081, + 0.95113, + 0.93236, + 0.94367, + 0.94485, + 0.94482, + 0.94763, + 0.95326, + 0.9491, + 0.94093, + 0.94773, + 0.95426, + 0.96206, + 0.94813, + 0.97033, + 0.94237, + 0.94199, + 0.94838, + 0.95178, + 0.94135, + 0.94579, + 0.93951, + 0.94911, + 0.95218, + 0.94178, + 0.94851, + 0.9509, + 0.94999, + 0.9493, + 0.94828, + 0.94978, + 0.94476, + 0.94705, + 0.95521, + 0.95104, + 0.94511, + 0.94837, + 0.94912, + 0.94671, + 0.9459, + 0.94956, + 0.95319, + 0.95821, + 0.9485, + 0.95174, + 0.94765, + 0.96003, + 0.94582, + 0.95184, + 0.95612, + 0.95158, + 0.98107, + 0.94641, + 0.95282, + 0.95172, + 0.9491, + 0.94978, + 0.94789, + 0.94792, + 0.94025, + 0.93956, + 0.93183, + 0.93056, + 0.93823, + 0.93333, + 0.96058, + 0.93797, + 0.93793, + 0.94018, + 0.93813, + 0.93817, + 0.95695, + 0.93824, + 0.94699, + 0.94388, + 0.94587, + 0.95454, + 0.94299, + 0.94677, + 0.9404, + 0.93396, + 0.9321, + 0.93528, + 0.94403, + 0.9477, + 0.94225, + 0.94179, + 0.93868, + 0.95141, + 0.94067, + 0.94856, + 0.94009, + 0.9422, + 0.94504, + 0.94152, + 0.96476, + 0.94531, + 0.94649, + 0.94942, + 0.94029, + 1.0097, + 0.94409, + 0.95112, + 0.94884, + 0.95061, + 0.95583, + 0.95095, + 0.95022, + 0.95212, + 0.94448, + 0.94873, + 0.95662, + 0.96522, + 0.94569, + 0.94838, + 0.94514, + 0.94892, + 0.95044, + 0.96233, + 0.95231, + 0.94812, + 0.94006, + 0.94158, + 0.943, + 0.94399, + 0.94347, + 0.95689, + 0.95405, + 0.95444, + 0.94624, + 0.93701, + 0.94525, + 0.94239, + 0.94211, + 0.94566, + 0.9479, + 0.94417, + 0.94624, + 0.94886, + 0.96213, + 0.94232, + 0.94635, + 0.94811, + 0.94497, + 0.94019, + 0.93701, + 0.94403, + 0.93885, + 0.94132, + 0.94052, + 0.93236, + 0.95086, + 0.9407, + 0.94154, + 0.9449, + 0.94425, + 0.94813, + 0.94489, + 0.94435, + 0.94217, + 0.94314, + 0.93934, + 0.95872, + 0.94958, + 0.94957, + 0.95599, + 0.95388, + 0.95606, + 0.94371, + 0.94632, + 0.94553, + 0.95892, + 0.953, + 0.94963, + 0.94155, + 0.95559, + 0.94947, + 0.94817, + 0.95593, + 0.95566, + 0.94408, + 0.95495, + 0.949, + 0.95776, + 0.95699, + 0.95315, + 0.95048, + 0.95401, + 0.96139, + 0.97114, + 0.94534, + 0.94445, + 0.94874, + 0.94385, + 0.95005, + 0.95314, + 0.95076, 
+ 0.94059, + 0.95293, + 0.95445, + 0.95102, + 0.9472, + 0.93973, + 0.94443, + 0.9388, + 0.94286, + 0.94317, + 0.94195, + 0.9419, + 0.94506, + 0.95338, + 0.94558, + 0.94449, + 0.94354, + 0.93761, + 0.95019, + 0.93809, + 0.94284, + 0.94196, + 0.93931, + 0.93559, + 0.94288, + 0.93906, + 0.93847, + 0.93964, + 0.93919, + 0.94356, + 0.95154, + 0.9405, + 0.94607, + 0.94801, + 0.94918, + 0.9443, + 0.97237, + 0.94775, + 0.94762, + 0.94701, + 0.94383, + 0.95085, + 0.95617, + 0.95529, + 0.95966, + 0.95961, + 0.96501, + 0.95501, + 0.94915, + 0.94926, + 0.94879, + 0.95826, + 0.95473, + 0.95968, + 0.94356, + 0.96027, + 0.95401, + 0.94791, + 0.95295, + 0.947, + 0.95173, + 0.94958, + 0.94613, + 0.94941, + 0.94801, + 0.9486, + 0.96463, + 0.94302, + 0.95219, + 0.9442, + 0.94287, + 0.93815, + 0.93529, + 0.93952, + 0.94162, + 0.93707, + 0.93837, + 0.94009, + 0.94154, + 0.94407, + 0.94597, + 0.94076, + 0.93482, + 0.93691, + 0.94139, + 0.94406, + 0.94631, + 0.93728, + 0.92955, + 0.94906, + 0.94489, + 0.94899, + 0.94887, + 0.94665, + 0.94811, + 0.93798, + 0.94313 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_lts.json new file mode 100644 index 000000000..f822a205e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.84435, + 10.87318, + 10.85036, + 10.81075, + 10.64476, + 10.63865, + 10.4284, + 10.13527, + 9.9354, + 9.83535, + 9.58564, + 9.84799, + 9.88584, + 9.63126, + 9.79019, + 9.51136, + 9.45967, + 9.65536, + 9.38991, + 9.3393, + 9.24938, + 9.15121, + 9.1819, + 9.00438, + 9.19827, + 9.06667, + 9.1611, + 9.16974, + 9.30047, + 8.98931, + 8.9295, + 9.05025, + 9.04643, + 8.66023, + 8.72503, + 8.75641, + 8.69453, + 8.74311, + 8.66664, + 8.77265, + 8.67046, + 8.86117, + 8.84289, + 8.50887, + 8.39866, + 8.43817, + 8.49539, + 8.39331, + 8.44014, + 8.59211, + 8.37558, + 8.19954, + 8.23308, + 8.22973, + 8.27486, + 7.9203, + 8.09935, + 7.89759, + 8.25172, + 8.23421, + 8.00968, + 7.97527, + 7.92604, + 7.74403, + 7.74728, + 7.64954, + 7.51978, + 7.9104, + 7.70203, + 7.45557, + 7.74663, + 7.7747, + 7.54395, + 7.30276, + 7.45598, + 7.34312, + 7.46591, + 7.22838, + 7.63706, + 7.28267, + 7.34901, + 7.21386, + 7.21177, + 7.41978, + 7.17382, + 7.2822, + 6.99443, + 7.00278, + 7.03963, + 7.13669, + 6.82176, + 6.98519, + 7.08886, + 6.99826, + 6.87461, + 6.75718, + 6.99116, + 7.06112, + 6.70481, + 6.58484, + 6.72791, + 6.74611, + 6.73451, + 6.73883, + 6.6589, + 6.40659, + 6.63739, + 6.6201, + 6.44607, + 6.62819, + 6.74266, + 6.6102, + 6.72607, + 6.69279, + 6.6261, + 6.50591, + 6.59661, + 6.40511, + 6.66302, + 6.24641, + 6.25042, + 6.30258, + 6.38946, + 6.34694, + 6.45156, + 6.2927, + 6.33962, + 6.23686, + 6.20391, + 6.39902, + 6.32867, + 6.32319, + 6.16976, + 6.16361, + 6.24291, + 6.38627, + 6.2076, + 6.15571, + 6.1854, + 6.12408, + 6.07117, + 6.07793, + 6.26449, + 6.41645, + 6.26318, + 6.30431, + 6.10357, + 6.18374, + 6.00783, + 6.03849, + 5.96044, + 6.26013, + 6.19494, + 5.97729, + 5.79578, + 6.1331, + 5.85925, + 6.11082, + 5.79246, + 6.16831, + 6.14892, + 6.08853, + 5.92954, + 6.11667, + 5.94404, + 6.19642, + 5.89309, + 5.78869, + 5.77689, + 5.68542, + 6.01319, + 5.99761, + 6.06692, + 5.88893, + 6.04105, 
+ 5.96721, + 5.99332, + 5.99407, + 5.95322, + 5.84284, + 5.95079, + 5.62035, + 5.70822, + 5.89257, + 5.84404, + 5.86509, + 5.76428, + 5.83817, + 5.72742, + 5.56185, + 5.72363, + 5.62165, + 5.83076, + 5.60152, + 5.70824, + 5.70544, + 5.90203, + 5.64105, + 5.84826, + 5.73964, + 5.86591, + 5.32604, + 5.89223, + 5.87356, + 5.85147, + 5.41, + 5.41144, + 5.62864, + 5.59674, + 5.48661, + 5.57868, + 5.67447, + 5.47953, + 5.74541, + 5.51107, + 5.59383, + 5.62438, + 5.62002, + 5.52107, + 5.61786, + 5.67207, + 5.6824, + 5.58833, + 5.66064, + 5.37433, + 5.6798, + 5.63448, + 5.42498, + 5.58338, + 5.63097, + 5.55613, + 5.34386, + 5.53696, + 5.48795, + 5.48091, + 5.37734, + 5.55326, + 5.60019, + 5.38949, + 5.5279, + 5.48792, + 5.33294, + 5.50621, + 5.40686, + 5.44259, + 5.31539, + 5.06376, + 5.47807, + 5.5693, + 5.71381, + 5.41187, + 5.59881, + 5.63378, + 5.2309, + 5.26996, + 5.39128, + 5.39766, + 5.32837, + 5.49524, + 5.18234, + 5.29608, + 5.24551, + 5.37455, + 5.25382, + 5.44198, + 5.53542, + 5.30722, + 5.4305, + 5.33574, + 5.07255, + 5.30787, + 5.24998, + 5.30133, + 5.11033, + 5.27279, + 5.26164, + 5.47438, + 5.15836, + 5.26302, + 5.20727, + 5.35287, + 4.97954, + 4.90839, + 5.32324, + 5.38545, + 5.22544, + 5.31832, + 5.1045, + 5.16052, + 5.26033, + 5.06436, + 5.26, + 5.06647, + 5.33914, + 5.24433, + 5.14664, + 5.24337, + 5.03905, + 5.31384, + 5.05093, + 5.02403, + 5.13908, + 5.11049, + 5.27154, + 5.14863, + 5.27243, + 5.09211, + 5.09214, + 5.24408, + 5.32506, + 5.25134, + 5.19195, + 5.14156, + 5.28838, + 4.95217, + 5.20555, + 5.09208, + 5.30144, + 5.17197, + 5.18544, + 5.11186, + 4.98156, + 4.99246, + 5.22268, + 5.31003, + 5.09805, + 5.05635, + 4.91749, + 5.12083, + 5.11431, + 4.92685, + 5.33318, + 5.02149, + 5.09798, + 5.16452, + 5.003, + 5.06512, + 5.06538, + 4.99155, + 5.08009, + 5.16075, + 4.97693, + 5.18415, + 4.92412, + 4.9196, + 5.06212, + 4.99168, + 4.90728, + 4.77422, + 4.94399, + 5.11441, + 5.01167, + 5.01683, + 5.32789, + 4.95546, + 4.99161, + 5.0459, + 4.81109, + 4.7342, + 4.99359, + 5.04093, + 4.87128, + 4.95515, + 5.04762, + 5.02569, + 4.81796, + 4.8971, + 4.90335, + 4.82861, + 4.73834, + 5.00766, + 4.75352, + 5.20734, + 4.79121, + 4.99076, + 4.73247, + 4.782, + 4.81736, + 4.64772, + 4.65226, + 4.84032, + 4.80478, + 4.79458, + 4.91773, + 4.88236, + 4.92733, + 4.77215, + 4.87882, + 4.7305, + 4.91488, + 4.95406, + 4.8724, + 4.70482, + 4.77933, + 4.89858, + 4.70781, + 4.85495, + 4.69185, + 4.69004, + 4.64291 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 78.0, + 81.0, + 63.0, + 62.0, + 74.0, + 67.0, + 96.0, + 102.0, + 121.0, + 141.0, + 102.0, + 133.0, + 149.0, + 150.0, + 194.0, + 155.0, + 151.0, + 191.0, + 179.0, + 169.0, + 155.0, + 187.0, + 186.0, + 195.0, + 184.0, + 160.0, + 216.0, + 201.0, + 146.0, + 147.0, + 163.0, + 147.0, + 125.0, + 170.0, + 114.0, + 185.0, + 171.0, + 195.0, + 182.0, + 185.0, + 149.0, + 175.0, + 173.0, + 175.0, + 187.0, + 170.0, + 188.0, + 173.0, + 156.0, + 216.0, + 201.0, + 172.0, + 211.0, + 171.0, + 173.0, + 194.0, + 163.0, + 159.0, + 226.0, + 243.0, + 167.0, + 158.0, + 197.0, + 183.0, + 197.0, + 250.0, + 222.0, + 204.0, + 183.0, + 188.0, + 225.0, + 262.0, + 197.0, + 237.0, + 209.0, + 240.0, + 237.0, + 241.0, + 253.0, + 210.0, + 218.0, + 226.0, + 196.0, + 229.0, + 204.0, + 174.0, + 185.0, + 196.0, + 174.0, + 186.0, + 198.0, + 183.0, + 213.0, + 204.0, + 212.0, + 154.0, + 195.0, + 191.0, + 168.0, + 162.0, + 155.0, + 186.0, + 170.0, + 178.0, + 133.0, + 154.0, + 161.0, + 158.0, + 155.0, + 189.0, + 176.0, + 160.0, + 
148.0, + 161.0, + 147.0, + 141.0, + 142.0, + 102.0, + 160.0, + 139.0, + 160.0, + 120.0, + 120.0, + 148.0, + 144.0, + 95.0, + 100.0, + 137.0, + 114.0, + 139.0, + 133.0, + 138.0, + 134.0, + 113.0, + 125.0, + 130.0, + 111.0, + 128.0, + 114.0, + 115.0, + 115.0, + 110.0, + 112.0, + 129.0, + 124.0, + 125.0, + 123.0, + 125.0, + 121.0, + 115.0, + 129.0, + 109.0, + 119.0, + 123.0, + 106.0, + 113.0, + 115.0, + 137.0, + 131.0, + 135.0, + 128.0, + 118.0, + 123.0, + 97.0, + 115.0, + 123.0, + 112.0, + 105.0, + 115.0, + 120.0, + 112.0, + 91.0, + 89.0, + 96.0, + 121.0, + 127.0, + 106.0, + 114.0, + 115.0, + 111.0, + 99.0, + 103.0, + 94.0, + 146.0, + 102.0, + 113.0, + 104.0, + 114.0, + 117.0, + 116.0, + 111.0, + 135.0, + 117.0, + 126.0, + 98.0, + 102.0, + 99.0, + 100.0, + 101.0, + 106.0, + 125.0, + 92.0, + 121.0, + 123.0, + 106.0, + 115.0, + 88.0, + 95.0, + 123.0, + 98.0, + 99.0, + 81.0, + 95.0, + 118.0, + 90.0, + 102.0, + 109.0, + 91.0, + 106.0, + 92.0, + 114.0, + 105.0, + 91.0, + 97.0, + 107.0, + 95.0, + 97.0, + 100.0, + 97.0, + 117.0, + 119.0, + 104.0, + 85.0, + 113.0, + 115.0, + 118.0, + 94.0, + 103.0, + 112.0, + 94.0, + 89.0, + 111.0, + 119.0, + 114.0, + 111.0, + 104.0, + 121.0, + 122.0, + 123.0, + 106.0, + 109.0, + 106.0, + 115.0, + 118.0, + 124.0, + 91.0, + 98.0, + 110.0, + 106.0, + 104.0, + 104.0, + 100.0, + 96.0, + 87.0, + 104.0, + 115.0, + 99.0, + 114.0, + 126.0, + 108.0, + 128.0, + 110.0, + 109.0, + 115.0, + 103.0, + 127.0, + 86.0, + 107.0, + 98.0, + 107.0, + 110.0, + 118.0, + 88.0, + 109.0, + 113.0, + 90.0, + 92.0, + 100.0, + 110.0, + 103.0, + 104.0, + 119.0, + 98.0, + 121.0, + 113.0, + 121.0, + 97.0, + 109.0, + 87.0, + 120.0, + 136.0, + 123.0, + 100.0, + 96.0, + 111.0, + 116.0, + 97.0, + 108.0, + 134.0, + 93.0, + 102.0, + 93.0, + 101.0, + 126.0, + 102.0, + 100.0, + 96.0, + 123.0, + 111.0, + 123.0, + 89.0, + 106.0, + 118.0, + 125.0, + 99.0, + 121.0, + 92.0, + 109.0, + 123.0, + 126.0, + 96.0, + 124.0, + 135.0, + 94.0, + 107.0, + 117.0, + 114.0, + 95.0, + 123.0, + 103.0, + 119.0, + 124.0, + 115.0, + 115.0, + 115.0, + 101.0, + 115.0, + 88.0, + 106.0, + 105.0, + 122.0, + 125.0, + 131.0, + 112.0, + 130.0, + 117.0, + 102.0, + 94.0, + 129.0, + 115.0, + 130.0, + 92.0, + 126.0, + 105.0, + 125.0, + 107.0, + 93.0, + 137.0, + 113.0, + 93.0, + 104.0, + 106.0, + 89.0, + 126.0, + 97.0, + 92.0, + 122.0, + 105.0, + 107.0, + 121.0, + 111.0, + 122.0, + 118.0, + 137.0, + 130.0, + 124.0, + 119.0, + 98.0, + 117.0, + 92.0, + 101.0, + 119.0, + 112.0, + 128.0, + 104.0, + 125.0, + 94.0, + 105.0, + 97.0, + 121.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 23.34406, + 1.17039, + 1.173, + 1.16494, + 1.16483, + 1.16575, + 1.16204, + 1.15812, + 1.15652, + 1.16643, + 1.16078, + 1.15939, + 1.17115, + 1.16564, + 1.17859, + 1.17606, + 1.17037, + 1.19888, + 1.16983, + 1.16754, + 1.16743, + 1.17055, + 1.18175, + 1.16888, + 1.17043, + 1.17177, + 1.17337, + 1.15677, + 1.1758, + 1.17204, + 1.16365, + 1.17047, + 1.16702, + 1.16606, + 1.16955, + 1.31288, + 1.17263, + 1.16582, + 1.17041, + 1.16844, + 1.17019, + 1.1644, + 1.16909, + 1.17402, + 1.16538, + 1.16778, + 1.17243, + 1.17766, + 1.16747, + 1.17131, + 1.16449, + 1.1653, + 1.16464, + 1.15861, + 1.16313, + 1.16527, + 1.17028, + 1.15912, + 1.17959, + 1.1734, + 1.16816, + 1.16551, + 1.16725, + 1.16506, + 1.16617, + 1.16308, + 1.1618, + 1.16946, + 1.16336, + 1.16426, + 1.17998, + 1.16623, + 1.17535, + 1.16411, + 1.16492, + 1.17299, + 1.1715, + 1.17869, + 1.1699, + 1.16461, + 1.16863, + 1.16382, + 1.17047, + 1.16995, + 
1.1666, + 1.16418, + 1.16868, + 1.16579, + 1.15628, + 1.16798, + 1.17082, + 1.17331, + 1.17053, + 1.17126, + 1.17403, + 1.16881, + 1.16136, + 1.16745, + 1.16624, + 1.16489, + 1.18239, + 1.17464, + 1.1711, + 1.17745, + 1.17608, + 1.18067, + 1.18708, + 1.18901, + 1.18633, + 1.18603, + 1.1786, + 1.19418, + 1.17856, + 1.18123, + 1.1837, + 1.18369, + 1.18422, + 1.18768, + 1.19076, + 1.1812, + 1.19114, + 1.18605, + 1.14129, + 1.1575, + 1.14066, + 1.17639, + 1.18425, + 1.17001, + 1.19176, + 1.19108, + 1.1768, + 1.18485, + 1.20499, + 1.19189, + 1.18064, + 1.17787, + 1.19195, + 1.19927, + 1.23073, + 1.18677, + 1.19046, + 1.18187, + 1.18937, + 1.21167, + 1.18566, + 1.16935, + 1.1701, + 1.17709, + 1.19274, + 1.17738, + 1.17826, + 1.1664, + 1.17572, + 1.16895, + 1.16753, + 1.17343, + 1.16903, + 1.16971, + 1.16984, + 1.1811, + 1.18941, + 1.17477, + 1.1806, + 1.18288, + 1.1785, + 1.17701, + 1.17703, + 1.17515, + 1.18327, + 1.17311, + 1.1815, + 1.17316, + 1.17856, + 1.17628, + 1.17449, + 1.17852, + 1.17782, + 1.17168, + 1.17438, + 1.17469, + 1.17762, + 1.17228, + 1.17742, + 1.17533, + 1.18953, + 1.18268, + 1.18624, + 1.18127, + 1.20293, + 1.18602, + 1.16879, + 1.17376, + 1.17027, + 1.17957, + 1.17958, + 1.16575, + 1.15516, + 1.16934, + 1.16302, + 1.15534, + 1.1531, + 1.15489, + 1.15748, + 1.1576, + 1.15839, + 1.16766, + 1.15465, + 1.15694, + 1.18582, + 1.16999, + 1.1796, + 1.16425, + 1.17182, + 1.15726, + 1.1736, + 1.17724, + 1.17386, + 1.17529, + 1.17695, + 1.17936, + 1.18069, + 1.19431, + 1.18189, + 1.18116, + 1.19235, + 1.17797, + 1.18177, + 1.18354, + 1.18555, + 1.18237, + 1.17595, + 1.17961, + 1.17756, + 1.18234, + 1.18358, + 1.19028, + 1.18217, + 1.18209, + 1.17902, + 1.18184, + 1.18224, + 1.19588, + 1.17959, + 1.18437, + 1.18271, + 1.18035, + 1.18619, + 1.18573, + 1.18876, + 1.18917, + 1.18496, + 1.18739, + 1.19656, + 1.1969, + 1.19473, + 1.19324, + 1.19377, + 1.18283, + 1.18739, + 1.18158, + 1.16288, + 1.16683, + 1.16152, + 1.16074, + 1.1663, + 1.16591, + 1.17901, + 1.16145, + 1.17191, + 1.17179, + 1.16773, + 1.17832, + 1.1581, + 1.16003, + 1.15189, + 1.15472, + 1.16209, + 1.16107, + 1.1599, + 1.16155, + 1.16286, + 1.17, + 1.16147, + 1.15785, + 1.16164, + 1.15976, + 1.15927, + 1.57688, + 1.17603, + 1.17314, + 1.19224, + 1.17822, + 1.1882, + 1.176, + 1.17781, + 1.17984, + 1.17471, + 1.17492, + 1.18073, + 1.17692, + 1.17325, + 1.1761, + 1.17727, + 1.17111, + 1.17951, + 1.17441, + 1.1568, + 1.17807, + 1.17874, + 1.17104, + 1.2905, + 1.17805, + 1.17121, + 1.17166, + 1.17232, + 1.17459, + 1.17913, + 1.1708, + 1.17391, + 1.17531, + 1.17594, + 1.15935, + 1.18042, + 1.19, + 1.17793, + 1.17594, + 1.17602, + 1.17535, + 1.17812, + 1.17362, + 1.17173, + 1.17584, + 1.17377, + 1.17806, + 1.17619, + 1.17216, + 1.18278, + 1.18527, + 1.17597, + 1.18145, + 1.17917, + 1.18892, + 1.17329, + 1.17202, + 1.17508, + 1.17162, + 1.17129, + 1.17396, + 1.1761, + 1.17031, + 1.17211, + 1.17692, + 1.17391, + 1.17361, + 1.17899, + 1.1729, + 1.18055, + 1.17626, + 1.18141, + 1.17443, + 1.18144, + 1.17746, + 1.17164, + 1.17448, + 1.17469, + 1.17222, + 1.16882, + 1.17741, + 1.1801, + 1.17277, + 1.17196, + 1.17407, + 1.17266, + 1.18371, + 1.16781, + 1.17137, + 1.18646, + 1.17403, + 1.17343, + 1.18012, + 1.19053, + 1.18436, + 1.18323, + 1.18326, + 1.19376, + 1.18423, + 1.18445, + 1.18876, + 1.18424, + 1.18265, + 1.18961, + 1.18624, + 1.18422, + 1.19539, + 1.18601, + 1.18424, + 1.18663, + 1.19269, + 1.18535, + 1.18709 + ] + } +} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values_lts.json new file mode 100644 index 000000000..d9ac04b70 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.84281, + 10.87156, + 10.85024, + 10.81087, + 10.64538, + 10.63934, + 10.42688, + 10.13546, + 9.93506, + 9.83519, + 9.58594, + 9.84758, + 9.88551, + 9.63096, + 9.7903, + 9.51156, + 9.46066, + 9.65595, + 9.39004, + 9.33876, + 9.24973, + 9.15195, + 9.18229, + 9.0045, + 9.19852, + 9.06684, + 9.16057, + 9.1694, + 9.30036, + 8.98804, + 8.92928, + 9.05055, + 9.04612, + 8.66028, + 8.72508, + 8.75696, + 8.69546, + 8.74285, + 8.66664, + 8.77472, + 8.67052, + 8.86172, + 8.84439, + 8.50979, + 8.39973, + 8.43913, + 8.49858, + 8.39565, + 8.44221, + 8.5946, + 8.37829, + 8.20125, + 8.23616, + 8.23212, + 8.27689, + 7.92295, + 8.10195, + 7.89881, + 8.25251, + 8.23582, + 8.01118, + 7.97634, + 7.92749, + 7.74444, + 7.74885, + 7.65064, + 7.52144, + 7.91177, + 7.70414, + 7.45671, + 7.74832, + 7.77633, + 7.5457, + 7.3039, + 7.4575, + 7.34295, + 7.46662, + 7.22849, + 7.63676, + 7.28251, + 7.34888, + 7.21267, + 7.21199, + 7.41851, + 7.1723, + 7.28229, + 6.99638, + 7.00458, + 7.041, + 7.13727, + 6.82404, + 6.98585, + 7.08989, + 6.99796, + 6.87497, + 6.75678, + 6.9902, + 7.0599, + 6.70435, + 6.58313, + 6.72673, + 6.74468, + 6.73224, + 6.73703, + 6.65746, + 6.40543, + 6.63595, + 6.61889, + 6.4461, + 6.62563, + 6.74233, + 6.61107, + 6.72514, + 6.69288, + 6.62633, + 6.50732, + 6.5976, + 6.40631, + 6.66393, + 6.24768, + 6.25154, + 6.30255, + 6.39096, + 6.34863, + 6.44764, + 6.29035, + 6.33694, + 6.23532, + 6.19824, + 6.39433, + 6.32582, + 6.32144, + 6.16153, + 6.15745, + 6.23995, + 6.38527, + 6.20636, + 6.15496, + 6.18343, + 6.11838, + 6.06459, + 6.07836, + 6.26065, + 6.41059, + 6.25866, + 6.29585, + 6.10032, + 6.1774, + 6.00305, + 6.02765, + 5.95654, + 6.24947, + 6.18571, + 5.96627, + 5.78662, + 6.12372, + 5.84881, + 6.10369, + 5.78679, + 6.16294, + 6.14376, + 6.0842, + 5.92922, + 6.11492, + 5.9447, + 6.19974, + 5.89262, + 5.79056, + 5.78307, + 5.68749, + 6.01402, + 5.99524, + 6.06674, + 5.88914, + 6.03765, + 5.96656, + 5.99047, + 5.98834, + 5.94697, + 5.8355, + 5.94663, + 5.6128, + 5.69653, + 5.88316, + 5.8366, + 5.85812, + 5.75833, + 5.83104, + 5.71842, + 5.55202, + 5.71578, + 5.61535, + 5.82228, + 5.59303, + 5.70184, + 5.69953, + 5.89507, + 5.63439, + 5.84274, + 5.73236, + 5.86008, + 5.31958, + 5.89046, + 5.86601, + 5.84531, + 5.40447, + 5.40406, + 5.61921, + 5.59024, + 5.48118, + 5.57099, + 5.66723, + 5.47089, + 5.73832, + 5.50405, + 5.58544, + 5.61657, + 5.61237, + 5.50569, + 5.60738, + 5.6669, + 5.67189, + 5.58255, + 5.65371, + 5.36912, + 5.67319, + 5.6212, + 
5.41609, + 5.57636, + 5.62365, + 5.54654, + 5.33431, + 5.53159, + 5.4831, + 5.47937, + 5.37214, + 5.54636, + 5.59486, + 5.38333, + 5.51064, + 5.48113, + 5.32652, + 5.49925, + 5.4045, + 5.43954, + 5.31199, + 5.06367, + 5.4733, + 5.56319, + 5.70734, + 5.4102, + 5.60048, + 5.62764, + 5.22974, + 5.26831, + 5.38869, + 5.39546, + 5.32238, + 5.49179, + 5.1799, + 5.29588, + 5.24419, + 5.37317, + 5.24943, + 5.43946, + 5.53386, + 5.30678, + 5.42913, + 5.33771, + 5.07227, + 5.31196, + 5.25048, + 5.30133, + 5.10703, + 5.27013, + 5.26342, + 5.4691, + 5.15196, + 5.26536, + 5.21133, + 5.35484, + 4.98363, + 4.91007, + 5.32369, + 5.38822, + 5.23113, + 5.31853, + 5.1042, + 5.16326, + 5.26536, + 5.06514, + 5.25967, + 5.06459, + 5.34476, + 5.24852, + 5.14912, + 5.24104, + 5.03889, + 5.31716, + 5.05084, + 5.02763, + 5.1438, + 5.11162, + 5.27099, + 5.15001, + 5.27559, + 5.09088, + 5.09234, + 5.25039, + 5.32494, + 5.25054, + 5.19165, + 5.14073, + 5.29135, + 4.9522, + 5.20657, + 5.09061, + 5.30262, + 5.17436, + 5.18916, + 5.11216, + 4.98097, + 4.99321, + 5.22248, + 5.30876, + 5.09899, + 5.05573, + 4.91169, + 5.12563, + 5.11705, + 4.92669, + 5.33894, + 5.02766, + 5.10049, + 5.16601, + 5.0033, + 5.06756, + 5.0671, + 4.99549, + 5.08098, + 5.16392, + 4.97844, + 5.18513, + 4.93002, + 4.92386, + 5.05976, + 4.9961, + 4.90829, + 4.7741, + 4.94498, + 5.11669, + 5.01494, + 5.01393, + 5.33083, + 4.95827, + 4.99054, + 5.04514, + 4.80726, + 4.73417, + 4.99694, + 5.04196, + 4.87567, + 4.95538, + 5.04654, + 5.02371, + 4.81502, + 4.89538, + 4.90642, + 4.83132, + 4.74159, + 5.01714, + 4.75382, + 5.20665, + 4.7909, + 4.99173, + 4.73837, + 4.79161, + 4.82223, + 4.6564, + 4.65659, + 4.84461, + 4.8126, + 4.79697, + 4.92166, + 4.88529, + 4.92384, + 4.77039, + 4.88193, + 4.73381, + 4.91736, + 4.9605, + 4.87429, + 4.70962, + 4.78912, + 4.90775, + 4.71373, + 4.86621, + 4.69718, + 4.69178, + 4.64762 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 75.0, + 71.0, + 78.0, + 74.0, + 84.0, + 89.0, + 108.0, + 110.0, + 110.0, + 136.0, + 126.0, + 167.0, + 142.0, + 197.0, + 184.0, + 182.0, + 183.0, + 179.0, + 174.0, + 178.0, + 175.0, + 187.0, + 181.0, + 161.0, + 197.0, + 153.0, + 174.0, + 175.0, + 159.0, + 170.0, + 162.0, + 148.0, + 143.0, + 192.0, + 127.0, + 179.0, + 141.0, + 190.0, + 166.0, + 196.0, + 146.0, + 154.0, + 184.0, + 163.0, + 162.0, + 180.0, + 184.0, + 206.0, + 144.0, + 208.0, + 212.0, + 155.0, + 191.0, + 166.0, + 192.0, + 199.0, + 149.0, + 166.0, + 233.0, + 209.0, + 168.0, + 213.0, + 194.0, + 189.0, + 192.0, + 227.0, + 193.0, + 185.0, + 211.0, + 152.0, + 229.0, + 222.0, + 177.0, + 241.0, + 220.0, + 190.0, + 219.0, + 221.0, + 233.0, + 201.0, + 220.0, + 231.0, + 210.0, + 246.0, + 211.0, + 207.0, + 177.0, + 197.0, + 191.0, + 171.0, + 181.0, + 192.0, + 206.0, + 197.0, + 199.0, + 137.0, + 240.0, + 185.0, + 182.0, + 140.0, + 163.0, + 196.0, + 190.0, + 168.0, + 146.0, + 129.0, + 157.0, + 155.0, + 127.0, + 185.0, + 163.0, + 142.0, + 158.0, + 174.0, + 161.0, + 155.0, + 142.0, + 96.0, + 143.0, + 105.0, + 140.0, + 137.0, + 108.0, + 173.0, + 160.0, + 130.0, + 137.0, + 147.0, + 142.0, + 128.0, + 133.0, + 139.0, + 117.0, + 99.0, + 110.0, + 122.0, + 134.0, + 118.0, + 116.0, + 139.0, + 114.0, + 108.0, + 108.0, + 160.0, + 110.0, + 142.0, + 110.0, + 130.0, + 111.0, + 131.0, + 127.0, + 100.0, + 112.0, + 126.0, + 95.0, + 106.0, + 109.0, + 111.0, + 97.0, + 107.0, + 143.0, + 95.0, + 92.0, + 125.0, + 109.0, + 107.0, + 136.0, + 103.0, + 105.0, + 101.0, + 108.0, + 101.0, + 98.0, + 104.0, + 116.0, + 101.0, 
+ 113.0, + 103.0, + 107.0, + 108.0, + 109.0, + 136.0, + 132.0, + 134.0, + 112.0, + 74.0, + 103.0, + 106.0, + 96.0, + 101.0, + 102.0, + 105.0, + 124.0, + 105.0, + 105.0, + 107.0, + 109.0, + 91.0, + 82.0, + 108.0, + 115.0, + 107.0, + 108.0, + 103.0, + 100.0, + 119.0, + 92.0, + 75.0, + 106.0, + 109.0, + 108.0, + 118.0, + 99.0, + 90.0, + 80.0, + 109.0, + 106.0, + 105.0, + 97.0, + 103.0, + 97.0, + 121.0, + 88.0, + 109.0, + 95.0, + 98.0, + 100.0, + 123.0, + 103.0, + 111.0, + 105.0, + 102.0, + 87.0, + 91.0, + 96.0, + 110.0, + 92.0, + 109.0, + 90.0, + 105.0, + 100.0, + 112.0, + 101.0, + 92.0, + 101.0, + 90.0, + 98.0, + 95.0, + 111.0, + 118.0, + 113.0, + 113.0, + 97.0, + 90.0, + 113.0, + 115.0, + 100.0, + 122.0, + 105.0, + 121.0, + 129.0, + 112.0, + 98.0, + 106.0, + 110.0, + 93.0, + 83.0, + 92.0, + 111.0, + 103.0, + 107.0, + 124.0, + 101.0, + 133.0, + 100.0, + 98.0, + 84.0, + 142.0, + 98.0, + 106.0, + 91.0, + 104.0, + 96.0, + 106.0, + 125.0, + 87.0, + 110.0, + 101.0, + 104.0, + 92.0, + 104.0, + 97.0, + 92.0, + 102.0, + 89.0, + 95.0, + 101.0, + 104.0, + 109.0, + 113.0, + 109.0, + 124.0, + 134.0, + 109.0, + 115.0, + 116.0, + 93.0, + 116.0, + 119.0, + 96.0, + 106.0, + 102.0, + 122.0, + 104.0, + 92.0, + 101.0, + 102.0, + 95.0, + 128.0, + 139.0, + 129.0, + 100.0, + 119.0, + 112.0, + 101.0, + 117.0, + 96.0, + 131.0, + 83.0, + 112.0, + 94.0, + 104.0, + 95.0, + 116.0, + 111.0, + 112.0, + 126.0, + 136.0, + 109.0, + 91.0, + 110.0, + 123.0, + 106.0, + 115.0, + 107.0, + 117.0, + 130.0, + 102.0, + 123.0, + 113.0, + 134.0, + 91.0, + 101.0, + 136.0, + 117.0, + 103.0, + 127.0, + 118.0, + 124.0, + 107.0, + 120.0, + 97.0, + 104.0, + 107.0, + 129.0, + 114.0, + 110.0, + 114.0, + 123.0, + 103.0, + 85.0, + 108.0, + 112.0, + 107.0, + 124.0, + 104.0, + 95.0, + 98.0, + 98.0, + 110.0, + 103.0, + 128.0, + 124.0, + 112.0, + 109.0, + 137.0, + 115.0, + 109.0, + 110.0, + 119.0, + 129.0, + 100.0, + 115.0, + 121.0, + 111.0, + 114.0, + 104.0, + 121.0, + 112.0, + 104.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 24.02205, + 1.24506, + 1.24858, + 1.24333, + 1.25283, + 1.25037, + 1.25421, + 1.2463, + 1.2501, + 1.26513, + 1.24828, + 1.26203, + 1.26152, + 1.25844, + 1.24358, + 1.24649, + 1.24037, + 1.26933, + 1.24565, + 1.24581, + 1.25219, + 1.26148, + 1.25382, + 1.28389, + 1.25754, + 1.2668, + 1.25991, + 1.26913, + 1.25979, + 1.27196, + 1.26206, + 1.27391, + 1.2598, + 1.2609, + 1.26823, + 1.41237, + 1.25989, + 1.27952, + 1.26096, + 1.2672, + 1.2739, + 1.26104, + 1.26514, + 1.26304, + 1.26101, + 1.26808, + 1.28355, + 1.25498, + 1.25385, + 1.26471, + 1.26743, + 1.27834, + 1.25081, + 1.24998, + 1.273, + 1.25459, + 1.28314, + 1.25536, + 1.27322, + 1.25723, + 1.25258, + 1.2737, + 1.25174, + 1.25458, + 1.25465, + 1.26423, + 1.25884, + 1.25794, + 1.29369, + 1.25823, + 1.26468, + 1.25525, + 1.28545, + 1.25487, + 1.25381, + 1.26521, + 1.26327, + 1.25623, + 1.26167, + 1.28421, + 1.25744, + 2.38212, + 1.25396, + 1.25408, + 1.26624, + 1.26554, + 1.25271, + 1.26468, + 1.27195, + 1.27503, + 1.2657, + 1.2661, + 1.27456, + 1.26939, + 1.26586, + 1.28144, + 1.26291, + 1.26343, + 1.27277, + 1.26516, + 1.25715, + 1.25949, + 1.26476, + 1.27715, + 1.263, + 1.27197, + 1.2799, + 1.26544, + 1.26319, + 1.26268, + 1.27214, + 1.26451, + 1.26377, + 1.26014, + 1.27229, + 1.25668, + 1.26217, + 1.27766, + 1.25964, + 1.26318, + 1.26686, + 1.27178, + 1.28624, + 1.26331, + 1.27682, + 1.4189, + 1.28511, + 1.272, + 1.26632, + 1.27543, + 1.28147, + 1.27518, + 1.28733, + 1.28232, + 1.27614, + 1.27792, + 
1.27502, + 1.2703, + 1.269, + 1.26508, + 1.27296, + 1.26464, + 1.27352, + 1.25925, + 1.27647, + 1.27531, + 1.262, + 1.27258, + 1.26864, + 1.26393, + 1.27468, + 1.2704, + 1.2669, + 1.27408, + 1.26653, + 1.25934, + 1.27085, + 1.26066, + 1.26381, + 1.27106, + 1.26813, + 1.27425, + 1.2675, + 1.26972, + 1.27219, + 1.2599, + 1.25343, + 1.26631, + 1.26613, + 1.26456, + 1.26363, + 1.24696, + 1.24735, + 1.23999, + 1.24278, + 1.24375, + 1.30135, + 1.29599, + 1.41849, + 1.55305, + 1.28657, + 1.28352, + 1.27354, + 1.27715, + 1.27402, + 1.26602, + 1.2595, + 1.27111, + 1.25739, + 1.26466, + 1.26356, + 1.27812, + 1.27551, + 1.25594, + 1.26434, + 1.26429, + 1.26587, + 1.26167, + 1.25603, + 1.26467, + 1.25248, + 1.28015, + 1.25039, + 1.26242, + 1.25191, + 1.25406, + 1.28967, + 1.25465, + 1.25278, + 1.24787, + 1.28566, + 1.24579, + 1.23833, + 1.25526, + 1.24804, + 1.25288, + 1.25311, + 1.27069, + 1.2692, + 1.26358, + 1.26482, + 1.26587, + 1.25692, + 1.24695, + 1.2519, + 1.25969, + 1.25174, + 1.25841, + 1.26427, + 1.2659, + 1.24632, + 1.2552, + 1.24879, + 1.26097, + 1.25377, + 1.25145, + 1.2607, + 1.25105, + 1.26351, + 1.2637, + 1.26492, + 1.26318, + 1.25456, + 1.25979, + 1.25791, + 1.26316, + 1.25826, + 1.25874, + 1.25298, + 1.2801, + 1.25579, + 1.26876, + 1.2587, + 1.24948, + 1.2555, + 1.25745, + 1.26029, + 1.25145, + 1.26455, + 1.25779, + 1.25424, + 1.25778, + 1.2666, + 1.26833, + 1.25606, + 1.25517, + 1.24487, + 1.26487, + 1.26401, + 1.25739, + 1.25258, + 1.25456, + 1.26282, + 1.2624, + 1.25291, + 1.24606, + 1.24381, + 1.2644, + 1.26256, + 1.24699, + 1.25568, + 1.26046, + 1.26178, + 1.24752, + 1.24631, + 1.25387, + 1.25042, + 1.25335, + 1.24857, + 1.2779, + 1.25834, + 1.26516, + 1.26356, + 1.25971, + 1.24704, + 1.24808, + 1.25221, + 1.25458, + 1.24918, + 1.24796, + 1.25898, + 1.25776, + 1.24651, + 1.25908, + 1.25272, + 1.24913, + 1.25911, + 1.25475, + 1.25986, + 1.25067, + 1.26015, + 1.25973, + 1.26456, + 1.24812, + 1.26296, + 1.26051, + 1.25975, + 1.25669, + 1.25402, + 1.2504, + 1.24884, + 1.25361, + 1.25258, + 1.24646, + 1.25477, + 1.26152, + 1.25586, + 1.24538, + 1.24197, + 1.24636, + 1.26242, + 1.24754, + 1.25326, + 1.25781, + 1.25382, + 1.25739, + 1.25142, + 1.25264, + 1.26736, + 1.25905, + 1.25007, + 1.25292, + 1.25509, + 1.25421, + 1.25501, + 1.26274, + 1.25472, + 1.24705, + 1.2509, + 1.24897, + 1.25724, + 1.26927, + 1.2435, + 1.24864, + 1.25188, + 1.26436, + 1.25981, + 1.253, + 1.27425, + 1.25967, + 1.25959, + 1.25327, + 1.27673, + 1.25991, + 1.26104, + 1.27188, + 1.26418, + 1.26076, + 1.26686, + 1.26275, + 1.25723, + 1.25852, + 1.26733, + 1.26316, + 1.25518, + 1.25632, + 1.26586, + 1.26115, + 1.25001, + 1.25691, + 1.26643, + 1.26538, + 1.26127, + 1.2626, + 1.25793, + 1.26064, + 1.24679, + 1.26877, + 1.26311, + 1.26057, + 1.26505, + 1.26031, + 1.25609, + 1.25635, + 1.27454, + 1.2607, + 1.25592, + 1.26731, + 1.26013, + 1.25184 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values_dev.json diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values_lts.json new file mode 100644 index 000000000..8ab2e6aa8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.84281, + 10.8602, + 10.84999, + 10.84774, + 10.76636, + 10.77408, + 10.67858, + 10.52999, + 10.38404, + 10.29654, + 9.92018, + 10.03622, + 10.04292, + 9.75387, + 9.87024, + 9.5746, + 9.50961, + 9.70647, + 9.43153, + 9.37511, + 9.2839, + 9.18277, + 9.2068, + 9.02341, + 9.21672, + 9.08417, + 9.17272, + 9.1834, + 9.31583, + 9.00482, + 8.94553, + 9.06057, + 9.05805, + 8.66725, + 8.73031, + 8.76025, + 8.69527, + 8.7424, + 8.66437, + 8.77107, + 8.66573, + 8.85403, + 8.83635, + 8.4981, + 8.38759, + 8.42877, + 8.48639, + 8.38117, + 8.42713, + 8.57914, + 8.36219, + 8.18553, + 8.21873, + 8.21382, + 8.25922, + 7.90601, + 8.08557, + 7.88018, + 8.23301, + 8.21569, + 7.98993, + 7.95406, + 7.9038, + 7.7218, + 7.72536, + 7.62754, + 7.4981, + 7.88743, + 7.68187, + 7.43224, + 7.72578, + 7.75506, + 7.52549, + 7.28473, + 7.43749, + 7.325, + 7.44968, + 7.21207, + 7.61943, + 7.26503, + 7.33398, + 7.19587, + 7.1959, + 7.40349, + 7.15631, + 7.26599, + 6.98182, + 6.99043, + 7.02736, + 7.12446, + 6.81155, + 6.97364, + 7.07875, + 6.98755, + 6.86407, + 6.74572, + 6.97998, + 7.05045, + 6.69521, + 6.57372, + 6.71809, + 6.73769, + 6.72491, + 6.72932, + 6.64962, + 6.39817, + 6.62884, + 6.61225, + 6.44041, + 6.62049, + 6.73772, + 6.60649, + 6.72094, + 6.69103, + 6.62304, + 6.50533, + 6.59423, + 6.4041, + 6.66308, + 6.24515, + 6.24906, + 6.30054, + 6.38907, + 6.34697, + 6.4469, + 6.28762, + 6.33409, + 6.23225, + 6.19562, + 6.39132, + 6.32229, + 6.31914, + 6.15903, + 6.15439, + 6.23698, + 6.38374, + 6.20283, + 6.15101, + 6.18002, + 6.11521, + 6.05969, + 6.07001, + 6.25319, + 6.40492, + 6.25175, + 6.28985, + 6.09297, + 6.17173, + 5.99681, + 6.02122, + 5.95045, + 6.24644, + 6.18058, + 5.96137, + 5.78046, + 6.12011, + 5.84322, + 6.09822, + 5.78081, + 6.15781, + 6.14053, + 6.07776, + 5.9216, + 6.10613, + 5.93659, + 6.19189, + 5.88668, + 5.78198, + 5.77526, + 5.67823, + 6.00679, + 5.98742, + 6.06154, + 5.88349, + 6.03601, + 5.96, + 5.98847, + 5.9833, + 5.94207, + 5.83297, + 5.94365, + 5.60922, + 5.69609, + 5.88105, + 5.83424, + 5.85386, + 5.75731, + 5.83131, + 5.7185, + 5.55025, + 5.71302, + 5.61355, + 5.82048, + 5.59018, + 5.69903, + 5.69897, + 5.89103, + 5.63206, + 5.8395, + 5.72871, + 5.85809, + 5.31691, + 5.88601, + 5.86484, + 5.84617, + 5.40506, + 5.4014, + 5.61912, + 5.58866, + 5.48021, + 5.57073, + 5.66568, + 5.46994, + 5.73634, + 5.50306, + 5.5841, + 5.61686, + 5.61674, + 5.50882, + 5.61236, + 5.6652, + 5.67791, + 5.58162, + 5.65657, + 5.36804, + 5.67455, + 5.62344, + 5.41616, + 5.5772, + 5.62748, + 5.54855, + 5.33671, + 5.53535, + 5.48455, + 5.47652, + 5.37564, + 5.55193, + 5.5984, + 5.38152, + 5.5108, + 5.48257, + 5.33075, + 5.49836, + 5.40228, + 5.43822, + 5.31254, + 5.06398, + 5.4762, + 5.56579, + 5.71052, + 5.41274, + 5.60048, + 5.63276, + 5.23413, + 5.26919, + 5.38942, + 5.39341, + 5.32533, + 5.49404, + 5.18166, + 5.29727, + 5.24478, + 5.37352, + 5.25182, + 5.44215, + 5.53267, + 5.3099, + 5.43346, + 5.33577, + 5.07318, + 5.31092, + 5.25044, + 5.2999, + 
5.10968, + 5.27424, + 5.26315, + 5.4705, + 5.15808, + 5.26612, + 5.21445, + 5.35712, + 4.98463, + 4.91368, + 5.32349, + 5.38994, + 5.22877, + 5.32196, + 5.10427, + 5.16318, + 5.26658, + 5.06627, + 5.26492, + 5.06652, + 5.346, + 5.24918, + 5.15509, + 5.24631, + 5.04501, + 5.31881, + 5.05452, + 5.02952, + 5.14477, + 5.11544, + 5.27085, + 5.15606, + 5.282, + 5.09723, + 5.09588, + 5.25152, + 5.3321, + 5.25666, + 5.19714, + 5.14253, + 5.29088, + 4.9539, + 5.20872, + 5.09462, + 5.30323, + 5.17682, + 5.19418, + 5.11484, + 4.98736, + 4.99456, + 5.22345, + 5.31285, + 5.10172, + 5.06227, + 4.9149, + 5.1282, + 5.12213, + 4.92763, + 5.34106, + 5.02698, + 5.10671, + 5.17164, + 5.01014, + 5.06965, + 5.07235, + 4.99705, + 5.08526, + 5.16503, + 4.98231, + 5.18481, + 4.93544, + 4.92878, + 5.06693, + 4.99971, + 4.91319, + 4.77885, + 4.95138, + 5.12143, + 5.01874, + 5.01841, + 5.33612, + 4.96297, + 4.99367, + 5.05123, + 4.81546, + 4.74029, + 5.00003, + 5.04668, + 4.87836, + 4.96043, + 5.05128, + 5.029, + 4.82256, + 4.89557, + 4.90977, + 4.8381, + 4.74409, + 5.01875, + 4.75876, + 5.21068, + 4.79582, + 4.99901, + 4.74235, + 4.79046, + 4.82199, + 4.65865, + 4.65941, + 4.84913, + 4.81473, + 4.80628, + 4.92791, + 4.89144, + 4.93259, + 4.7758, + 4.88576, + 4.73689, + 4.91979, + 4.96589, + 4.88082, + 4.70772, + 4.7922, + 4.90855, + 4.7196, + 4.87298, + 4.70121, + 4.69977, + 4.65183 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 75.0, + 74.0, + 69.0, + 62.0, + 72.0, + 85.0, + 91.0, + 77.0, + 86.0, + 101.0, + 85.0, + 180.0, + 138.0, + 163.0, + 179.0, + 139.0, + 179.0, + 181.0, + 165.0, + 156.0, + 158.0, + 164.0, + 174.0, + 170.0, + 191.0, + 186.0, + 200.0, + 209.0, + 173.0, + 142.0, + 157.0, + 140.0, + 138.0, + 182.0, + 136.0, + 127.0, + 155.0, + 206.0, + 184.0, + 182.0, + 181.0, + 180.0, + 179.0, + 180.0, + 179.0, + 189.0, + 165.0, + 190.0, + 156.0, + 217.0, + 223.0, + 170.0, + 207.0, + 143.0, + 177.0, + 198.0, + 183.0, + 163.0, + 232.0, + 230.0, + 187.0, + 207.0, + 202.0, + 176.0, + 191.0, + 247.0, + 210.0, + 197.0, + 205.0, + 194.0, + 240.0, + 248.0, + 194.0, + 200.0, + 213.0, + 196.0, + 215.0, + 225.0, + 253.0, + 220.0, + 220.0, + 260.0, + 221.0, + 206.0, + 214.0, + 203.0, + 187.0, + 208.0, + 167.0, + 229.0, + 191.0, + 223.0, + 214.0, + 187.0, + 241.0, + 153.0, + 197.0, + 199.0, + 187.0, + 172.0, + 177.0, + 182.0, + 183.0, + 159.0, + 149.0, + 157.0, + 187.0, + 174.0, + 129.0, + 184.0, + 178.0, + 133.0, + 157.0, + 131.0, + 133.0, + 146.0, + 158.0, + 118.0, + 157.0, + 137.0, + 170.0, + 121.0, + 156.0, + 150.0, + 173.0, + 136.0, + 129.0, + 150.0, + 139.0, + 146.0, + 124.0, + 113.0, + 132.0, + 115.0, + 125.0, + 125.0, + 128.0, + 144.0, + 117.0, + 117.0, + 142.0, + 133.0, + 119.0, + 125.0, + 140.0, + 152.0, + 105.0, + 104.0, + 99.0, + 113.0, + 101.0, + 75.0, + 87.0, + 118.0, + 104.0, + 95.0, + 115.0, + 98.0, + 130.0, + 127.0, + 133.0, + 119.0, + 128.0, + 108.0, + 109.0, + 94.0, + 93.0, + 125.0, + 97.0, + 124.0, + 112.0, + 119.0, + 100.0, + 102.0, + 96.0, + 129.0, + 89.0, + 103.0, + 129.0, + 106.0, + 121.0, + 98.0, + 115.0, + 143.0, + 96.0, + 122.0, + 95.0, + 94.0, + 82.0, + 100.0, + 138.0, + 109.0, + 117.0, + 116.0, + 103.0, + 109.0, + 90.0, + 111.0, + 101.0, + 89.0, + 122.0, + 84.0, + 118.0, + 114.0, + 118.0, + 99.0, + 110.0, + 81.0, + 105.0, + 98.0, + 99.0, + 121.0, + 108.0, + 135.0, + 120.0, + 95.0, + 113.0, + 99.0, + 126.0, + 96.0, + 89.0, + 93.0, + 105.0, + 79.0, + 93.0, + 86.0, + 104.0, + 116.0, + 78.0, + 108.0, + 127.0, + 89.0, + 98.0, + 80.0, + 100.0, + 
76.0, + 90.0, + 89.0, + 113.0, + 130.0, + 91.0, + 100.0, + 112.0, + 115.0, + 118.0, + 93.0, + 90.0, + 103.0, + 100.0, + 104.0, + 93.0, + 86.0, + 117.0, + 112.0, + 106.0, + 86.0, + 101.0, + 120.0, + 102.0, + 97.0, + 111.0, + 96.0, + 121.0, + 106.0, + 109.0, + 100.0, + 109.0, + 97.0, + 100.0, + 116.0, + 106.0, + 111.0, + 118.0, + 117.0, + 106.0, + 113.0, + 97.0, + 105.0, + 97.0, + 121.0, + 108.0, + 86.0, + 113.0, + 109.0, + 119.0, + 83.0, + 104.0, + 105.0, + 105.0, + 93.0, + 119.0, + 86.0, + 118.0, + 98.0, + 96.0, + 91.0, + 104.0, + 97.0, + 111.0, + 86.0, + 125.0, + 125.0, + 116.0, + 120.0, + 95.0, + 117.0, + 107.0, + 97.0, + 116.0, + 102.0, + 106.0, + 98.0, + 138.0, + 119.0, + 96.0, + 95.0, + 102.0, + 99.0, + 112.0, + 122.0, + 113.0, + 111.0, + 102.0, + 118.0, + 105.0, + 107.0, + 102.0, + 117.0, + 106.0, + 89.0, + 103.0, + 114.0, + 138.0, + 93.0, + 88.0, + 117.0, + 126.0, + 124.0, + 103.0, + 100.0, + 131.0, + 99.0, + 118.0, + 116.0, + 98.0, + 101.0, + 101.0, + 94.0, + 108.0, + 123.0, + 115.0, + 105.0, + 110.0, + 104.0, + 115.0, + 119.0, + 115.0, + 117.0, + 108.0, + 108.0, + 99.0, + 110.0, + 114.0, + 121.0, + 132.0, + 123.0, + 99.0, + 120.0, + 94.0, + 121.0, + 100.0, + 131.0, + 89.0, + 133.0, + 115.0, + 84.0, + 112.0, + 116.0, + 115.0, + 137.0, + 107.0, + 112.0, + 94.0, + 126.0, + 121.0, + 115.0, + 139.0, + 119.0, + 98.0, + 116.0, + 116.0, + 124.0, + 124.0, + 84.0, + 87.0, + 126.0, + 116.0, + 115.0, + 116.0, + 127.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 25.403, + 1.36901, + 1.32789, + 1.35574, + 1.34115, + 1.3441, + 1.34468, + 1.33177, + 1.31979, + 1.35178, + 1.32886, + 1.33111, + 1.34487, + 1.3273, + 1.34186, + 1.34676, + 1.32736, + 1.33277, + 1.34223, + 1.3278, + 1.33346, + 1.33096, + 1.35516, + 1.33304, + 1.34537, + 1.32876, + 1.33649, + 1.33633, + 1.32353, + 1.31875, + 1.3419, + 1.32045, + 1.31879, + 1.33556, + 1.32183, + 1.33539, + 1.33467, + 1.31998, + 1.34002, + 1.32021, + 1.31828, + 1.34009, + 1.32231, + 1.32892, + 1.34004, + 1.34102, + 1.33151, + 1.34109, + 1.34054, + 1.32736, + 1.33084, + 1.33943, + 1.33163, + 1.34679, + 1.3493, + 1.34079, + 1.34467, + 1.36311, + 1.36072, + 1.33909, + 1.35483, + 1.34492, + 1.3287, + 1.34086, + 1.34508, + 1.3343, + 1.33604, + 1.34284, + 1.32854, + 1.33619, + 1.34638, + 1.32885, + 1.34151, + 1.3311, + 1.32446, + 1.33974, + 1.33736, + 1.34269, + 1.34906, + 1.34377, + 1.33473, + 1.343, + 1.34132, + 1.33943, + 1.341, + 1.33716, + 1.32547, + 1.3371, + 1.33437, + 1.32555, + 1.33543, + 1.33621, + 1.3215, + 1.33266, + 1.31534, + 1.32595, + 1.32734, + 1.32015, + 1.32492, + 1.31855, + 1.33359, + 1.66786, + 1.31743, + 1.32696, + 1.33579, + 1.32251, + 1.33627, + 1.32576, + 1.32653, + 1.34276, + 1.31981, + 1.33486, + 1.32873, + 1.32028, + 1.32507, + 1.32211, + 1.32709, + 1.33106, + 1.3183, + 1.33122, + 1.31664, + 1.33108, + 1.34366, + 1.31693, + 1.32452, + 1.32835, + 1.31419, + 1.32546, + 1.31977, + 1.3262, + 1.33176, + 1.31601, + 1.33275, + 1.32058, + 1.32678, + 1.32324, + 1.317, + 1.3437, + 1.31867, + 1.32231, + 1.32286, + 1.3207, + 1.33345, + 1.3182, + 1.3252, + 1.33531, + 1.32194, + 1.33212, + 1.32008, + 1.33452, + 1.32165, + 1.31727, + 1.33005, + 1.31945, + 1.32647, + 1.32811, + 1.31652, + 1.33327, + 1.32326, + 1.3281, + 1.32732, + 1.31953, + 1.33364, + 1.33098, + 1.45235, + 1.32995, + 1.3361, + 1.32739, + 1.33322, + 1.33125, + 1.32348, + 1.33073, + 1.32539, + 1.3246, + 1.32195, + 1.31924, + 1.32845, + 1.32487, + 1.32061, + 1.31966, + 1.31579, + 1.3277, + 1.32271, + 1.32605, + 1.32261, + 
1.32156, + 1.32647, + 1.31813, + 1.3288, + 1.32253, + 1.3231, + 1.32536, + 1.31897, + 1.32751, + 1.32578, + 1.32909, + 1.33532, + 1.33326, + 1.33105, + 1.32709, + 1.33676, + 1.33904, + 1.3295, + 1.32664, + 1.35848, + 1.32898, + 1.33485, + 1.33037, + 1.32875, + 1.33465, + 1.33401, + 1.33837, + 1.3293, + 1.33445, + 1.34421, + 1.32972, + 1.33724, + 1.34139, + 1.33243, + 1.33291, + 1.33723, + 1.33388, + 1.32865, + 1.33127, + 1.33318, + 1.33165, + 1.34222, + 1.33634, + 1.3365, + 1.33796, + 1.34048, + 1.32719, + 1.33315, + 1.33195, + 1.32817, + 1.3339, + 1.32838, + 1.33821, + 1.3587, + 1.34806, + 1.35603, + 1.33734, + 1.32992, + 1.33619, + 1.33521, + 1.33764, + 1.33246, + 1.33105, + 1.332, + 1.33518, + 1.33735, + 1.32633, + 1.33962, + 1.33025, + 1.33331, + 1.332, + 1.33835, + 1.32945, + 1.33547, + 1.3322, + 1.32881, + 1.33281, + 1.3315, + 1.33043, + 1.32953, + 1.3237, + 1.3313, + 1.32987, + 1.32727, + 1.33098, + 1.3258, + 1.32451, + 1.33015, + 1.32723, + 1.32992, + 1.32266, + 1.31868, + 1.32973, + 1.32567, + 1.32905, + 1.3309, + 1.33101, + 1.33208, + 1.3296, + 1.32644, + 1.33636, + 1.33075, + 1.32271, + 1.33314, + 1.32512, + 1.32355, + 1.32919, + 1.32649, + 1.33633, + 1.32914, + 1.32897, + 1.33177, + 1.32609, + 1.32965, + 1.33361, + 1.32785, + 1.33132, + 1.33811, + 1.32252, + 1.33111, + 1.3308, + 1.32999, + 1.32903, + 1.32462, + 1.32932, + 1.33299, + 1.32873, + 1.33539, + 1.33319, + 1.32521, + 1.33441, + 1.33404, + 1.33913, + 1.3349, + 1.33111, + 1.3365, + 1.33511, + 1.32963, + 1.33379, + 1.33388, + 1.32718, + 1.33768, + 1.32834, + 1.32755, + 1.33517, + 1.32821, + 1.32989, + 1.32599, + 1.32244, + 1.33073, + 1.32566, + 1.32905, + 1.32964, + 1.32515, + 1.32781, + 1.32553, + 1.33138, + 1.33053, + 1.32261, + 1.33906, + 1.32748, + 1.31974, + 1.33166, + 1.32414, + 1.3312, + 1.32577, + 1.32043, + 1.33388, + 1.32097, + 1.32899, + 1.32974, + 1.32268, + 1.32709, + 1.32536, + 1.32531, + 1.32299, + 1.32853, + 1.32355, + 1.3324, + 1.3289, + 1.32327, + 1.32737, + 1.45318, + 1.32088, + 1.32958, + 1.32066, + 1.32821, + 1.32819, + 1.32165, + 1.33189, + 1.32339, + 1.33049, + 1.32136, + 1.32188, + 1.32441, + 1.32573, + 1.3288, + 1.32306, + 1.32552, + 1.32893, + 1.31947, + 1.32236, + 1.31683, + 1.33123, + 1.32665, + 1.31857, + 1.32751, + 1.32303, + 1.33184, + 1.32535, + 1.32112, + 1.32827, + 1.3264, + 1.32321, + 1.3315 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values_lts.json new file mode 100644 index 000000000..0463c4d01 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/golden_values_lts.json @@ -0,0 +1,1223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 10.89904, + 10.90777, + 10.89232, + 10.83544, + 10.6834, + 10.65974, + 10.44873, + 10.16308, + 9.95831, + 9.85932, + 
9.60254, + 9.85446, + 9.88893, + 9.63287, + 9.79405, + 9.51078, + 9.46463, + 9.65471, + 9.39306, + 9.33895, + 9.24972, + 9.15413, + 9.17988, + 9.0065, + 9.19899, + 9.06474, + 9.16249, + 9.16631, + 9.30043, + 8.98957, + 8.93842, + 9.05744, + 9.05222, + 8.66356, + 8.72626, + 8.7667, + 8.70006, + 8.74817, + 8.67179, + 8.78274, + 8.67795, + 8.86767, + 8.84929, + 8.51536, + 8.40624, + 8.45093, + 8.51004, + 8.40653, + 8.45216, + 8.6026, + 8.38502, + 8.21394, + 8.24297, + 8.23879, + 8.28518, + 7.93123, + 8.10705, + 7.90575, + 8.25948, + 8.24016, + 8.01415, + 7.97894, + 7.93174, + 7.74864, + 7.74918, + 7.65293, + 7.52384, + 7.91349, + 7.70509, + 7.46214, + 7.74596, + 7.77384, + 7.5447, + 7.30561, + 7.45871, + 7.34545, + 7.46856, + 7.23017, + 7.64088, + 7.27983, + 7.34981, + 7.21134, + 7.21081, + 7.42102, + 7.17384, + 7.28052, + 6.99786, + 7.00152, + 7.03624, + 7.13136, + 6.82298, + 6.98762, + 7.08699, + 6.99714, + 6.87231, + 6.75444, + 6.98392, + 7.05773, + 6.69999, + 6.57801, + 6.72248, + 6.73865, + 6.73005, + 6.73698, + 6.65374, + 6.40729, + 6.6365, + 6.61972, + 6.44423, + 6.62637, + 6.74067, + 6.60551, + 6.72345, + 6.68935, + 6.62052, + 6.50773, + 6.59703, + 6.40181, + 6.66219, + 6.24576, + 6.24815, + 6.29992, + 6.38652, + 6.34284, + 6.44395, + 6.2868, + 6.33137, + 6.23064, + 6.19419, + 6.38932, + 6.31955, + 6.31115, + 6.15595, + 6.14904, + 6.23012, + 6.37609, + 6.19108, + 6.14016, + 6.17443, + 6.108, + 6.05677, + 6.07051, + 6.2515, + 6.40359, + 6.25653, + 6.30179, + 6.09464, + 6.1786, + 6.00393, + 6.03024, + 5.95456, + 6.25097, + 6.18949, + 5.96652, + 5.78509, + 6.12471, + 5.85239, + 6.09954, + 5.78907, + 6.1634, + 6.14662, + 6.08899, + 5.93324, + 6.11629, + 5.94863, + 6.19744, + 5.89699, + 5.79464, + 5.78508, + 5.6887, + 6.01484, + 5.99513, + 6.06793, + 5.88964, + 6.04218, + 5.96664, + 5.9946, + 5.98873, + 5.94909, + 5.83777, + 5.94965, + 5.62073, + 5.70203, + 5.88937, + 5.84442, + 5.86415, + 5.75977, + 5.83426, + 5.72464, + 5.56351, + 5.71986, + 5.62642, + 5.83426, + 5.60742, + 5.71258, + 5.70976, + 5.8987, + 5.64295, + 5.85277, + 5.73889, + 5.87053, + 5.32966, + 5.89533, + 5.87205, + 5.85426, + 5.41037, + 5.40663, + 5.62114, + 5.59572, + 5.48482, + 5.57586, + 5.67197, + 5.4726, + 5.74298, + 5.50672, + 5.5935, + 5.61776, + 5.6179, + 5.51203, + 5.61413, + 5.67291, + 5.68327, + 5.58724, + 5.66009, + 5.37678, + 5.68099, + 5.62359, + 5.42053, + 5.57867, + 5.62946, + 5.54954, + 5.33822, + 5.53445, + 5.48149, + 5.47842, + 5.37511, + 5.5464, + 5.60351, + 5.38706, + 5.51715, + 5.48729, + 5.33094, + 5.50178, + 5.40732, + 5.44712, + 5.31548, + 5.06617, + 5.47969, + 5.56831, + 5.7133, + 5.41401, + 5.59841, + 5.63558, + 5.2322, + 5.27319, + 5.38792, + 5.39306, + 5.32904, + 5.49509, + 5.17834, + 5.29764, + 5.24393, + 5.37614, + 5.25456, + 5.44258, + 5.54017, + 5.31017, + 5.43225, + 5.33341, + 5.07298, + 5.31187, + 5.2557, + 5.30514, + 5.10844, + 5.27459, + 5.26496, + 5.47616, + 5.16669, + 5.26555, + 5.21176, + 5.355, + 4.98377, + 4.91178, + 5.33096, + 5.38935, + 5.23414, + 5.31329, + 5.10388, + 5.16417, + 5.26356, + 5.06801, + 5.27045, + 5.07377, + 5.34602, + 5.24563, + 5.15001, + 5.24094, + 5.04069, + 5.31488, + 5.04958, + 5.02979, + 5.13788, + 5.11434, + 5.26734, + 5.14852, + 5.27369, + 5.08851, + 5.09324, + 5.24624, + 5.32324, + 5.25443, + 5.19052, + 5.14435, + 5.29055, + 4.94885, + 5.20441, + 5.0907, + 5.29874, + 5.17267, + 5.18858, + 5.11677, + 4.98159, + 4.99122, + 5.22123, + 5.30764, + 5.10222, + 5.0544, + 4.91358, + 5.12177, + 5.11614, + 4.92915, + 5.33612, + 5.01913, + 5.10051, + 5.16573, + 
4.99929, + 5.06049, + 5.06814, + 4.99437, + 5.07642, + 5.16464, + 4.98109, + 5.1825, + 4.92945, + 4.92916, + 5.06868, + 4.99902, + 4.90979, + 4.77687, + 4.94499, + 5.11671, + 5.01541, + 5.02126, + 5.32954, + 4.95713, + 4.99895, + 5.05055, + 4.81011, + 4.73872, + 5.00091, + 5.04398, + 4.87805, + 4.95233, + 5.04347, + 5.02539, + 4.82104, + 4.90025, + 4.90912, + 4.83747, + 4.75039, + 5.01482, + 4.74829, + 5.21037, + 4.79047, + 5.00245, + 4.74175, + 4.79189, + 4.82107, + 4.65381, + 4.66051, + 4.84616, + 4.81073, + 4.8078, + 4.92405, + 4.88723, + 4.93597, + 4.77468, + 4.88361, + 4.74125, + 4.92209, + 4.96252, + 4.87874, + 4.71289, + 4.79114, + 4.90017, + 4.7175, + 4.87202, + 4.69846, + 4.70626, + 4.65256 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 58.0, + 87.0, + 81.0, + 84.0, + 84.0, + 90.0, + 104.0, + 124.0, + 102.0, + 132.0, + 129.0, + 152.0, + 143.0, + 181.0, + 202.0, + 161.0, + 161.0, + 177.0, + 184.0, + 189.0, + 151.0, + 167.0, + 183.0, + 182.0, + 186.0, + 154.0, + 178.0, + 163.0, + 167.0, + 148.0, + 145.0, + 138.0, + 187.0, + 168.0, + 140.0, + 142.0, + 167.0, + 204.0, + 169.0, + 203.0, + 148.0, + 155.0, + 141.0, + 200.0, + 190.0, + 169.0, + 187.0, + 196.0, + 175.0, + 229.0, + 207.0, + 188.0, + 199.0, + 157.0, + 186.0, + 178.0, + 154.0, + 138.0, + 248.0, + 232.0, + 174.0, + 186.0, + 188.0, + 193.0, + 201.0, + 239.0, + 207.0, + 166.0, + 208.0, + 203.0, + 208.0, + 254.0, + 168.0, + 251.0, + 210.0, + 201.0, + 239.0, + 211.0, + 241.0, + 211.0, + 204.0, + 215.0, + 193.0, + 225.0, + 213.0, + 184.0, + 182.0, + 191.0, + 206.0, + 206.0, + 188.0, + 218.0, + 214.0, + 205.0, + 203.0, + 166.0, + 206.0, + 174.0, + 195.0, + 174.0, + 140.0, + 154.0, + 176.0, + 165.0, + 129.0, + 148.0, + 168.0, + 157.0, + 137.0, + 180.0, + 175.0, + 163.0, + 175.0, + 145.0, + 138.0, + 134.0, + 159.0, + 128.0, + 173.0, + 161.0, + 151.0, + 113.0, + 133.0, + 129.0, + 177.0, + 125.0, + 153.0, + 137.0, + 120.0, + 142.0, + 148.0, + 143.0, + 100.0, + 113.0, + 106.0, + 124.0, + 129.0, + 93.0, + 119.0, + 125.0, + 107.0, + 107.0, + 141.0, + 141.0, + 122.0, + 91.0, + 142.0, + 120.0, + 101.0, + 141.0, + 130.0, + 112.0, + 107.0, + 110.0, + 132.0, + 105.0, + 102.0, + 116.0, + 115.0, + 122.0, + 96.0, + 122.0, + 87.0, + 104.0, + 112.0, + 91.0, + 110.0, + 107.0, + 101.0, + 103.0, + 107.0, + 117.0, + 83.0, + 102.0, + 105.0, + 133.0, + 96.0, + 115.0, + 93.0, + 128.0, + 129.0, + 113.0, + 112.0, + 104.0, + 104.0, + 90.0, + 85.0, + 92.0, + 96.0, + 79.0, + 140.0, + 112.0, + 103.0, + 85.0, + 96.0, + 103.0, + 104.0, + 90.0, + 109.0, + 115.0, + 113.0, + 82.0, + 123.0, + 128.0, + 86.0, + 113.0, + 103.0, + 100.0, + 129.0, + 90.0, + 96.0, + 92.0, + 106.0, + 106.0, + 113.0, + 127.0, + 112.0, + 118.0, + 96.0, + 106.0, + 114.0, + 93.0, + 85.0, + 74.0, + 105.0, + 113.0, + 97.0, + 113.0, + 107.0, + 97.0, + 109.0, + 87.0, + 89.0, + 108.0, + 106.0, + 87.0, + 120.0, + 115.0, + 109.0, + 111.0, + 100.0, + 114.0, + 102.0, + 106.0, + 94.0, + 106.0, + 77.0, + 124.0, + 112.0, + 102.0, + 104.0, + 111.0, + 109.0, + 125.0, + 114.0, + 109.0, + 120.0, + 120.0, + 103.0, + 107.0, + 86.0, + 111.0, + 95.0, + 102.0, + 108.0, + 78.0, + 100.0, + 90.0, + 107.0, + 101.0, + 104.0, + 119.0, + 100.0, + 113.0, + 110.0, + 113.0, + 90.0, + 101.0, + 107.0, + 106.0, + 111.0, + 88.0, + 125.0, + 93.0, + 106.0, + 103.0, + 116.0, + 127.0, + 100.0, + 84.0, + 102.0, + 97.0, + 97.0, + 94.0, + 120.0, + 109.0, + 110.0, + 98.0, + 97.0, + 113.0, + 108.0, + 106.0, + 143.0, + 104.0, + 111.0, + 106.0, + 103.0, + 99.0, + 110.0, + 106.0, + 
130.0, + 121.0, + 112.0, + 103.0, + 101.0, + 97.0, + 115.0, + 127.0, + 117.0, + 116.0, + 109.0, + 101.0, + 129.0, + 101.0, + 99.0, + 112.0, + 91.0, + 113.0, + 104.0, + 122.0, + 91.0, + 120.0, + 124.0, + 89.0, + 106.0, + 106.0, + 119.0, + 101.0, + 98.0, + 102.0, + 129.0, + 107.0, + 116.0, + 126.0, + 127.0, + 112.0, + 86.0, + 106.0, + 136.0, + 135.0, + 107.0, + 93.0, + 102.0, + 118.0, + 117.0, + 104.0, + 123.0, + 99.0, + 114.0, + 92.0, + 128.0, + 92.0, + 107.0, + 92.0, + 124.0, + 106.0, + 101.0, + 112.0, + 106.0, + 99.0, + 107.0, + 110.0, + 97.0, + 108.0, + 117.0, + 119.0, + 102.0, + 116.0, + 116.0, + 118.0, + 108.0, + 130.0, + 116.0, + 118.0, + 122.0, + 105.0, + 104.0, + 126.0, + 123.0, + 118.0, + 124.0, + 126.0, + 97.0, + 123.0, + 133.0, + 101.0, + 117.0, + 114.0, + 120.0, + 139.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 2000, + "step_interval": 5, + "values": [ + 26.69562, + 2.22579, + 2.19499, + 2.18927, + 2.20347, + 2.20486, + 2.19616, + 2.18953, + 2.19243, + 2.19162, + 2.19709, + 2.20446, + 2.21739, + 2.14781, + 2.14676, + 2.13968, + 2.16989, + 2.16276, + 2.16801, + 2.16925, + 2.14717, + 2.14458, + 2.54878, + 2.14922, + 2.2881, + 2.14125, + 2.13827, + 2.14419, + 2.13768, + 2.14618, + 2.29108, + 2.17599, + 2.15672, + 2.15376, + 2.15175, + 2.15365, + 2.14489, + 2.16446, + 2.14511, + 2.14607, + 2.14463, + 2.14073, + 2.15082, + 2.14429, + 2.1629, + 2.14914, + 2.14671, + 2.14152, + 2.1578, + 2.15036, + 2.18156, + 2.14947, + 2.15499, + 2.15448, + 2.14498, + 2.17022, + 2.15074, + 2.15315, + 2.14479, + 2.14643, + 2.1539, + 2.17161, + 2.15621, + 2.14956, + 2.18535, + 2.17453, + 2.19533, + 2.18873, + 2.17428, + 2.17286, + 2.16489, + 2.17738, + 2.1729, + 2.16198, + 2.15566, + 2.16685, + 2.17114, + 2.17505, + 2.16943, + 2.18665, + 2.18086, + 2.17335, + 2.16894, + 2.17859, + 2.17143, + 2.16927, + 2.17751, + 2.16672, + 2.18668, + 2.16427, + 2.15535, + 2.16126, + 2.16744, + 2.15529, + 2.1683, + 2.14738, + 2.16013, + 2.15296, + 2.14264, + 2.14233, + 2.1445, + 2.17158, + 2.14916, + 2.14433, + 2.1608, + 2.15794, + 2.14246, + 2.15069, + 2.15369, + 2.14475, + 2.1647, + 2.1604, + 2.18225, + 2.15673, + 2.14813, + 2.14564, + 2.16483, + 2.1564, + 2.15075, + 2.30566, + 2.14216, + 2.14965, + 2.15397, + 2.15357, + 2.15392, + 2.15154, + 2.14714, + 2.15537, + 2.15606, + 2.15318, + 2.39222, + 2.15518, + 2.14998, + 2.16426, + 2.15347, + 2.14496, + 2.14627, + 2.14836, + 2.17996, + 2.16333, + 2.16367, + 2.14627, + 2.14971, + 2.14499, + 2.14774, + 2.14902, + 2.14984, + 2.17596, + 2.15014, + 2.15114, + 2.17123, + 2.15357, + 2.14945, + 2.14978, + 2.14929, + 2.143, + 2.15155, + 2.16019, + 2.17298, + 2.16063, + 2.15144, + 2.16011, + 2.14807, + 2.14632, + 2.15697, + 2.15198, + 2.1584, + 2.15233, + 2.16268, + 2.1648, + 2.1546, + 2.14525, + 2.14593, + 2.14622, + 2.14391, + 2.15344, + 2.16086, + 2.15831, + 2.15122, + 2.14385, + 2.15243, + 2.13958, + 2.14961, + 2.16846, + 2.1672, + 2.15294, + 2.1424, + 2.14522, + 2.19892, + 2.17537, + 2.16817, + 2.1508, + 2.15436, + 2.15954, + 2.15932, + 2.15852, + 2.15398, + 2.13928, + 2.13132, + 2.16325, + 2.14825, + 2.16326, + 2.17018, + 2.16749, + 2.17147, + 2.16062, + 2.16772, + 2.1526, + 2.15889, + 2.16306, + 2.17467, + 2.15558, + 2.16352, + 2.1856, + 2.19806, + 2.2298, + 2.20851, + 2.17979, + 2.17878, + 2.17373, + 2.17104, + 2.18177, + 2.15319, + 2.15977, + 2.16469, + 2.16464, + 2.1571, + 2.15656, + 2.16189, + 2.16054, + 2.16321, + 2.14799, + 2.1629, + 2.14171, + 2.1408, + 2.14258, + 2.14713, + 2.17553, + 2.17828, + 2.15109, + 2.14335, + 2.14927, + 2.1447, + 
2.15428, + 2.14328, + 2.14617, + 2.14817, + 2.14913, + 2.1404, + 2.15508, + 2.13322, + 2.1406, + 2.14928, + 2.13653, + 2.14713, + 2.13506, + 2.27029, + 2.15052, + 2.14911, + 2.14541, + 2.16559, + 2.16935, + 2.15521, + 2.13934, + 2.16298, + 2.16669, + 2.1549, + 2.13974, + 2.14288, + 2.13777, + 2.14539, + 2.13368, + 2.14607, + 2.14212, + 2.15813, + 2.14424, + 2.20917, + 2.15467, + 2.15789, + 2.13681, + 2.142, + 2.13498, + 2.15345, + 2.14681, + 2.13383, + 2.14469, + 2.13318, + 2.16468, + 2.16004, + 2.14196, + 2.1427, + 2.68517, + 2.1476, + 2.14172, + 2.14451, + 2.1428, + 2.14565, + 2.1421, + 2.14395, + 2.14997, + 2.14164, + 2.13444, + 2.1407, + 2.1462, + 2.16449, + 2.15818, + 2.16163, + 2.1363, + 2.15192, + 2.14322, + 2.14276, + 2.14054, + 2.1415, + 2.15422, + 2.14653, + 2.14785, + 2.15357, + 2.2487, + 2.14206, + 2.16734, + 2.15219, + 2.14305, + 2.1461, + 2.14578, + 2.14928, + 2.14065, + 2.14592, + 2.16086, + 2.16724, + 2.16219, + 2.15334, + 2.14984, + 2.15032, + 2.14921, + 2.14531, + 2.13826, + 2.13748, + 2.14995, + 2.14539, + 2.1389, + 2.16049, + 2.18618, + 2.17643, + 2.16597, + 2.15903, + 2.16816, + 2.16298, + 2.1688, + 2.17148, + 2.16559, + 2.15895, + 2.15812, + 2.1641, + 2.17292, + 2.18083, + 2.31263, + 2.16745, + 2.14954, + 2.15456, + 2.16475, + 2.16778, + 2.17943, + 2.16494, + 2.17602, + 2.15629, + 2.15465, + 2.17417, + 2.15746, + 2.1614, + 2.15894, + 2.172, + 2.19984, + 2.16888, + 2.16555, + 2.17016, + 2.16439, + 2.18253, + 2.18012, + 2.16923, + 2.1657, + 2.16063, + 2.14964, + 2.14503, + 2.15339, + 2.15052, + 2.14668, + 2.13928, + 2.16527, + 2.17177, + 2.1525, + 2.15968, + 2.16198, + 2.16082, + 2.17578, + 2.1759, + 2.14695, + 2.15109, + 2.15254, + 2.15433, + 2.17792 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..34dfa4f6b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8401, + 10.87259, + 10.85024, + 10.79646, + 10.68156, + 10.60618, + 10.12768, + 10.22185, + 10.13788, + 9.82309 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1698.0, + 1855.0, + 1949.0, + 1968.0, + 1881.0, + 1783.0, + 1653.0, + 2037.0, + 2313.0, + 2300.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 5.37706, + 0.09618, + 0.09432, + 0.09666, + 0.09442, + 0.09619, + 0.09453, + 0.0975, + 0.09517, + 0.09727 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index ee84d93de..c9de15222 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml index ffdaec80a..90c257012 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index 9dd9e9ecd..fcaad9932 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml index 470ba6f92..174164735 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..75bf20ee5 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + 
"lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8401, + 10.87259, + 10.85023, + 10.79646, + 10.68153, + 10.60619, + 10.12767, + 10.22185, + 10.13787, + 9.82307 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1698.0, + 1855.0, + 1896.0, + 1866.0, + 2032.0, + 1814.0, + 1664.0, + 1961.0, + 2306.0, + 2403.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 8.00253, + 0.13176, + 0.13026, + 0.13184, + 0.13023, + 0.13135, + 0.13014, + 0.13143, + 0.1305, + 0.13191 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml index fb07f9d30..b51ada7c0 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..206d78993 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82974, 10.85934, 10.88536, 10.78981, 10.64534, 10.56415, 9.99534, 10.13972, 10.06259, 9.71481]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [261.0, 256.0, 258.0, 250.0, 243.0, 265.0, 254.0, 299.0, 299.0, 294.0]}, "iteration_timing_avg": 0.3993126470588235} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..c0c3ead53 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85803, 10.88122, 10.85832, 10.80987, 10.66115, 10.55375, 10.01843, 10.14234, 10.05958, 9.71149]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [244.0, 231.0, 243.0, 257.0, 247.0, 267.0, 256.0, 299.0, 
318.0, 325.0]}, "iteration_timing_avg": 0.3993126470588235} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 000000000..4af4dd14f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,54 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 + NVTE_FUSED_ATTN: 0 + NVTE_FLASH_ATTN: 1 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_PATH} + --load: ${CHECKPOINT_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 2 + --context-parallel-size: 4 + --cp-comm-type: a2a+p2p + --hierarchical-context-parallel-sizes: 2 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 000000000..fef122404 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,55 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 + NVTE_FUSED_ATTN: 0 + NVTE_FLASH_ATTN: 1 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_PATH} + --load: ${CHECKPOINT_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + 
--save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 2 + --context-parallel-size: 4 + --cp-comm-type: a2a+p2p + --hierarchical-context-parallel-sizes: 2 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml index 7cdb56dd0..2d2c1ce9a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,6 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml index 7bdd0c46e..d11f45995 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -41,6 +40,7 @@ MODEL_ARGS: --tensor-model-parallel-size: 1 --pipeline-model-parallel-size: 2 --position-embedding-type: rope + --no-rope-fusion: true --no-ckpt-fully-parallel-save: true --deterministic-mode: true --no-gradient-accumulation-fusion: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml index b014fdabc..40f43682b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..3020fb561 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8468, + 10.87769, + 10.90302, + 10.82026, + 10.67979, + 10.60157, + 10.06449, + 10.19316, + 10.11411, + 9.76007 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1692.0, + 2044.0, + 2005.0, + 2007.0, + 1945.0, + 1868.0, + 1701.0, + 2085.0, + 2389.0, + 2377.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.20538, + 0.14353, + 0.14213, + 0.14213, + 0.14068, + 0.14104, + 0.14078, + 0.14149, + 0.14065, + 0.14118 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml index b2a1643ec..159a9a58d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -41,6 +40,7 @@ MODEL_ARGS: --tensor-model-parallel-size: 1 --pipeline-model-parallel-size: 2 --position-embedding-type: rope + --no-rope-fusion: true --no-ckpt-fully-parallel-save: true --deterministic-mode: true --no-gradient-accumulation-fusion: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..50486e0bb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.84474, + 10.87688, + 10.90253, + 10.81872, + 10.67849, + 10.60076, + 10.06361, + 10.19267, + 10.11344, + 9.75987 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1769.0, + 2129.0, + 1987.0, + 1961.0, + 1961.0, + 1886.0, + 1655.0, + 2130.0, + 2315.0, + 2362.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 8.72642, + 0.16194, + 0.15926, + 0.15956, + 0.15972, + 0.1623, + 0.16029, + 0.15863, + 0.15947, + 
0.15935 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml index 6c2c9e51a..65a87d67a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..cd1e76664 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.79205, + 10.86789, + 10.89149, + 10.78328, + 10.66126, + 10.58275, + 10.08467, + 10.19448, + 10.13785, + 9.81454 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1580.0, + 1778.0, + 1849.0, + 1841.0, + 1884.0, + 1679.0, + 1544.0, + 1953.0, + 2449.0, + 2335.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.79458, + 0.16744, + 0.16286, + 0.16276, + 0.16292, + 0.16346, + 0.16288, + 0.16273, + 0.16282, + 0.16245 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml index 2e0188551..f3e4ce8a6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml index 8fa10f4b9..440638b53 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml index c64a4ef5e..059716a6a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml index dda1876e1..f82a51e4f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml index df7ba9fb3..3d4dc222a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..e8a20535b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.79208, + 10.86688, + 10.89063, + 10.7818, + 10.65964, + 10.58005, + 10.0819, + 10.19136, + 10.13478, + 9.81149 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1602.0, + 1792.0, + 1751.0, + 1885.0, + 1872.0, + 1716.0, + 1561.0, + 1867.0, + 2355.0, + 2329.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 13.82777, + 0.17397, + 0.17253, + 0.17285, + 0.17221, + 0.17204, + 0.17139, + 0.17105, + 0.17258, + 0.17185 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml index 479916c65..3e5acc65a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..6a5671c4a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.74049, + 10.81937, + 10.84178, + 10.75558, + 10.69821, + 10.63096, + 10.2026, + 10.36288, + 10.25634, + 9.94255 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 2529.0, + 2845.0, + 2909.0, + 2683.0, + 2631.0, + 2573.0, + 2281.0, + 2559.0, + 2484.0, + 2360.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 14.80986, + 0.17896, + 0.17664, + 0.17758, + 0.17762, + 0.17676, + 0.17638, + 0.1761, + 0.17725, + 0.1755 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml index 20c57f0c9..9ae648b7b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..e7ae5fe9a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.90105, + 10.91105, + 10.91632, + 10.84823, + 10.70727, + 10.63015, + 10.15241, + 10.26049, + 10.15995, + 9.83163 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 22727080.0, + 23021764.0, + 22500984.0, + 22830798.0, + 22739428.0, + 22547260.0, + 22955476.0, + 22590172.0, + 22659570.0, + 22884676.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 17.09091, + 0.17551, + 0.17095, + 0.1714, + 0.17144, + 0.1711, + 0.17223, + 0.17069, + 0.17123, + 0.17064 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml index f7c52c997..85e8e81ff 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..1c4e36d7e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82005, + 10.87447, + 10.87793, + 10.79509, + 10.68164, + 10.59514, + 10.10045, + 10.21239, + 10.13862, + 9.80879 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1562.0, + 1754.0, + 1879.0, + 1778.0, + 1877.0, + 1733.0, + 1578.0, + 1924.0, + 2299.0, + 2292.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 18.71949, + 0.16575, + 0.16508, + 0.16465, + 0.16475, + 0.16222, + 0.16473, + 0.16461, + 0.16489, + 0.16518 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml index 210febf44..fea891cd9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..e614c5390 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82005, + 10.87448, + 10.87796, + 10.79506, + 10.68153, + 10.59413, + 10.09983, + 10.20957, + 10.13642, + 9.80012 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1562.0, + 1687.0, + 1848.0, + 1736.0, + 1955.0, + 1764.0, + 1580.0, + 1886.0, + 2252.0, + 2259.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 16.16694, + 0.16354, + 0.16237, + 0.16232, + 0.16088, + 0.15891, + 0.15894, + 0.15865, + 0.16009, + 0.1576 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml index fd67df60c..b096c06b6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..ccb851874 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82005, + 10.87447, + 10.87793, + 10.79509, + 10.68164, + 10.59514, + 10.10045, + 10.21239, + 10.13862, + 9.80879 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1562.0, + 1754.0, + 1879.0, + 1778.0, + 1877.0, + 1733.0, + 1578.0, + 1924.0, + 2299.0, + 2292.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 18.68941, + 0.16498, + 0.16403, + 0.16281, + 0.16302, + 0.16352, + 0.16473, + 0.16207, + 0.16362, + 0.16219 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml index 0c0bc85f6..a2c641b31 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json new file mode 
100644 index 000000000..1ebd78a1c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82005, + 10.87447, + 10.87799, + 10.79507, + 10.68165, + 10.59511, + 10.10047, + 10.2124, + 10.13861, + 9.80876 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1562.0, + 1738.0, + 1852.0, + 1802.0, + 1917.0, + 1765.0, + 1570.0, + 1949.0, + 2251.0, + 2270.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 14.96968, + 0.16347, + 0.16403, + 0.16317, + 0.162, + 0.16129, + 0.16268, + 0.16156, + 0.16212, + 0.16407 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index 7a92bfd8c..2b9346ee7 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..badf67291 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82005, + 10.87447, + 10.87799, + 10.79507, + 10.68165, + 10.59511, + 10.10047, + 10.2124, + 10.13861, + 9.80876 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1562.0, + 1738.0, + 1852.0, + 1802.0, + 1917.0, + 1765.0, + 1570.0, + 1949.0, + 2251.0, + 2270.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 17.23575, + 0.17553, + 0.34737, + 0.17165, + 0.32526, + 0.17081, + 0.32706, + 0.17037, + 0.3321, + 0.16992 + ] + } +} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index ef5b64d28..61adccbb9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..5d79a14a4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.82005, + 10.87447, + 10.87799, + 10.79508, + 10.68163, + 10.59514, + 10.10047, + 10.21237, + 10.13864, + 9.80877 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1562.0, + 1738.0, + 1852.0, + 1796.0, + 1869.0, + 1788.0, + 1517.0, + 1941.0, + 2226.0, + 2214.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 17.43169, + 0.16677, + 0.33581, + 0.16498, + 0.33103, + 0.16418, + 0.33146, + 0.16539, + 0.33075, + 0.1651 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_lts.json diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml index ca1de0ad3..023747a48 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..99b20e2dc --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.9359, + 10.93551, + 10.9424, + 10.88073, + 10.75652, + 10.66333, + 10.16716, + 10.27244, + 10.19575, + 9.86005 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 22727668.0, + 23021008.0, + 22501280.0, + 22830020.0, + 22739656.0, + 22548262.0, + 22955680.0, + 22589964.0, + 22660156.0, + 22884572.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 16.12696, + 0.16574, + 0.16735, + 0.16507, + 0.1657, + 0.16626, + 0.16614, + 0.16517, + 0.16625, + 0.16568 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml index 30137a040..e573b9097 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml index 1513a1819..c31e5b66b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml index 077c9a36e..9b02b473b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index 1ccbe1ae3..d98716ac4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index b9ca81949..92b2e3528 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index 25ea6c933..1f2fa9e2d 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml index 7b7bc27f4..49865dde8 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml similarity index 75% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml index cf4a90e41..49bd5f94c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml @@ -14,7 +14,7 @@ MODEL_ARGS: --log-timers-to-tensorboard: true --tensorboard-dir: ${TENSORBOARD_PATH} --micro-batch-size: 4 - --global-batch-size: 32 + --global-batch-size: 40 --seq-length: 1024 --max-position-embeddings: 1024 --train-iters: 100 @@ -38,19 +38,11 @@ MODEL_ARGS: --eval-interval: 1000 --eval-iters: 10 --transformer-impl: transformer_engine - --tensor-model-parallel-size: 2 - --pipeline-model-parallel-size: 1 - --expert-model-parallel-size: 2 - --no-ckpt-fully-parallel-save: true - --moe-grouped-gemm: true - --disable-bias-linear: true - --sequence-parallel: true - --num-experts: 8 - --use-distributed-optimizer: true - --moe-router-load-balancing-type: sinkhorn - --moe-router-topk: 1 - --overlap-grad-reduce: true - --overlap-param-gather: true + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --num-layers-per-virtual-pipeline-stage: 1 + --overlap-p2p-communication-warmup-flush: true + --microbatch-group-size-per-virtual-pipeline-stage: 5 --deterministic-mode: true --no-gradient-accumulation-fusion: true --attention-softmax-in-fp32: true diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..a03d56c82 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81184, 10.84052, 10.8763, 10.79906, 10.68214, 10.59702, 10.49258, 10.11236, 10.12393, 9.98165]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1118.0, 1331.0, 1230.0, 1085.0, 1180.0, 1245.0, 1454.0, 1330.0, 1752.0, 1851.0]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [17.24286, 0.35341, 0.35187, 0.35028, 0.34941, 0.35093, 0.3488, 0.35179, 0.34905, 0.34684]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..91c3ae697 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81184, 10.84052, 10.87624, 10.79904, 10.68212, 10.59698, 10.49257, 10.11232, 10.12396, 9.98163]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1125.0, 1304.0, 1252.0, 1102.0, 1201.0, 1200.0, 1489.0, 1395.0, 1677.0, 1867.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1125.0, 1304.0, 1252.0, 1102.0, 1201.0, 1200.0, 1489.0, 1395.0, 1677.0, 1867.0]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [22.22011, 0.36082, 0.35927, 0.35627, 0.35901, 0.35008, 0.34828, 0.34774, 0.35145, 0.35141]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 000000000..ee9b7ec95 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,53 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 + N_REPEATS: 5 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 40 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_PATH} + --load: ${CHECKPOINT_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + 
--lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --num-layers-per-virtual-pipeline-stage: 1 + --overlap-p2p-communication-warmup-flush: true + --microbatch-group-size-per-virtual-pipeline-stage: 5 + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..551870d31 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.9735, + 10.96043, + 10.95577, + 10.91036, + 10.78792, + 10.71198, + 10.22428, + 10.28927, + 10.19052, + 9.86378 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 22727056.0, + 23021982.0, + 22501104.0, + 22831164.0, + 22740086.0, + 22547896.0, + 22955344.0, + 22589272.0, + 22658866.0, + 22885040.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 13.92799, + 0.16275, + 0.16118, + 0.16212, + 0.16165, + 0.16181, + 0.16104, + 0.16149, + 0.16151, + 0.16055 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml index 059265a07..bdb6ab308 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml @@ -49,4 +49,4 @@ MODEL_ARGS: --bf16: true --decoder-first-pipeline-num-layers: 2 --decoder-last-pipeline-num-layers: 2 -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json diff 
--git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..b87c0bca7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.88759, 10.90846, 10.88099, 10.84518, 10.69285, 10.6019, 10.09544, 10.18239, 10.08764, 9.76749]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [578.0, 659.0, 683.0, 700.0, 697.0, 620.0, 572.0, 774.0, 807.0, 837.0]}, "iteration_timing_avg": 0.3462723529411765} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml index 7da0cc5dd..d07e244b7 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,8 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - N_REPEATS: 5 + NVTE_FUSED_ATTN: 0 + NVTE_FLASH_ATTN: 1 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml similarity index 80% rename from tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/model_config.yaml rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index c6728722e..912b9bb53 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -17,7 +16,7 @@ MODEL_ARGS: --global-batch-size: 32 --seq-length: 1024 --max-position-embeddings: 1024 - --train-iters: 50 + --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 --save: ${CHECKPOINT_PATH} @@ -34,21 +33,20 @@ MODEL_ARGS: --clip-grad: 1.0 --lr-warmup-fraction: .01 --log-interval: 1 - --save-interval: 10000 + --save-interval: 50 --eval-interval: 1000 --eval-iters: 10 - --transformer-impl: local + --transformer-impl: transformer_engine --tensor-model-parallel-size: 2 - --pipeline-model-parallel-size: 2 - --num-experts: 2 - --sequence-parallel: true - --moe-router-load-balancing-type: sinkhorn - --moe-router-topk: 1 + --pipeline-model-parallel-size: 1 + --use-torch-fsdp2: true --deterministic-mode: true --no-gradient-accumulation-fusion: true + --no-async-tensor-model-parallel-allreduce: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true --use-mcore-models: true --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true - 
--apply-query-key-layer-scaling: true -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: ckpt-resume \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..0386ad6e8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.88734, + 10.91614, + 10.89061, + 10.86173, + 10.72753, + 10.64491, + 10.18012, + 10.2562, + 10.1611, + 9.8539 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 3268.0, + 4040.0, + 4142.0, + 3766.0, + 4028.0, + 3648.0, + 3306.0, + 4028.0, + 4648.0, + 4546.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 7.0561, + 0.32588, + 0.32628, + 0.32385, + 0.32419, + 0.32364, + 0.32337, + 0.32334, + 0.32358, + 0.32395 + ] + } +} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..15a93d025 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.88734, 10.91612, 10.8906, 10.86171, 10.72752, 10.64491, 10.18015, 10.25622, 10.16111, 9.85394]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3228.0, 3820.0, 3890.0, 3848.0, 3902.0, 3486.0, 3310.0, 3982.0, 4472.0, 4532.0]}, "iteration_timing_avg": 0.22043823529411763} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml similarity index 87% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/model_config.yaml rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml index 29b87e907..0947c8c1e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -40,12 +39,10 @@ MODEL_ARGS: --transformer-impl: transformer_engine --tensor-model-parallel-size: 2 --pipeline-model-parallel-size: 1 - --expert-model-parallel-size: 2 - --sequence-parallel: true - --num-experts: 8 - --moe-router-load-balancing-type: sinkhorn - --moe-router-topk: 1 - --ckpt-fully-parallel-load: true + --use-distributed-optimizer: true + --num-distributed-optimizer-instances: 2 + 
--overlap-grad-reduce: true + --overlap-param-gather: true --deterministic-mode: true --no-gradient-accumulation-fusion: true --attention-softmax-in-fp32: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml index 476a1b6b9..4d2dea459 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,8 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - N_REPEATS: 5 + NVTE_FUSED_ATTN: 0 + NVTE_FLASH_ATTN: 1 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml similarity index 87% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dgx_a100_1N8G/model_config.yaml rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml index 613559a96..359f483c3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 @@ -40,12 +39,10 @@ MODEL_ARGS: --transformer-impl: transformer_engine --tensor-model-parallel-size: 2 --pipeline-model-parallel-size: 1 - --expert-model-parallel-size: 2 - --sequence-parallel: true - --num-experts: 8 - --moe-router-load-balancing-type: sinkhorn - --moe-router-topk: 1 - --ckpt-fully-parallel-load: true + --use-distributed-optimizer: true + --num-distributed-optimizer-instances: 2 + --overlap-grad-reduce: true + --overlap-param-gather: true --deterministic-mode: true --no-gradient-accumulation-fusion: true --attention-softmax-in-fp32: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml index a1f86a64c..edc9eed73 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml index 6c454ecca..b12ef70b9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml index 793bfb21d..46a56c109 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index 7e38f0853..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79806, 10.86466, 10.87219, 10.80704, 10.71201, 10.63836, 10.19365, 10.30955, 10.22074, 9.91587]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [31010.0, 37271.0, 37922.0, 36177.0, 33568.0, 34619.0, 31252.0, 34977.0, 36315.0, 37480.0]}, "iteration_timing_avg": 0.35529294117647064} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index c7739ce69..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79806, 10.86508, 10.87232, 10.80773, 10.71115, 10.63886, 10.19259, 10.30975, 10.22077, 9.9157]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [31010.0, 37093.0, 37540.0, 35923.0, 33445.0, 34824.0, 30686.0, 35286.0, 36691.0, 37420.0]}, "iteration_timing_avg": 0.3566726470588235} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..a09763fbe --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.79987, + 10.85947, + 10.86478, + 10.80039, + 10.70971, + 10.63893, + 10.19526, + 10.31102, + 10.22247, + 9.91425 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 30798.0, + 37696.0, + 37844.0, + 36275.0, + 33140.0, + 35137.0, + 30638.0, + 35309.0, + 36677.0, + 37604.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 12.59746, + 0.61072, + 0.61063, + 0.61049, + 0.61015, + 0.60932, + 0.61233, + 0.61024, + 0.61226, + 0.61621 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..6afdc07f7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,37 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.79987, + 10.85983, + 10.865, + 10.799, + 10.70987, + 10.63782, + 10.1965, + 10.3099, + 10.22262, + 9.91423 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 30784.0, + 37528.0, + 37616.0, + 36105.0, + 33464.0, + 34923.0, + 30806.0, + 35663.0, + 36661.0, + 37641.0 + ] + }, + "iteration_timing_avg": 0.3566726470588235 +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml index c4b791a9d..be3e678db 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index 787d84d47..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.80392, 10.86451, 10.86393, 10.80306, 10.71669, 10.64561, 10.21267, 10.32342, 10.22503, 9.92985]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [31227.0, 37874.0, 
38070.0, 36215.0, 33120.0, 34374.0, 30579.0, 35192.0, 36094.0, 37183.0]}, "iteration_timing_avg": 0.2153429411764706} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index a8f23f172..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.80392, 10.86451, 10.86407, 10.80254, 10.71523, 10.64479, 10.21223, 10.32267, 10.22495, 9.93003]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [31227.0, 37874.0, 37773.0, 35936.0, 33255.0, 34279.0, 30117.0, 35460.0, 36069.0, 36785.0]}, "iteration_timing_avg": 0.21900323529411767} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..c531fcd9a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8029, + 10.86149, + 10.86819, + 10.80829, + 10.72062, + 10.64588, + 10.21132, + 10.32324, + 10.2265, + 9.92918 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 31473.0, + 37753.0, + 38332.0, + 36348.0, + 33270.0, + 34310.0, + 30284.0, + 35432.0, + 36356.0, + 37109.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 5.94452, + 0.40526, + 0.40286, + 0.40289, + 0.40215, + 0.40351, + 0.40373, + 0.40354, + 0.40382, + 0.41286 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..8f4c4706a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,37 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8029, + 10.86149, + 10.86819, + 10.80829, + 10.72062, + 10.64588, + 10.21132, + 10.32324, + 10.2265, + 9.92918 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 31473.0, + 37753.0, + 38332.0, + 36348.0, + 33270.0, + 34310.0, + 30284.0, + 35432.0, + 36356.0, + 37109.0 + ] + }, + "iteration_timing_avg": 0.21900323529411767 +} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml index bc5da0c31..f3da93728 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index 5b81d0706..000000000 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.83503, 10.88475, 10.87872, 10.81608, 10.69357, 10.60024, 10.08934, 10.21378, 10.10871, 9.78568]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [26744.0, 33099.0, 33750.0, 31697.0, 28979.0, 30817.0, 28713.0, 33425.0, 33927.0, 35074.0]}, "iteration_timing_avg": 0.28211852941176474} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..91e6f5e77 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.83445, + 10.87978, + 10.87924, + 10.81567, + 10.69374, + 10.60333, + 10.08824, + 10.21471, + 10.10778, + 9.78309 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 26648.0, + 32884.0, + 33611.0, + 31683.0, + 28744.0, + 30671.0, + 28602.0, + 33538.0, + 34560.0, + 35099.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.03575, + 0.59809, + 0.59808, + 0.60171, + 0.60477, + 0.611, + 0.62441, + 0.63554, + 0.64372, + 0.64983 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..d47ee5acb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,37 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.83445, + 10.87978, + 
10.87924, + 10.81567, + 10.69374, + 10.60333, + 10.08824, + 10.21471, + 10.10778, + 9.78309 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 26648.0, + 32884.0, + 33611.0, + 31683.0, + 28744.0, + 30671.0, + 28602.0, + 33538.0, + 34560.0, + 35099.0 + ] + }, + "iteration_timing_avg": 0.28211852941176474 +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml index 7c437e0b1..91e9e836c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..af8753157 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.81823, + 10.86998, + 10.8727, + 10.80014, + 10.67571, + 10.57944, + 10.06572, + 10.19342, + 10.08575, + 9.75236 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 26801.0, + 32734.0, + 32925.0, + 31593.0, + 28610.0, + 30362.0, + 28464.0, + 33486.0, + 33403.0, + 35162.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 8.63293, + 0.29454, + 0.28102, + 0.28297, + 0.28369, + 0.2848, + 0.30008, + 0.29214, + 0.31041, + 0.295 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..af7288cbd --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.81823, + 10.86998, + 10.8727, + 10.80014, + 10.67571, + 10.57944, + 10.06572, + 10.19342, + 10.08575, + 9.75236 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 26801.0, + 32734.0, + 32925.0, + 31593.0, + 28610.0, + 30362.0, + 28464.0, + 33486.0, + 33403.0, + 35162.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 11.94141, + 0.28425, + 0.28413, + 0.29449, + 0.28534, + 0.29977, + 0.30061, + 0.30321, + 0.30986, + 0.30404 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml
similarity index 90%
rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml
rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml
index c2631e84e..85b76573a 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml
@@ -40,16 +40,17 @@ MODEL_ARGS:
   --transformer-impl: transformer_engine
   --tensor-model-parallel-size: 2
   --pipeline-model-parallel-size: 1
-  --expert-model-parallel-size: 2
-  --no-ckpt-fully-parallel-save: true
-  --moe-grouped-gemm: true
+  --expert-model-parallel-size: 4
+  --expert-tensor-parallel-size: 1
   --disable-bias-linear: true
   --sequence-parallel: true
   --num-experts: 8
-  --moe-router-load-balancing-type: sinkhorn
-  --moe-router-topk: 1
+  --moe-router-load-balancing-type: aux_loss
+  --moe-router-topk: 2
+  --moe-aux-loss-coeff: 1e-2
   --deterministic-mode: true
   --no-gradient-accumulation-fusion: true
+  --moe-grouped-gemm: true
   --attention-softmax-in-fp32: true
   --use-mcore-models: true
   --ckpt-format: torch_dist
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json
similarity index 100%
rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values.json
rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json
new file mode 100644
index 000000000..4c8008e6a
--- /dev/null
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json
@@ -0,0 +1 @@
+{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.93292, 10.93657, 10.88788, 10.86131, 10.71505, 10.61066, 10.06697, 10.17616, 10.07539, 9.74965]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [607.0, 638.0, 643.0, 649.0, 648.0, 590.0, 548.0, 772.0, 834.0, 836.0]}, "iteration_timing_avg": 0.3993126470588235}
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml
index dde8a620d..a6cf383db 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml
@@ -1,7 +1,8 @@
 ENV_VARS:
   CUDA_DEVICE_MAX_CONNECTIONS: 1
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1
-  N_REPEATS: 5
+  NVTE_FUSED_ATTN: 0
+  NVTE_FLASH_ATTN: 1
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..98ff45e7d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93627, 10.89332, 10.87322, 10.74871, 10.65375, 10.15756, 10.24634, 10.15177, 9.83799]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1707.0, 1885.0, 1986.0, 1760.0, 1773.0, 1859.0, 1598.0, 1965.0, 2199.0, 2316.0]}, "iteration_timing_avg": 0.20321264705882353} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml index 303182bca..8f0bf337b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,6 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..a1c3bc04e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.92705, + 10.93624, + 10.89333, + 10.87317, + 10.74871, + 10.65379, + 10.15753, + 10.24638, + 10.15178, + 9.83806 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1653.0, + 1874.0, + 1994.0, + 1828.0, + 1769.0, + 1845.0, + 1674.0, + 1957.0, + 2364.0, + 2345.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 11.33146, + 0.22344, + 0.21997, + 0.21977, + 0.21792, + 0.21685, + 0.22555, + 0.21755, + 0.21796, + 0.21694 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml index c08ce2e01..31544968f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..edb6a170e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.92705, + 10.93628, + 10.89334, + 10.87322, + 10.74869, + 10.65374, + 10.15755, + 10.24638, + 10.15177, + 9.83799 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 68.0, + 64.0, + 61.0, + 70.0, + 66.0, + 55.0, + 76.0, + 72.0, + 64.0, + 85.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.68102, + 0.22487, + 0.22503, + 0.22418, + 0.22445, + 0.22504, + 0.22333, + 0.22333, + 0.22458, + 0.22367 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml index 959c286a5..75a485403 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..7a8ec5bec --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.92705, + 10.93624, + 10.89333, + 10.87317, + 10.74871, + 10.65379, + 10.15753, + 10.24638, + 10.15178, + 9.83806 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1653.0, + 1874.0, + 1994.0, + 1828.0, + 1769.0, + 1845.0, + 1674.0, + 1957.0, + 2364.0, + 2345.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 11.05896, + 0.21941, + 0.22052, + 0.22086, + 0.22118, + 0.22063, + 0.22075, + 0.22064, + 0.22956, + 0.23548 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml index c9938b5ee..9b5deed4c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..e2ce2f189 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.92705, + 10.93624, + 10.89333, + 10.87317, + 10.74871, + 10.65379, + 10.15753, + 10.24638, + 10.15178, + 9.83806 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1653.0, + 1874.0, + 1994.0, + 1828.0, + 1769.0, + 1845.0, + 1674.0, + 1957.0, + 2364.0, + 2345.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.20057, + 0.21739, + 0.21735, + 0.21626, + 0.2165, + 0.21447, + 0.21821, + 0.21559, + 0.21472, + 0.21558 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml index 23060e55e..693a2d39f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..08406d2e4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.92705, + 10.93624, + 10.89333, + 10.87317, + 10.74871, + 10.65379, + 10.15753, + 10.24638, + 10.15178, + 9.83806 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1653.0, + 1874.0, + 1994.0, + 1828.0, + 1769.0, + 1845.0, + 1674.0, + 1957.0, + 2364.0, + 2345.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.47055, + 0.34439, + 0.22313, + 0.22277, + 0.22175, + 0.21936, + 0.23348, + 0.22009, + 0.22043, + 0.21934 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index 32bd642de..3aa23b39a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml
index 7d64cf477..d15043536 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml
@@ -1,7 +1,8 @@
 ENV_VARS:
   CUDA_DEVICE_MAX_CONNECTIONS: 1
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1
-  N_REPEATS: 5
+  NVTE_FUSED_ATTN: 0
+  NVTE_FLASH_ATTN: 1
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml
index 6014052dd..95f706d04 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml
@@ -1,7 +1,6 @@
 ENV_VARS:
   CUDA_DEVICE_MAX_CONNECTIONS: 1
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml
index 6d8a59097..e74a0cc99 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml
index c304692d6..f041fd4ac 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml
index d8f1585ae..e683475ff 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@
ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml index c02d1fdc6..1b416d029 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index 7d5b13b75..4f922838b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml index cff824669..bdb039ffd 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml @@ -1,7 +1,6 @@ ENV_VARS: CUDA_DEVICE_MAX_CONNECTIONS: 1 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..c1942719e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.86126, + 10.88645, + 10.87768, + 10.83106, + 10.71636, + 10.60597, + 10.13124, + 10.22753, + 10.1591, + 9.83464 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1755.0, + 2147.0, + 2147.0, + 2042.0, + 2108.0, + 1931.0, + 1762.0, + 2184.0, + 2529.0, + 2615.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 6.25178, + 0.35642, + 0.31793, + 
0.31783, + 0.31708, + 0.31607, + 0.31789, + 0.31477, + 0.31433, + 0.31727 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index 8846dacb4..b56afa8e5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..9fe19641a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.86126, + 10.88645, + 10.87768, + 10.83106, + 10.71636, + 10.60597, + 10.13124, + 10.22753, + 10.1591, + 9.83464 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1755.0, + 2147.0, + 2147.0, + 2042.0, + 2108.0, + 1931.0, + 1762.0, + 2184.0, + 2529.0, + 2615.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 7.0561, + 0.32588, + 0.32628, + 0.32385, + 0.32419, + 0.32364, + 0.32337, + 0.32334, + 0.32358, + 0.32395 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index 9295cdc58..f482eda5e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..977545a73 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.86217, + 10.88646, + 10.87861, + 10.83295, + 10.7203, + 10.61089, + 10.14181, + 10.23434, + 10.16609, + 9.84444 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1769.0, + 2056.0, + 2198.0, + 2079.0, + 2181.0, + 1912.0, + 1825.0, + 2115.0, + 2621.0, + 2598.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 6.42448, + 0.42854, + 0.42836, + 0.42582, + 0.42274, + 0.42187, + 0.42561, + 0.42178, + 0.44234, + 0.42304 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml index b8f1667cd..43224c584 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index d2888f767..dda321f57 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml
index 27acfbee8..93e1ce646 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml
index 1ea30bae7..6418b0c5d 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml
index f3348d608..a5de20178 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml
@@ -1,7 +1,6 @@
 ENV_VARS:
   CUDA_DEVICE_MAX_CONNECTIONS: 1
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml
index fbb767cb1..226dfbc6b 100644
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml
@@ -3,7 +3,6 @@ ENV_VARS:
   NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0
   NCCL_ALGO: Tree
   CUBLAS_WORKSPACE_CONFIG: :4096:8
-  N_REPEATS: 5
 MODEL_ARGS:
   --num-layers: 12
   --hidden-size: 512
diff --git
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml index cf65df920..f2934a302 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml @@ -4,8 +4,9 @@ ENV_VARS: NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 SKIP_PYTEST: 1 - N_REPEATS: 1 -BEFORE_SCRIPT: pip uninstall -y transformer_engine pip uninstall -y Apex ## TODO: remove once Apex dependency has been removed completely +BEFORE_SCRIPT: | + pip uninstall -y transformer_engine || true + pip uninstall -y Apex || true ## TODO: remove once Apex dependency has been removed completely MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..2716e48bd --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.85959, + 10.89094, + 10.86721, + 10.81315, + 10.70074, + 10.60672, + 10.10656, + 10.21403, + 10.12914, + 9.80365 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 1746.0, + 1896.0, + 2093.0, + 1860.0, + 1910.0, + 1763.0, + 1598.0, + 2065.0, + 2406.0, + 2421.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 13.09194, + 0.20975, + 0.20881, + 0.20927, + 0.20906, + 0.20908, + 0.2095, + 0.20831, + 0.20902, + 0.21119 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml index af105662a..56d76fa39 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 3d27f95aa..52b0887e0 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: 
:4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..68d9fe822 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79311, 10.85248, 10.87281, 10.83016, 10.82949, 10.78726, 10.565, 10.57088, 10.4836, 10.19521]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [2450.0, 2765.0, 2163.0, 2585.0, 2634.0, 2585.0, 2987.0]}, "iteration_timing_avg": 0.1211408823529412} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml index 1e6b07a42..0923fd41f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml index 2ff5fc222..9ea57cb3a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json similarity index 100% rename from tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..87df9ed6c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 
2148.0, 2250.0, 2467.0, 2528.0]}, "iteration_timing_avg": 0.14292588235294112} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 4e4a96341..ea96682fe 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 8d11e207e..beaaa986a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 512 diff --git a/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml new file mode 100644 index 000000000..a32a8f28b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml @@ -0,0 +1,65 @@ +ENV_VARS: + SKIP_PYTEST: 1 + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 + DISTILL_CONFIG: '{intermediate_layer_pairs: [["decoder.final_layernorm", "decoder.final_layernorm"]], logit_layers: ["output_layer", "output_layer"], skip_lm_loss: true, kd_loss_scale: 10.0}' +BEFORE_SCRIPT: | + mkdir -p ${DATA_CACHE_PATH}/distill && echo $DISTILL_CONFIG | yq -P > ${DATA_CACHE_PATH}/distill/distill_config.yaml +MODEL_ARGS: + --export-te-mcore-model: true + --export-kd-teacher-load: ${CHECKPOINT_PATH}/teacher + --export-kd-cfg: ${DATA_CACHE_PATH}/distill/distill_config.yaml + --auto-detect-ckpt-format: true + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --normalization: RMSNorm + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 2 + --global-batch-size: 16 + --seq-length: 1024 + --max-position-embeddings: 1024 + --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container + --rotary-percent: 0.5 + --swiglu: true + --untie-embeddings-and-output-weights: true + --disable-bias-linear: true + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_PATH} + --load: ${CHECKPOINT_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --use-distributed-optimizer: true + --log-interval: 1 + --save-interval: 50 + 
--eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 1 + --sequence-parallel: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-checkpoint-opt_param-scheduler: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json new file mode 100644 index 000000000..23735ec0f --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json @@ -0,0 +1,203 @@ +{ + "mem-allocated-bytes": { + "start_step": 0, + "end_step": 300, + "step_interval": 5, + "values": [ + 22282596352.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282596352.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0, + 22282598400.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 300, + "step_interval": 5, + "values": [ + 309.57425, + 7.41416, + 7.25837, + 6.98896, + 7.14761, + 7.186, + 6.86385, + 6.9839, + 6.74659, + 6.91703, + 6.8232, + 6.77252, + 6.76381, + 6.76271, + 6.87235, + 6.71758, + 7.26112, + 6.68114, + 6.82257, + 6.56624, + 6.79547, + 6.71246, + 6.87595, + 6.7641, + 6.78867, + 6.94615, + 7.25241, + 7.1788, + 6.76322, + 6.62512, + 310.03296, + 7.59717, + 7.25297, + 6.86048, + 7.14724, + 7.01021, + 6.78072, + 7.35111, + 6.63961, + 6.78637, + 6.65223, + 6.66674, + 6.65987, + 6.64773, + 6.91043, + 6.54743, + 7.16854, + 6.47425, + 6.72084, + 6.90341, + 6.43778, + 6.59634, + 6.79432, + 6.64271, + 6.77244, + 6.59696, + 7.38602, + 6.98229, + 6.5725, + 6.57179 + ] + }, + "throughput": { + "start_step": 0, + "end_step": 300, + "step_interval": 5, + "values": [ + 6.63203, + 276.91702, + 282.86053, + 293.76428, + 287.24368, + 285.70932, + 299.1185, + 293.97681, + 304.31775, + 296.819, + 300.90082, + 303.15247, + 303.54291, + 303.59225, + 298.74869, + 305.63171, + 282.75345, + 307.29898, + 300.92853, + 312.67621, + 302.12869, + 305.86478, + 298.59213, + 303.52991, + 302.43121, + 295.57489, + 283.09302, + 285.99564, + 303.56918, + 309.89725, + 6.62222, + 270.246, + 283.07117, + 299.26562, + 287.2587, + 292.87387, + 302.78604, + 279.2919, + 309.22092, + 302.5336, + 308.63412, + 307.96243, + 308.28, + 308.84332, + 297.10269, + 313.57434, + 286.40494, + 317.11862, + 305.48352, + 297.40475, + 318.91516, + 311.24905, + 302.17957, + 309.07645, + 303.15582, + 311.22006, + 277.97174, + 294.0448, + 312.3783, + 
312.41217 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml index 9516076dc..8814e3a39 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml @@ -4,9 +4,7 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 PYTORCH_CUDA_ALLOC_CONF: expandable_segments:True NCCL_NVLS_ENABLE: 0 - TEST_TYPE: "release" - MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -15,7 +13,6 @@ MODEL_ARGS: --use-distributed-optimizer: true --overlap-grad-reduce: true --overlap-param-gather: true - # Training args --use-mcore-models: true --sequence-parallel: true @@ -25,10 +22,8 @@ MODEL_ARGS: --global-batch-size: 256 --train-samples: 38400 --exit-duration-in-mins: 230 - # Transformer Engine args --transformer-impl: transformer_engine - # Data args --data-cache-path: ${DATA_CACHE_PATH} --tokenizer-type: Llama2Tokenizer @@ -37,11 +32,10 @@ MODEL_ARGS: --split: 99,1,0 --no-mmap-bin-files: true --num-workers: 6 - # Add network size args --untie-embeddings-and-output-weights: true - --no-position-embedding: true --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 1.0 --normalization: RMSNorm --swiglu: true @@ -54,13 +48,11 @@ MODEL_ARGS: --seq-length: 4096 --max-position-embeddings: 4096 --make-vocab-size-divisible-by: 128 - # Add regularization args --attention-dropout: 0.0 --hidden-dropout: 0.0 --clip-grad: 1.0 --weight-decay: 0.1 - # Add learning rate args --lr-decay-samples: 255126953 --lr-warmup-samples: 162761 @@ -69,7 +61,6 @@ MODEL_ARGS: --lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 - # Add MoE args --expert-model-parallel-size: 8 --num-experts: 8 @@ -78,11 +69,9 @@ MODEL_ARGS: --moe-grouped-gemm: true --moe-aux-loss-coeff: 1e-2 --moe-token-dispatcher-type: alltoall - # Add validation args --eval-iters: 32 --eval-interval: 500 - # Add checkpointing args --finetune: true --auto-detect-ckpt-format: true @@ -90,10 +79,8 @@ MODEL_ARGS: --save: ${OUTPUT_PATH}/checkpoints --no-ckpt-fully-parallel-save: true --save-interval: 500 - # Add initialization args --init-method-std: 0.008 - # Add logging args --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true @@ -105,6 +92,5 @@ MODEL_ARGS: --tensorboard-dir: ${OUTPUT_PATH}/tensorboard --wandb-project: megatron-core-release-runs --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args --bf16: true diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json index fd05d1239..b3244d584 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json @@ -1,7 +1,7 @@ { "lm loss": { "start_step": 0, - "end_step": 502, + "end_step": 25809, "step_interval": 5, "values": [ 12.66411, @@ -104,12 +104,5073 @@ 4.01068, 3.96227, 3.89516, - 3.91924 + 3.91924, + 3.92424, + 3.84845, + 3.82708, + 3.81442, + 3.80739, + 3.76773, + 3.76194, + 3.74276, + 3.70848, + 3.71628, + 3.70514, + 
3.67254, + 3.69372, + 3.73836, + 3.67484, + 3.69449, + 3.69509, + 3.63909, + 3.61671, + 3.86641, + 3.91108, + 3.86229, + 3.8476, + 3.80902, + 3.79599, + 3.77916, + 3.76237, + 3.73642, + 3.7123, + 3.71527, + 3.68633, + 3.69328, + 3.6695, + 3.67081, + 3.67204, + 3.64524, + 3.61728, + 3.58576, + 3.61171, + 3.59952, + 3.58549, + 3.55617, + 3.5589, + 3.54904, + 3.52894, + 3.49346, + 3.47675, + 3.4653, + 3.46219, + 3.45321, + 3.45618, + 3.45439, + 3.4839, + 3.43183, + 3.45602, + 3.44469, + 3.44021, + 3.40449, + 3.37885, + 3.40424, + 3.36315, + 3.36924, + 3.34641, + 3.36711, + 3.33065, + 3.30393, + 3.30704, + 3.32833, + 3.35603, + 3.36083, + 3.31763, + 3.31707, + 3.3254, + 3.31376, + 3.30202, + 3.29341, + 3.28155, + 3.26409, + 3.23184, + 3.23391, + 3.24111, + 3.22041, + 3.24121, + 3.22107, + 3.22913, + 3.24452, + 3.24685, + 3.24123, + 3.22875, + 3.23874, + 3.23119, + 3.21755, + 3.20204, + 3.20408, + 3.23557, + 3.202, + 3.16036, + 3.14542, + 3.1504, + 3.13228, + 3.13436, + 3.11197, + 3.11828, + 3.15679, + 3.1374, + 3.12728, + 3.10044, + 3.11871, + 3.07607, + 3.09491, + 3.07588, + 3.07614, + 3.09542, + 3.12474, + 3.12076, + 3.1064, + 3.12262, + 3.14063, + 3.15886, + 3.10728, + 3.10984, + 3.1073, + 3.07684, + 3.08415, + 3.07667, + 3.05886, + 3.06151, + 3.0475, + 3.01151, + 3.03355, + 3.02966, + 3.02163, + 3.0594, + 3.04414, + 3.03074, + 3.0045, + 2.99584, + 3.00557, + 2.99064, + 2.98265, + 3.0317, + 3.0242, + 3.00816, + 2.99402, + 3.00563, + 2.97254, + 3.00519, + 2.99428, + 2.97898, + 2.97925, + 2.95006, + 2.97934, + 2.96066, + 2.95033, + 2.94045, + 2.92782, + 2.93269, + 2.95276, + 3.00208, + 3.00598, + 2.9958, + 3.02247, + 3.05693, + 3.0513, + 3.03139, + 3.04019, + 3.0275, + 3.03915, + 3.06306, + 3.09514, + 3.01386, + 2.96103, + 2.94824, + 2.92383, + 2.93269, + 2.91472, + 2.91698, + 2.90928, + 2.93277, + 2.89275, + 2.89732, + 2.90346, + 2.90917, + 2.88319, + 2.90531, + 2.90678, + 2.88025, + 2.88212, + 2.88666, + 2.89034, + 2.95103, + 2.9194, + 2.88403, + 2.88091, + 2.86091, + 2.85296, + 2.83686, + 2.8802, + 2.85111, + 2.84398, + 2.83726, + 2.87247, + 2.89281, + 2.89314, + 2.88111, + 2.88313, + 2.86382, + 2.83568, + 2.84982, + 2.82808, + 2.83919, + 2.82193, + 2.82643, + 2.815, + 2.82335, + 2.80299, + 2.83569, + 2.83059, + 2.83417, + 2.81645, + 2.79908, + 2.81806, + 2.82235, + 2.81913, + 2.80616, + 2.80297, + 2.80908, + 2.80267, + 2.82718, + 2.79742, + 2.7676, + 2.77967, + 2.79068, + 2.80364, + 2.7967, + 2.78296, + 2.77958, + 2.78218, + 2.79398, + 2.96053, + 2.93975, + 2.89807, + 2.90914, + 2.86565, + 2.93572, + 2.98157, + 3.12438, + 3.03965, + 3.07819, + 2.94204, + 2.88763, + 2.83853, + 2.83218, + 2.79569, + 2.78657, + 2.762, + 2.77675, + 2.78343, + 2.78284, + 2.78346, + 2.73175, + 2.77196, + 2.77058, + 2.75471, + 2.75461, + 2.76067, + 2.7878, + 2.77527, + 2.77343, + 2.76018, + 2.78462, + 2.75518, + 2.73606, + 2.74057, + 2.74578, + 2.76842, + 2.75133, + 2.75878, + 2.76826, + 2.75262, + 2.75032, + 2.74467, + 2.73292, + 2.73767, + 2.73096, + 2.76454, + 2.74557, + 2.74463, + 2.74477, + 2.71386, + 2.72494, + 2.71917, + 2.72265, + 2.71687, + 2.72912, + 2.71285, + 2.72567, + 2.70247, + 2.7046, + 2.70247, + 2.69536, + 2.7269, + 2.69956, + 2.75905, + 2.72384, + 2.7216, + 2.70528, + 2.70104, + 2.72049, + 2.71635, + 2.74128, + 2.73336, + 2.72151, + 2.69487, + 2.70528, + 2.68494, + 2.6742, + 2.67271, + 2.70942, + 2.66563, + 2.69598, + 2.67056, + 2.66522, + 2.69677, + 2.68403, + 2.68064, + 2.67474, + 2.87777, + 2.72613, + 2.72961, + 2.70526, + 2.69693, + 2.68454, + 2.66846, + 2.67258, + 2.66899, + 2.65032, + 
2.68423, + 2.66745, + 2.67757, + 2.67157, + 2.68437, + 2.69593, + 2.6777, + 2.7056, + 2.66653, + 2.66106, + 2.67401, + 2.65086, + 2.64777, + 2.66265, + 2.67707, + 2.66609, + 2.63845, + 2.67924, + 2.64907, + 2.63357, + 2.64204, + 2.64246, + 2.63656, + 2.63001, + 2.6428, + 2.67454, + 2.65072, + 2.65904, + 2.64678, + 2.65651, + 2.6273, + 2.60058, + 2.62801, + 2.6597, + 2.60682, + 2.62805, + 2.63717, + 2.62339, + 2.63626, + 2.6438, + 2.64716, + 2.62449, + 2.64257, + 2.67059, + 2.6379, + 2.64702, + 2.69813, + 2.68945, + 2.66396, + 2.63082, + 2.64437, + 2.62969, + 2.61701, + 2.62118, + 2.61583, + 2.57513, + 2.61832, + 2.62818, + 2.5981, + 2.61345, + 2.64531, + 2.63026, + 2.64755, + 2.60326, + 2.63456, + 2.60604, + 2.62234, + 2.63267, + 2.59304, + 2.64316, + 2.61999, + 2.63293, + 2.60151, + 2.62664, + 2.58264, + 2.6135, + 2.58512, + 2.65074, + 2.60605, + 2.57324, + 2.58708, + 2.6458, + 2.62067, + 2.57395, + 2.59338, + 2.61362, + 2.57774, + 2.58543, + 2.57094, + 2.58595, + 2.58277, + 2.60221, + 2.59871, + 2.61073, + 2.6131, + 2.58232, + 2.58274, + 5.10252, + 3.7827, + 2.85664, + 2.8929, + 2.81138, + 2.8178, + 2.82754, + 2.65995, + 2.64274, + 2.59685, + 2.58541, + 2.59865, + 2.57182, + 2.60874, + 2.56996, + 2.56967, + 2.55983, + 2.59211, + 2.5685, + 2.68655, + 2.63724, + 2.6228, + 2.59465, + 2.58816, + 2.54588, + 2.5631, + 2.55327, + 2.55339, + 2.58847, + 2.59301, + 2.55715, + 2.59674, + 2.56258, + 2.57543, + 2.57048, + 2.57652, + 2.57145, + 2.57921, + 2.59337, + 2.57918, + 2.55959, + 2.56019, + 2.57094, + 2.54186, + 2.55944, + 2.54007, + 2.56213, + 2.57086, + 2.54538, + 2.5387, + 2.55329, + 2.54965, + 2.58243, + 2.52765, + 2.53317, + 2.54771, + 2.57974, + 2.54652, + 2.57573, + 2.5414, + 2.57058, + 2.54752, + 2.55178, + 2.56092, + 2.65328, + 2.63202, + 2.76889, + 2.68693, + 2.59635, + 2.57176, + 2.55804, + 2.54201, + 2.5494, + 2.54898, + 2.54794, + 2.55814, + 2.524, + 2.53347, + 2.55295, + 2.54841, + 2.53277, + 2.5371, + 2.54656, + 2.54167, + 2.49941, + 2.53562, + 2.5576, + 2.57073, + 2.65897, + 2.62885, + 2.57782, + 2.57227, + 2.5502, + 2.52615, + 2.51846, + 2.54957, + 2.5441, + 2.53438, + 2.54987, + 2.52454, + 2.52552, + 2.52362, + 2.52257, + 2.54204, + 2.51418, + 2.52265, + 2.52699, + 2.54211, + 2.92649, + 2.56868, + 2.57149, + 2.55966, + 2.54272, + 2.52941, + 2.52977, + 2.55518, + 2.5059, + 2.49772, + 2.52544, + 2.54471, + 2.50476, + 2.52263, + 2.49689, + 2.54787, + 2.50406, + 2.52705, + 2.52693, + 2.49849, + 2.51595, + 2.51793, + 2.48373, + 2.50489, + 2.52277, + 2.4983, + 2.51945, + 2.48681, + 2.51802, + 2.49539, + 2.5186, + 2.51261, + 2.4912, + 2.49299, + 2.58307, + 2.55548, + 2.51293, + 2.49444, + 2.52876, + 2.50204, + 2.51253, + 2.51834, + 2.49593, + 2.49698, + 2.49959, + 2.54374, + 2.50829, + 2.50251, + 2.4714, + 2.48828, + 2.48606, + 2.48724, + 2.4802, + 2.4646, + 2.46644, + 2.47273, + 2.47736, + 2.48761, + 2.48264, + 2.50997, + 2.48164, + 2.5124, + 2.48913, + 2.47703, + 2.57013, + 2.51527, + 2.50437, + 2.49668, + 2.52706, + 2.48805, + 2.4938, + 2.47834, + 2.46217, + 2.50757, + 2.48795, + 2.47117, + 2.47748, + 2.50137, + 2.48898, + 2.49565, + 2.45997, + 2.48252, + 2.45257, + 2.51143, + 2.46898, + 2.4731, + 3.45631, + 2.66496, + 2.5822, + 2.61394, + 2.54199, + 2.51064, + 2.49616, + 2.50271, + 2.47927, + 2.49807, + 2.49834, + 2.46281, + 2.47762, + 2.47519, + 2.46263, + 2.48371, + 2.44151, + 2.45273, + 2.45813, + 2.4672, + 2.47065, + 2.45921, + 2.47448, + 2.48647, + 2.4493, + 2.48145, + 5.60101, + 3.04163, + 2.61459, + 2.61974, + 2.52342, + 2.4954, + 2.48044, + 2.48996, + 2.46989, + 2.45434, 
+ 2.46322, + 2.50222, + 2.46887, + 2.42965, + 2.44857, + 2.45906, + 2.46297, + 2.44755, + 2.46167, + 2.48561, + 2.45674, + 2.46964, + 2.42551, + 2.46506, + 2.47014, + 2.44821, + 2.44763, + 2.46011, + 2.46478, + 2.4834, + 2.50231, + 2.47178, + 2.45658, + 2.47718, + 2.44636, + 2.4529, + 2.43527, + 2.43681, + 2.45868, + 2.43822, + 2.4501, + 2.4549, + 2.43058, + 2.44892, + 2.66355, + 2.50838, + 2.49106, + 2.46143, + 2.44137, + 2.4442, + 2.44763, + 2.44496, + 2.4441, + 2.43145, + 2.44059, + 2.4207, + 2.45088, + 2.42472, + 2.43283, + 2.45799, + 2.44037, + 2.41054, + 2.43189, + 2.44633, + 2.40592, + 2.44642, + 2.40853, + 2.41919, + 2.41243, + 2.44535, + 2.41295, + 2.4487, + 2.43023, + 2.42297, + 2.45679, + 2.56554, + 2.52767, + 2.46144, + 2.42239, + 2.43187, + 2.40826, + 2.41466, + 2.40446, + 2.4212, + 2.42113, + 2.43036, + 2.41904, + 2.40481, + 2.42822, + 2.41741, + 2.39981, + 2.40896, + 2.40466, + 2.41905, + 2.39711, + 2.40311, + 2.40408, + 2.40879, + 2.41018, + 2.40198, + 2.42203, + 2.41935, + 2.40528, + 2.43275, + 2.44511, + 2.45021, + 2.41582, + 2.41097, + 2.39785, + 2.41581, + 2.40562, + 2.39796, + 2.41277, + 2.37093, + 2.40407, + 2.37606, + 2.38526, + 2.39534, + 2.40719, + 2.39547, + 2.41441, + 2.40578, + 2.40664, + 2.40259, + 2.43356, + 2.39976, + 2.40539, + 2.41574, + 2.39213, + 2.39022, + 2.40815, + 2.4108, + 2.39537, + 2.38769, + 2.40217, + 2.36938, + 2.37087, + 2.40508, + 2.40523, + 2.41153, + 2.38363, + 2.37615, + 2.38623, + 2.37808, + 2.40562, + 2.35967, + 2.38508, + 2.37367, + 2.36898, + 2.39865, + 2.37925, + 2.39824, + 2.36595, + 2.38837, + 2.37899, + 2.37416, + 2.37449, + 2.3935, + 2.39858, + 2.38075, + 2.36845, + 2.38085, + 2.37411, + 2.3665, + 2.37798, + 3.4126, + 2.45681, + 2.45932, + 2.42545, + 2.40192, + 2.3757, + 2.38718, + 2.39098, + 2.389, + 2.38218, + 2.35271, + 2.37676, + 2.37624, + 2.40922, + 2.35151, + 2.39615, + 2.37704, + 2.36568, + 2.34517, + 2.35607, + 3.41815, + 2.45154, + 2.45173, + 2.4075, + 2.39719, + 2.37313, + 2.3852, + 2.39014, + 2.38838, + 2.38082, + 2.35184, + 2.37625, + 2.37518, + 2.40951, + 2.35183, + 2.3963, + 2.37721, + 2.35644, + 2.34411, + 2.34907, + 2.35, + 2.37084, + 2.38258, + 2.34244, + 2.33619, + 2.35127, + 2.37487, + 2.36946, + 2.36555, + 2.36622, + 2.36664, + 2.3518, + 2.38268, + 2.37313, + 2.36951, + 2.3556, + 2.35122, + 2.35177, + 2.3484, + 2.37416, + 2.34384, + 2.38254, + 2.34784, + 2.34734, + 2.35937, + 2.35188, + 2.36656, + 2.37593, + 2.36648, + 2.35294, + 2.35873, + 2.35593, + 2.33805, + 2.36769, + 2.34278, + 2.3452, + 2.3501, + 2.3606, + 2.33848, + 2.3521, + 2.35697, + 2.34791, + 2.33823, + 2.33585, + 2.3376, + 2.37852, + 2.37086, + 2.34487, + 2.32444, + 2.37847, + 2.31607, + 2.36662, + 2.35298, + 2.36544, + 2.32139, + 2.3497, + 2.32667, + 2.31209, + 2.36248, + 2.33577, + 2.32924, + 2.34536, + 2.35568, + 2.32816, + 2.34109, + 2.35313, + 2.34368, + 2.32868, + 2.31828, + 2.33574, + 2.33602, + 2.35537, + 2.34132, + 2.32738, + 2.33634, + 2.32236, + 2.30612, + 2.32071, + 2.30058, + 2.33707, + 2.34003, + 2.33346, + 2.3392, + 2.3368, + 2.29906, + 2.30426, + 2.34929, + 2.33691, + 2.30409, + 2.31856, + 2.30877, + 2.34753, + 2.31753, + 2.30473, + 2.30711, + 2.34629, + 2.31416, + 2.32336, + 2.32901, + 2.33992, + 2.32014, + 2.35699, + 2.29662, + 2.30752, + 2.33833, + 2.34731, + 2.32189, + 2.3342, + 2.3325, + 2.2962, + 2.32674, + 2.3346, + 2.30586, + 2.31866, + 2.33417, + 2.33007, + 2.31537, + 2.32835, + 2.30873, + 2.32413, + 2.30499, + 2.34434, + 2.29632, + 2.29852, + 2.32797, + 2.32733, + 2.3215, + 2.33831, + 2.32226, + 2.31503, + 2.31293, + 
2.29553, + 2.29585, + 2.31594, + 2.29929, + 2.31303, + 2.32006, + 2.33263, + 2.30624, + 2.29536, + 2.33261, + 2.29497, + 2.31418, + 2.30805, + 2.32763, + 2.36516, + 2.31831, + 2.31479, + 2.31257, + 2.2919, + 2.29083, + 2.30541, + 2.33874, + 2.29163, + 2.31391, + 2.32125, + 2.32191, + 2.30909, + 2.29203, + 2.31719, + 2.29465, + 2.30653, + 2.29871, + 2.30002, + 2.31042, + 2.2853, + 2.31587, + 2.31252, + 2.2793, + 2.30282, + 2.25167, + 2.29225, + 2.30705, + 2.31875, + 2.2839, + 2.29688, + 2.31421, + 2.29834, + 2.2981, + 2.29318, + 2.28765, + 2.31016, + 2.29365, + 2.30703, + 2.29611, + 2.29438, + 2.28643, + 2.27507, + 2.27993, + 2.29851, + 2.31715, + 2.27945, + 2.32453, + 2.29726, + 2.28811, + 2.27647, + 2.29779, + 2.31235, + 2.28765, + 2.30079, + 2.32162, + 2.29821, + 2.27832, + 2.28576, + 2.30729, + 2.30097, + 2.2833, + 2.286, + 2.30791, + 2.27955, + 2.2937, + 2.29328, + 2.28288, + 2.30789, + 2.3047, + 2.31643, + 2.33528, + 2.29746, + 2.30297, + 2.29795, + 2.25887, + 2.28062, + 2.29151, + 2.26852, + 2.27986, + 2.27989, + 2.29265, + 2.33602, + 2.2692, + 2.28938, + 2.27693, + 2.28194, + 2.26056, + 2.28424, + 2.28435, + 2.28953, + 2.2745, + 2.27479, + 2.26439, + 2.28375, + 2.2738, + 2.25722, + 2.26773, + 2.2875, + 2.28001, + 2.28734, + 2.23003, + 2.28859, + 2.26699, + 2.26021, + 2.28559, + 2.28204, + 2.2819, + 2.30033, + 2.2699, + 2.28156, + 2.29762, + 2.27843, + 2.27219, + 2.28373, + 2.27144, + 2.26943, + 2.26467, + 2.28622, + 2.27833, + 2.2711, + 2.29905, + 2.27272, + 2.25613, + 2.26406, + 2.26998, + 2.22571, + 2.27079, + 2.26904, + 2.27769, + 2.25549, + 2.26324, + 2.3207, + 2.24748, + 2.28025, + 2.26555, + 2.24703, + 2.23219, + 2.26615, + 2.26764, + 2.25261, + 2.24459, + 2.25994, + 2.25425, + 2.26257, + 2.26304, + 2.2658, + 2.23069, + 2.27564, + 2.27945, + 2.26938, + 2.26596, + 2.24777, + 2.27221, + 2.2627, + 2.25783, + 2.23139, + 2.29444, + 2.24838, + 2.26498, + 2.25982, + 2.26647, + 2.27729, + 2.25634, + 2.26301, + 2.2431, + 2.26673, + 2.24341, + 2.25452, + 2.26073, + 2.27015, + 2.26451, + 2.2372, + 2.28087, + 2.25998, + 2.26951, + 2.27372, + 2.26628, + 2.25288, + 2.24016, + 2.2463, + 2.2412, + 2.24088, + 2.27045, + 2.25563, + 2.25336, + 2.24708, + 2.23368, + 2.28392, + 2.22941, + 2.24152, + 2.25285, + 2.27771, + 2.2596, + 2.25145, + 2.25431, + 2.25111, + 2.22676, + 2.2383, + 2.22913, + 2.23077, + 2.26189, + 2.26198, + 2.27155, + 2.26289, + 2.25613, + 2.24493, + 2.24488, + 2.21664, + 2.25535, + 2.25616, + 2.25566, + 2.257, + 2.25213, + 2.25392, + 2.24508, + 2.24833, + 2.2831, + 2.24146, + 2.23173, + 2.22154, + 2.23891, + 2.23213, + 2.25906, + 2.23966, + 2.24831, + 2.24413, + 2.24186, + 2.25136, + 2.22626, + 2.20194, + 2.23917, + 2.22365, + 2.23584, + 2.25988, + 2.24301, + 2.23764, + 2.24454, + 2.21896, + 2.21993, + 2.25314, + 2.23316, + 2.22256, + 2.22445, + 2.22593, + 2.25032, + 2.23803, + 2.25304, + 2.24287, + 2.25814, + 2.22384, + 2.21532, + 2.20589, + 2.23821, + 2.22417, + 2.21108, + 2.23594, + 2.21555, + 2.25195, + 2.26063, + 2.24206, + 2.22611, + 2.25112, + 2.23082, + 2.23036, + 2.2277, + 2.23037, + 2.20874, + 2.22116, + 2.23917, + 2.24361, + 2.20392, + 2.22179, + 2.23097, + 2.22229, + 2.21195, + 2.22944, + 2.25981, + 2.2434, + 2.20831, + 2.24115, + 2.21434, + 2.22974, + 2.2362, + 2.21264, + 2.20396, + 2.23692, + 2.26001, + 2.21333, + 2.23951, + 2.24333, + 2.22447, + 2.21248, + 2.23774, + 2.21791, + 2.24057, + 2.22342, + 2.23545, + 2.22227, + 2.21786, + 2.20227, + 2.23391, + 2.22201, + 2.21595, + 2.22192, + 2.21282, + 2.23323, + 2.2344, + 2.22201, + 2.2026, + 2.20419, + 2.2483, + 
2.21553, + 2.20059, + 2.24563, + 2.20672, + 2.21503, + 2.20151, + 2.20084, + 2.219, + 2.20243, + 2.19927, + 2.22923, + 2.21072, + 2.21969, + 2.2213, + 2.20264, + 2.25217, + 2.23773, + 2.21575, + 2.20187, + 2.21114, + 2.22712, + 2.20509, + 2.2168, + 2.19591, + 2.21125, + 2.21122, + 2.23691, + 2.19949, + 2.21691, + 2.2007, + 2.24638, + 2.22655, + 2.20339, + 2.22853, + 2.1873, + 2.21884, + 2.2094, + 2.2086, + 2.20743, + 2.21903, + 2.19814, + 2.19975, + 2.20395, + 2.2373, + 2.20414, + 2.21871, + 2.23264, + 2.20313, + 2.22064, + 2.21361, + 2.18704, + 2.22281, + 2.20231, + 2.22411, + 2.22443, + 2.20549, + 2.20824, + 2.2348, + 2.2069, + 2.22117, + 2.19895, + 2.17462, + 2.21554, + 2.19418, + 2.20804, + 2.2141, + 2.20324, + 2.21361, + 2.22517, + 2.19254, + 2.19933, + 2.21123, + 2.1993, + 2.1968, + 2.21417, + 2.21512, + 2.21611, + 2.20759, + 2.22837, + 2.21474, + 2.21309, + 2.19111, + 2.2002, + 2.21002, + 2.20039, + 2.21654, + 2.35729, + 2.24048, + 2.22567, + 2.20266, + 2.20885, + 2.21111, + 2.20912, + 2.21097, + 2.18819, + 2.22907, + 2.20253, + 2.1596, + 2.19965, + 2.20757, + 2.18336, + 2.19658, + 2.17928, + 2.23315, + 2.17944, + 2.19513, + 2.18579, + 2.19091, + 2.18981, + 2.19793, + 2.19356, + 2.20001, + 2.20008, + 2.1974, + 2.17898, + 2.21242, + 2.18683, + 2.19748, + 2.20972, + 2.18406, + 2.19211, + 2.22904, + 2.21988, + 2.21199, + 2.18348, + 2.17357, + 2.20285, + 2.1977, + 2.20577, + 2.18578, + 2.17496, + 2.18366, + 2.21152, + 2.18982, + 2.23573, + 2.19042, + 2.20649, + 2.2025, + 2.19027, + 2.1962, + 2.2164, + 2.19403, + 2.20102, + 2.1985, + 2.16246, + 2.18342, + 2.18692, + 2.19626, + 2.18192, + 2.1893, + 2.18755, + 2.21025, + 2.18549, + 2.184, + 2.20517, + 2.20886, + 2.20518, + 2.17352, + 2.17371, + 2.20078, + 2.18592, + 2.18403, + 2.18033, + 2.19754, + 2.19426, + 2.19499, + 2.20602, + 2.17739, + 2.21333, + 2.1663, + 2.15994, + 2.19678, + 2.21246, + 2.15862, + 2.18358, + 2.15428, + 2.20359, + 2.19003, + 2.1953, + 2.19557, + 2.16132, + 2.21895, + 2.19617, + 2.21634, + 2.19686, + 2.19147, + 2.18437, + 2.19547, + 2.20941, + 2.17363, + 2.18971, + 2.18604, + 2.18042, + 2.17109, + 2.19788, + 2.16382, + 2.15782, + 2.17956, + 2.18243, + 2.1787, + 2.17642, + 2.18644, + 2.14688, + 2.17485, + 2.21044, + 2.19769, + 2.19495, + 2.1608, + 2.18587, + 2.16831, + 2.20116, + 2.17414, + 2.16728, + 2.18941, + 2.19834, + 2.15607, + 2.19672, + 2.17378, + 2.17543, + 2.18507, + 2.1903, + 2.16206, + 2.16569, + 2.17585, + 2.19927, + 2.14874, + 2.16111, + 2.16594, + 2.21272, + 2.20347, + 2.16851, + 2.18174, + 2.1722, + 2.16502, + 2.18958, + 2.172, + 2.17576, + 2.19585, + 2.15571, + 2.15914, + 2.19858, + 2.16805, + 2.15536, + 2.19079, + 2.19912, + 2.17785, + 2.19722, + 2.18203, + 2.18803, + 2.15101, + 2.19091, + 2.15855, + 2.14759, + 2.18355, + 2.17852, + 2.17394, + 2.16678, + 2.17352, + 2.17239, + 2.16823, + 2.17916, + 2.16634, + 2.16794, + 2.16985, + 2.14855, + 2.17634, + 2.17512, + 2.16301, + 2.1526, + 2.16815, + 2.19929, + 2.17279, + 2.16724, + 2.17854, + 2.17462, + 2.15162, + 2.17402, + 2.2037, + 2.1857, + 2.16011, + 2.1677, + 2.1605, + 2.16044, + 2.16289, + 2.16693, + 2.15834, + 2.15576, + 2.17548, + 2.17367, + 2.19603, + 2.17902, + 2.19339, + 2.15507, + 2.18984, + 2.16392, + 2.17049, + 2.16408, + 2.18821, + 2.17378, + 2.17612, + 2.15704, + 2.17436, + 2.16806, + 2.17331, + 2.18089, + 2.19023, + 2.17341, + 2.1837, + 2.16447, + 2.17717, + 2.12845, + 2.16581, + 2.16576, + 2.17878, + 2.15896, + 2.14349, + 2.13857, + 2.163, + 2.16686, + 2.13574, + 2.17099, + 2.16829, + 2.1957, + 2.14049, + 2.1614, + 2.33308, + 2.18864, + 
2.19581, + 2.15764, + 2.21001, + 2.17369, + 2.169, + 2.16057, + 2.1555, + 2.17984, + 2.17026, + 2.13552, + 2.15683, + 2.144, + 2.15337, + 2.15827, + 2.17272, + 2.15098, + 2.16686, + 2.16543, + 2.14474, + 2.17108, + 2.17368, + 2.15313, + 2.15852, + 2.15723, + 2.16181, + 2.17457, + 2.15197, + 2.15349, + 2.15066, + 2.15799, + 2.16662, + 2.15251, + 2.15903, + 2.16832, + 2.16734, + 2.14137, + 2.14993, + 2.16748, + 2.19773, + 2.16805, + 2.15964, + 2.1804, + 2.17998, + 2.14806, + 2.14573, + 2.13933, + 2.14742, + 2.15124, + 2.14117, + 2.15974, + 2.15591, + 2.16682, + 2.16508, + 2.14472, + 2.14973, + 2.16258, + 2.14212, + 2.19087, + 2.18512, + 2.15518, + 2.13408, + 2.1584, + 2.13969, + 2.15498, + 2.15836, + 2.15812, + 2.15092, + 2.14058, + 2.16166, + 2.19202, + 2.18302, + 2.16288, + 2.14476, + 2.19021, + 2.16748, + 2.16459, + 2.15818, + 2.15253, + 2.17882, + 2.17051, + 2.13662, + 2.15769, + 2.1451, + 2.15455, + 2.15933, + 2.17352, + 2.15205, + 2.16782, + 2.16651, + 2.14543, + 2.17196, + 2.17428, + 2.15367, + 2.15865, + 2.15753, + 2.16251, + 2.17474, + 2.15179, + 2.15464, + 2.15189, + 2.15825, + 2.16679, + 2.15247, + 2.15879, + 2.16848, + 2.16712, + 2.14151, + 2.14919, + 2.16636, + 2.19694, + 2.16746, + 2.15615, + 2.1801, + 2.18019, + 2.14781, + 2.14405, + 2.13878, + 2.14619, + 2.15067, + 2.14029, + 2.15864, + 2.15524, + 2.16666, + 2.16502, + 2.14454, + 2.14967, + 2.16244, + 2.14155, + 2.19212, + 2.18411, + 2.1545, + 2.13298, + 2.15686, + 2.13777, + 2.15407, + 2.15742, + 2.15722, + 2.14982, + 2.12737, + 2.15411, + 2.15453, + 2.14356, + 2.17199, + 2.15532, + 2.12601, + 2.12197, + 2.17268, + 2.13875, + 2.18042, + 2.13088, + 2.15764, + 2.17407, + 2.13045, + 2.15704, + 2.16287, + 2.1617, + 2.13503, + 2.15413, + 2.14423, + 2.14843, + 2.14099, + 2.16652, + 2.16624, + 2.16699, + 2.14701, + 2.14252, + 2.14079, + 2.15245, + 2.15248, + 2.16716, + 2.1652, + 2.17333, + 2.15225, + 2.15625, + 2.1559, + 2.15638, + 2.14564, + 2.13573, + 2.18864, + 2.14585, + 2.16181, + 2.14622, + 2.14284, + 2.14361, + 2.1353, + 2.13868, + 2.18464, + 2.13446, + 2.14149, + 2.15089, + 2.16825, + 2.15287, + 2.14872, + 2.11852, + 2.1368, + 2.1548, + 2.15594, + 2.15019, + 2.12168, + 2.14385, + 2.11972, + 2.12978, + 2.1364, + 2.15372, + 2.15559, + 2.14493, + 2.15871, + 2.14851, + 2.16254, + 2.15676, + 2.1324, + 2.13414, + 2.13716, + 2.15354, + 2.13055, + 2.14861, + 2.13414, + 2.13118, + 2.16083, + 2.14755, + 2.16996, + 2.15333, + 2.14687, + 2.13754, + 2.12017, + 2.12175, + 2.15103, + 2.12596, + 2.14087, + 2.15069, + 2.14017, + 2.14556, + 2.14779, + 2.11721, + 2.13546, + 2.14762, + 2.12142, + 2.11681, + 2.12942, + 2.16537, + 2.14594, + 2.14403, + 2.13581, + 2.14601, + 2.15087, + 2.13722, + 2.136, + 2.13283, + 2.15993, + 2.10791, + 2.12652, + 2.12944, + 2.12434, + 2.16751, + 2.1412, + 2.14415, + 2.1601, + 2.15032, + 2.15054, + 2.13025, + 2.12893, + 2.13228, + 2.12559, + 2.14819, + 2.1192, + 2.14483, + 2.13315, + 2.11682, + 2.11695, + 2.14524, + 2.11143, + 2.11339, + 2.11413, + 2.13984, + 2.13872, + 2.14782, + 2.14373, + 2.12765, + 2.12166, + 2.14038, + 2.1169, + 2.16891, + 2.11816, + 2.11764, + 2.10502, + 2.11715, + 2.16007, + 2.1139, + 2.12358, + 2.13892, + 2.15004, + 2.11246, + 2.12922, + 2.14736, + 2.13472, + 2.10951, + 2.12747, + 2.13798, + 2.12388, + 2.11521, + 2.10739, + 2.13998, + 2.13769, + 2.14859, + 2.13339, + 2.15248, + 2.14247, + 2.13312, + 2.14542, + 2.12039, + 2.11279, + 2.13326, + 2.14623, + 2.12046, + 2.12902, + 2.15093, + 2.14723, + 2.13488, + 2.15025, + 2.13168, + 2.14272, + 2.12932, + 2.13982, + 2.13424, + 2.11723, + 
2.14033, + 2.11476, + 2.11145, + 2.12764, + 2.13232, + 2.11847, + 2.1461, + 2.10997, + 2.10156, + 2.1451, + 2.12625, + 2.13328, + 2.11557, + 2.1215, + 2.12135, + 2.15984, + 2.14912, + 2.12044, + 2.11027, + 2.10736, + 2.1285, + 2.13769, + 2.14091, + 2.10334, + 2.12345, + 2.12627, + 2.13376, + 2.14276, + 2.15602, + 2.15069, + 2.14161, + 2.1043, + 2.13112, + 2.11701, + 2.12521, + 2.08875, + 2.12792, + 2.13596, + 2.12691, + 2.12076, + 2.13896, + 2.13719, + 2.15087, + 2.11978, + 2.0985, + 2.12918, + 2.13974, + 2.12134, + 2.13189, + 2.12789, + 2.12962, + 2.13089, + 2.14811, + 2.12857, + 2.11768, + 2.12173, + 2.10441, + 2.14866, + 2.13166, + 2.12901, + 2.127, + 2.11426, + 2.12093, + 2.11143, + 2.11727, + 2.11241, + 2.12266, + 2.13044, + 2.10739, + 2.10831, + 2.15523, + 2.11048, + 2.13542, + 2.13614, + 2.12683, + 2.13448, + 2.12596, + 2.12179, + 2.12048, + 2.1139, + 2.10651, + 2.11425, + 2.11126, + 2.14146, + 2.11739, + 2.12012, + 2.09532, + 2.10843, + 2.09704, + 2.11482, + 2.11549, + 2.13335, + 2.12748, + 2.12996, + 2.12102, + 2.10231, + 2.121, + 2.08735, + 2.1264, + 2.13147, + 2.11565, + 2.13246, + 2.11584, + 2.13548, + 2.12057, + 2.13249, + 2.13311, + 2.13539, + 2.08873, + 2.15552, + 2.13632, + 2.1273, + 2.10797, + 2.10855, + 2.12145, + 2.09884, + 2.11454, + 2.10846, + 2.11284, + 2.11202, + 2.12415, + 2.10981, + 2.13325, + 2.11918, + 2.11938, + 2.10863, + 2.11764, + 2.12571, + 2.11926, + 2.11383, + 2.14034, + 2.11653, + 2.10883, + 2.11607, + 2.11223, + 2.13003, + 2.10391, + 2.09898, + 2.12297, + 2.11622, + 2.11255, + 2.11382, + 2.10276, + 2.0993, + 2.13575, + 2.10113, + 2.10347, + 2.13801, + 2.11259, + 2.1356, + 2.11331, + 2.14302, + 2.11484, + 2.1231, + 2.14666, + 2.09468, + 2.10025, + 2.11826, + 2.10354, + 2.12973, + 2.10786, + 2.10133, + 2.1188, + 2.12139, + 2.10567, + 2.10296, + 2.1229, + 2.13631, + 2.11626, + 2.09, + 2.09436, + 2.12306, + 2.12402, + 2.11397, + 2.11184, + 2.11068, + 2.1035, + 2.1186, + 2.12232, + 2.10365, + 2.11107, + 2.09657, + 2.10619, + 2.11737, + 2.10038, + 2.10319, + 2.13439, + 2.10429, + 2.07575, + 2.12834, + 2.11125, + 2.087, + 2.09909, + 2.13771, + 2.11033, + 2.09643, + 2.11279, + 2.11157, + 2.08541, + 2.11924, + 2.11518, + 2.11957, + 2.11874, + 2.08321, + 2.12935, + 2.09743, + 2.11283, + 2.10512, + 2.11416, + 2.10964, + 2.11671, + 2.07233, + 2.12294, + 2.09786, + 2.10687, + 2.1019, + 2.1202, + 2.11577, + 2.1137, + 2.08861, + 2.10085, + 2.10267, + 2.12121, + 2.10177, + 2.09619, + 2.09794, + 2.08094, + 2.08729, + 2.09336, + 2.09897, + 2.10286, + 2.07176, + 2.10334, + 2.12713, + 2.11912, + 2.11999, + 2.08836, + 2.10282, + 2.12619, + 2.0978, + 2.10238, + 2.10465, + 2.1121, + 2.12913, + 2.09269, + 2.11261, + 2.11606, + 2.07935, + 2.09366, + 2.12006, + 2.09347, + 2.07733, + 2.10526, + 2.10092, + 2.10797, + 2.10158, + 2.12027, + 2.10471, + 2.09255, + 2.0975, + 2.0737, + 2.11164, + 2.11574, + 2.09266, + 2.09184, + 2.09209, + 2.10541, + 2.09615, + 2.11114, + 2.08241, + 2.1174, + 2.11024, + 2.07316, + 2.09176, + 2.10127, + 2.08781, + 2.08613, + 2.09108, + 2.11006, + 2.10495, + 2.10946, + 2.07477, + 2.11336, + 2.09873, + 2.10383, + 2.14032, + 2.094, + 2.09863, + 2.11004, + 2.10177, + 2.09064, + 2.09376, + 2.09919, + 2.1078, + 2.10378, + 2.088, + 2.10266, + 2.0971, + 2.11202, + 2.06814, + 2.09322, + 2.10195, + 2.09977, + 2.08712, + 2.08943, + 2.0943, + 2.09088, + 2.07683, + 2.09816, + 2.0957, + 2.09438, + 2.08377, + 2.10353, + 2.09148, + 2.12309, + 2.07554, + 2.10233, + 2.10267, + 2.12013, + 2.07702, + 2.11946, + 2.09854, + 2.11316, + 2.10328, + 2.10833, + 2.12354, + 2.09029, 
+ 2.08101, + 2.08138, + 2.10166, + 2.09347, + 2.12793, + 2.11543, + 2.09397, + 2.09456, + 2.07508, + 2.08559, + 2.10014, + 2.09946, + 2.0938, + 2.10062, + 2.08581, + 2.09366, + 2.10412, + 2.09658, + 2.12119, + 2.10416, + 2.10553, + 2.10884, + 2.10399, + 2.09831, + 2.07083, + 2.10862, + 2.08491, + 2.07786, + 2.06987, + 2.10105, + 2.08836, + 2.11082, + 2.08967, + 2.096, + 2.09845, + 2.11367, + 2.0919, + 2.08398, + 2.08567, + 2.10261, + 2.08733, + 2.07127, + 2.10659, + 2.10412, + 2.08127, + 2.0879, + 2.09321, + 2.0969, + 2.1155, + 2.09746, + 2.07711, + 2.09989, + 2.07658, + 2.08498, + 2.10385, + 2.09724, + 2.1108, + 2.09525, + 2.09183, + 2.1127, + 2.07946, + 2.09587, + 2.08618, + 2.05932, + 2.07322, + 2.09423, + 2.08995, + 2.08346, + 2.12977, + 2.08545, + 2.09628, + 2.08662, + 2.08522, + 2.09505, + 2.09735, + 2.08041, + 2.07145, + 2.11214, + 2.11189, + 2.07796, + 2.10217, + 2.08391, + 2.08151, + 2.08785, + 2.09681, + 2.07159, + 2.08265, + 2.09753, + 2.08791, + 2.10463, + 2.07866, + 2.07685, + 2.07439, + 2.12679, + 2.10319, + 2.07957, + 2.11112, + 2.09587, + 2.10383, + 2.08998, + 2.09877, + 2.08149, + 2.0726, + 2.09733, + 2.10202, + 2.05536, + 2.06957, + 2.07942, + 2.10035, + 2.07557, + 2.11221, + 2.10861, + 2.07354, + 2.08198, + 2.11816, + 2.10121, + 2.09839, + 2.08926, + 2.08913, + 2.06694, + 2.09322, + 2.12166, + 2.0856, + 2.10069, + 2.08259, + 2.088, + 2.06491, + 2.06815, + 2.05263, + 2.07064, + 2.09024, + 2.08155, + 2.07271, + 2.09329, + 2.07103, + 2.08115, + 2.09324, + 2.11059, + 2.09349, + 2.0868, + 2.09298, + 2.08033, + 2.11991, + 2.10219, + 2.08265, + 2.0745, + 2.08067, + 2.08228, + 2.07887, + 2.08947, + 2.08852, + 2.0846, + 2.10233, + 2.07347, + 2.09132, + 2.11081, + 2.07605, + 2.10372, + 2.09598, + 2.08573, + 2.06331, + 2.08668, + 2.07473, + 2.08458, + 2.08127, + 2.08422, + 2.11135, + 2.07743, + 2.08303, + 2.06754, + 2.08068, + 2.08845, + 2.07029, + 2.07641, + 2.09877, + 2.07114, + 2.06937, + 2.07108, + 2.08874, + 2.08498, + 2.08842, + 2.07386, + 2.08716, + 2.07466, + 2.07795, + 2.08073, + 2.08535, + 2.0606, + 2.09839, + 2.08545, + 2.0932, + 2.09564, + 2.08916, + 2.09524, + 2.06897, + 2.09949, + 2.06747, + 2.06616, + 2.08769, + 2.06691, + 2.08399, + 2.09025, + 2.08435, + 2.0922, + 2.08444, + 2.07771, + 2.1019, + 2.08006, + 2.10182, + 2.04187, + 2.06098, + 2.07087, + 2.08449, + 2.08222, + 2.0773, + 2.07871, + 2.06898, + 2.07074, + 2.08891, + 2.07142, + 2.0769, + 2.05867, + 2.08408, + 2.07476, + 2.08503, + 2.08507, + 2.09966, + 2.0936, + 2.08102, + 2.08051, + 2.08716, + 2.10569, + 2.04886, + 2.08287, + 2.08698, + 2.08574, + 2.08143, + 2.06543, + 2.09331, + 2.07571, + 2.08896, + 2.0924, + 2.09625, + 2.06282, + 2.07882, + 2.06549, + 2.09371, + 2.08219, + 2.07266, + 2.06664, + 2.06603, + 2.10642, + 2.07823, + 2.09126, + 2.06788, + 2.07061, + 2.06201, + 2.07877, + 2.07682, + 2.08231, + 2.08118, + 2.07654, + 2.06766, + 2.08435, + 2.05273, + 2.07367, + 2.08997, + 2.07393, + 2.10362, + 2.09741, + 2.07105, + 2.06079, + 2.08238, + 2.07444, + 2.08509, + 2.07566, + 2.08896, + 2.07058, + 2.08798, + 2.08435, + 2.06113, + 2.08116, + 2.06203, + 2.07101, + 2.06705, + 2.07565, + 2.04901, + 2.06124, + 2.06711, + 2.07743, + 2.05564, + 2.07932, + 2.09322, + 2.07225, + 2.07562, + 2.06527, + 2.0762, + 2.08281, + 2.0767, + 2.0748, + 2.07047, + 2.08225, + 2.06854, + 2.06512, + 2.0742, + 2.07513, + 2.06373, + 2.07743, + 2.08095, + 2.08841, + 2.07355, + 2.06643, + 2.07799, + 2.06675, + 2.07423, + 2.10812, + 2.06436, + 2.09897, + 2.07502, + 2.07737, + 2.04712, + 2.08047, + 2.04774, + 2.0649, + 2.09461, + 
2.07892, + 2.0363, + 2.07714, + 2.05921, + 2.06925, + 2.07907, + 2.04963, + 2.09296, + 2.09086, + 2.06722, + 2.10081, + 2.09291, + 2.06089, + 2.06722, + 2.06642, + 2.09322, + 2.07335, + 2.07798, + 2.05836, + 2.07796, + 2.0808, + 2.06395, + 2.06751, + 2.05447, + 2.06104, + 2.06063, + 2.06766, + 2.06221, + 2.07257, + 2.06574, + 2.04905, + 2.03481, + 2.04832, + 2.05878, + 2.02979, + 2.07279, + 2.05071, + 2.0645, + 2.07826, + 2.07363, + 2.08398, + 2.07578, + 2.04699, + 2.06644, + 2.05969, + 2.05606, + 2.06473, + 2.04984, + 2.07189, + 2.05034, + 2.05124, + 2.06808, + 2.06996, + 2.06724, + 2.06324, + 2.05736, + 2.06497, + 2.04036, + 2.06733, + 2.05616, + 2.07322, + 2.05645, + 2.07276, + 2.05856, + 2.07256, + 2.03945, + 2.11163, + 2.0619, + 2.08546, + 2.07413, + 2.07061, + 2.04996, + 2.06793, + 2.07484, + 2.06008, + 2.06218, + 2.09877, + 2.06978, + 2.06143, + 2.06929, + 2.06508, + 2.07316, + 2.06215, + 2.07606, + 2.08038, + 2.06814, + 2.10101, + 2.07255, + 2.05784, + 2.08767, + 2.07738, + 2.03792, + 2.04016, + 2.06784, + 2.06786, + 2.06087, + 2.05665, + 2.06969, + 2.05982, + 2.07825, + 2.06744, + 2.06036, + 2.08139, + 2.08364, + 2.05996, + 2.05479, + 2.05167, + 2.05077, + 2.05922, + 2.07963, + 2.04633, + 2.061, + 2.07461, + 2.05146, + 2.08967, + 2.0543, + 2.06519, + 2.05693, + 2.06047, + 2.09078, + 2.06547, + 2.06655, + 2.04579, + 2.07219, + 2.05517, + 2.07714, + 2.07292, + 2.05494, + 2.08399, + 2.04845, + 2.0271, + 2.07541, + 2.08763, + 2.06062, + 2.06451, + 2.04971, + 2.06807, + 2.06973, + 2.04771, + 2.07481, + 2.04728, + 2.07123, + 2.10208, + 2.07216, + 2.04981, + 2.07723, + 2.0563, + 2.08333, + 2.05147, + 2.06321, + 2.04382, + 2.02393, + 2.05965, + 2.03862, + 2.05323, + 2.08049, + 2.08626, + 2.06566, + 2.07277, + 2.05743, + 2.05562, + 2.04274, + 2.06746, + 2.03728, + 2.05617, + 2.05681, + 2.06702, + 2.04731, + 2.05774, + 2.07996, + 2.05683, + 2.04402, + 2.04403, + 2.01992, + 2.04123, + 2.06046, + 2.04875, + 2.0466, + 2.06237, + 2.04971, + 2.04946, + 2.08544, + 2.05453, + 2.0264, + 2.06103, + 2.06825, + 2.07077, + 2.06739, + 2.07046, + 2.07204, + 2.07155, + 2.04056, + 2.06434, + 2.06275, + 2.06904, + 2.06548, + 2.06135, + 2.07188, + 2.06119, + 2.06055, + 2.0949, + 2.02424, + 2.05931, + 2.04845, + 2.07085, + 2.05544, + 2.06672, + 2.07003, + 2.03386, + 2.06494, + 2.08279, + 2.06862, + 2.04196, + 2.07868, + 2.04035, + 2.06889, + 2.02584, + 2.04468, + 2.0504, + 2.0388, + 2.05739, + 2.08007, + 2.0722, + 2.03968, + 2.06537, + 2.06581, + 2.03513, + 2.06123, + 2.05413, + 2.0505, + 2.04006, + 2.04391, + 2.05829, + 2.05854, + 2.03776, + 2.0529, + 2.04568, + 2.05123, + 2.04132, + 2.07814, + 2.03212, + 2.05699, + 2.04265, + 2.05987, + 2.0619, + 2.05647, + 2.04949, + 2.04947, + 2.03799, + 2.07108, + 2.03083, + 2.0576, + 2.07711, + 2.0508, + 2.04764, + 2.06956, + 2.0506, + 2.08523, + 2.05784, + 2.07594, + 2.06797, + 2.0562, + 2.04647, + 2.06524, + 2.02976, + 2.04842, + 2.07655, + 2.05525, + 2.03493, + 2.0666, + 2.05273, + 2.05187, + 2.04375, + 2.06658, + 2.05532, + 2.06008, + 2.0566, + 2.07965, + 2.08018, + 2.04848, + 2.03559, + 2.04089, + 2.0178, + 2.04963, + 2.04755, + 2.02811, + 2.06052, + 2.04175, + 2.05502, + 2.02278, + 2.04766, + 2.06112, + 2.03887, + 2.02798, + 2.04829, + 2.06336, + 2.04651, + 2.05795, + 2.05212, + 2.06047, + 2.0286, + 2.01909, + 2.06535, + 2.05403, + 2.0821, + 2.02458, + 2.05066, + 2.06295, + 2.0543, + 2.05905, + 2.04452, + 2.06969, + 2.06715, + 2.05956, + 2.05587, + 2.06945, + 2.03875, + 2.05269, + 2.05739, + 2.05056, + 2.04221, + 2.05828, + 2.06287, + 2.0695, + 2.08111, + 2.04066, 
+ 2.04745, + 2.04967, + 2.0342, + 2.0318, + 2.02745, + 2.05636, + 2.04144, + 2.04963, + 2.03494, + 2.0634, + 2.05987, + 2.04363, + 2.03157, + 2.04925, + 2.05193, + 2.03998, + 2.06308, + 2.06588, + 2.04694, + 2.05157, + 2.05087, + 2.04383, + 2.06034, + 2.03071, + 2.03856, + 2.05594, + 2.04312, + 2.07479, + 2.07823, + 2.02631, + 2.04821, + 2.0792, + 2.04349, + 2.06049, + 2.04056, + 2.05241, + 2.04747, + 2.05308, + 2.03352, + 2.04522, + 2.06442, + 2.04325, + 2.05879, + 2.06124, + 2.04282, + 2.04139, + 2.05254, + 2.01988, + 2.07762, + 2.04611, + 2.03033, + 2.05727, + 2.05424, + 2.06047, + 2.04054, + 2.05252, + 2.04745, + 2.0531, + 2.0335, + 2.04512, + 2.06421, + 2.04357, + 2.05865, + 2.06117, + 2.04304, + 2.04141, + 2.05248, + 2.02, + 2.07693, + 2.04586, + 2.03029, + 2.05742, + 2.0541, + 2.06525, + 2.06902, + 2.0432, + 2.04453, + 2.06192, + 2.04707, + 2.04869, + 2.04354, + 2.05001, + 2.03991, + 2.0685, + 2.0549, + 2.05505, + 2.04703, + 2.03358, + 2.05194, + 2.05436, + 2.06724, + 2.05656, + 2.07674, + 2.07072, + 2.03293, + 2.03157, + 2.04006, + 2.04293, + 2.05827, + 2.03175, + 2.01841, + 2.05883, + 2.04812, + 2.03408, + 2.03289, + 2.03097, + 2.0434, + 2.04684, + 2.03107, + 2.06299, + 2.04331, + 2.04469, + 2.06301, + 2.0327, + 2.06513, + 2.03301, + 2.05957, + 2.04292, + 2.02398, + 2.04747, + 2.04785, + 2.03174, + 2.02171, + 2.05919, + 2.03983, + 2.05566, + 2.04248, + 2.03221, + 2.0759, + 2.05008, + 2.0214, + 2.06179, + 2.01749, + 2.04065, + 2.02708, + 2.05848, + 2.05042, + 2.05003, + 2.07077, + 2.04236, + 2.05066, + 2.03207, + 2.03696, + 2.03066, + 2.03533, + 2.0552, + 2.04942, + 2.04416, + 2.04847, + 2.03375, + 2.05024, + 2.02224, + 2.0599, + 2.03886, + 2.06545, + 2.05957, + 2.02021, + 2.06053, + 2.02396, + 2.03988, + 2.06241, + 2.01066, + 2.04243, + 2.05078, + 2.07304, + 2.04773, + 2.06107, + 2.04046, + 2.03072, + 2.06806, + 2.0502, + 2.05373, + 2.04114, + 2.02716, + 2.05167, + 2.04071, + 2.04664, + 2.04539, + 2.04807, + 2.01564, + 2.04137, + 2.03569, + 2.06744, + 2.07131, + 2.02967, + 2.01392, + 2.06078, + 2.05455, + 2.01983, + 2.02859, + 2.05341, + 2.01784, + 2.04694, + 2.04951, + 2.04892, + 2.06394, + 2.0479, + 2.03549, + 2.01551, + 2.04039, + 2.0363, + 2.03762, + 2.0608, + 2.01959, + 2.06367, + 2.04835, + 2.04411, + 2.02332, + 2.0585, + 2.04193, + 2.0603, + 2.0682, + 2.05464, + 2.02563, + 2.04411, + 2.04524, + 2.04669, + 2.03029, + 2.0362, + 2.02253, + 2.05388, + 2.05496, + 2.06212, + 2.04333, + 2.0413, + 2.02525, + 2.00874, + 2.0428, + 2.03114, + 2.03954, + 2.0378, + 2.04635, + 2.06999, + 2.05191, + 2.04536, + 2.03394, + 2.05732, + 2.04309, + 2.03061, + 2.05865, + 2.05048, + 2.03652, + 2.03049, + 2.01085, + 2.03067, + 2.01741, + 2.02034, + 2.04522, + 2.03736, + 2.06574, + 2.02185, + 2.03204, + 2.02819, + 2.05875, + 2.03848, + 2.07065, + 2.03875, + 2.01548, + 2.06044, + 2.0509, + 2.03823, + 2.03869, + 2.04014, + 2.03673, + 2.03314, + 2.01973, + 2.05239, + 2.06154, + 2.04174, + 2.03178, + 2.02154, + 2.00685, + 2.02756, + 2.03287, + 2.0427, + 2.05606, + 2.04018, + 2.01783, + 2.02935, + 2.016, + 2.05266, + 2.03158, + 2.04107, + 2.0517, + 2.03739, + 2.02115, + 2.0316, + 2.05073, + 2.04688, + 2.04303, + 2.0674, + 2.03838, + 2.01294, + 2.04581, + 2.02689, + 2.03504, + 2.01239, + 2.02324, + 2.05401, + 2.01266, + 2.03732, + 2.02325, + 2.04265, + 2.04579, + 2.00625, + 2.03277, + 2.03646, + 2.01592, + 2.03994, + 2.01572, + 2.01955, + 2.03168, + 2.02651, + 2.04041, + 2.0268, + 2.01381, + 2.05137, + 2.03582, + 2.01582, + 2.01213, + 2.01781, + 2.04045, + 2.0411, + 2.02934, + 2.03793, + 2.02468, + 
2.0318, + 2.04112, + 2.0365, + 2.04224, + 2.05205, + 2.0668, + 2.04054, + 2.02819, + 2.0254, + 2.02306, + 2.04228, + 2.02134, + 2.05392, + 2.02807, + 2.02953, + 2.05391, + 2.05151, + 2.01489, + 2.03046, + 2.03306, + 2.03355, + 2.02705, + 2.00358, + 2.04511, + 2.03331, + 2.01168, + 2.02215, + 2.03613, + 2.03859, + 2.03608, + 2.04183, + 2.01935, + 2.04378, + 2.03376, + 2.04583, + 2.07143, + 2.03132, + 2.045, + 2.01276, + 2.05921, + 2.03287, + 2.04978, + 2.02679, + 2.04721, + 2.02158, + 2.04761, + 2.02592, + 2.01646, + 2.04388, + 2.05599, + 2.04995, + 2.01475, + 2.03737, + 2.03914, + 2.02618, + 2.01273, + 2.03062, + 2.0391, + 2.05022, + 2.02877, + 2.06806, + 2.0398, + 2.02339, + 2.02826, + 2.0283, + 2.05834, + 2.02902, + 1.99534, + 2.0505, + 2.00959, + 2.02836, + 2.00366, + 2.04647, + 2.03224, + 2.0056, + 2.04715, + 2.038, + 2.01394, + 2.02793, + 2.03377, + 2.02536, + 2.04284, + 2.03622, + 2.04047, + 2.04737, + 2.0126, + 2.04873, + 2.01303, + 2.04299, + 2.03197, + 2.02903, + 2.01212, + 2.02437, + 2.01794, + 2.02022, + 2.04984, + 2.04139, + 2.05848, + 2.03098, + 2.02086, + 2.00389, + 2.0592, + 2.01986, + 1.99799, + 2.04708, + 2.04642, + 2.05958, + 2.05049, + 2.03111, + 2.03582, + 2.02262, + 2.03563, + 2.03222, + 2.04899, + 2.02787, + 2.03317, + 2.04468, + 2.03544, + 2.01406, + 2.05183, + 2.03062, + 2.02943, + 2.03072, + 2.02441, + 2.01968, + 2.03337, + 2.01212, + 2.01679, + 2.03688, + 2.00323, + 2.05195, + 2.03035, + 2.0453, + 2.03253, + 2.05581, + 2.01793, + 2.03642, + 2.03252, + 2.0387, + 2.04706, + 2.02217, + 2.03086, + 2.02223, + 2.04418, + 2.03613, + 2.02383, + 2.02233, + 2.01692, + 2.03767, + 2.02427, + 2.01682, + 2.02529, + 2.00427, + 2.02606, + 2.03293, + 2.04867, + 2.04001, + 2.0225, + 2.03806, + 2.01906, + 2.03452, + 2.03287, + 2.00488, + 2.02604, + 2.02431, + 2.01111, + 2.0092, + 2.02263, + 2.01799, + 2.03186, + 2.02335, + 2.04214, + 2.03045, + 2.02994, + 2.01811, + 2.03178, + 2.05296, + 2.05152, + 2.00785, + 2.01546, + 2.05441, + 2.01446, + 2.00887, + 2.04831, + 2.01926, + 2.01434, + 2.02356, + 2.0183, + 2.03328, + 2.01008, + 2.02262, + 2.04957, + 2.02712, + 2.01721, + 2.04747, + 2.02184, + 2.02848, + 2.05733, + 2.03521, + 2.0195, + 2.04916, + 2.03439, + 2.02555, + 2.03685, + 2.00242, + 2.03878, + 2.04221, + 2.03542, + 2.02895, + 2.04015, + 2.02528, + 2.02639, + 2.04139, + 2.03501, + 2.0306, + 2.0051, + 2.02541, + 2.02449, + 2.02796, + 2.00731, + 2.01045, + 2.01817, + 2.04808, + 2.03134, + 2.02478, + 2.00888, + 1.99585, + 2.04413, + 2.0439, + 2.02972, + 2.04554, + 2.02551, + 2.02213, + 2.01853, + 2.0138, + 2.0115, + 2.02771, + 2.00542, + 2.04709, + 2.01674, + 2.02613, + 2.02933, + 1.99911, + 2.014, + 2.01743, + 1.99774, + 2.06495, + 2.0163, + 2.0329, + 2.03451, + 2.00671, + 2.02704, + 2.00913, + 2.00733, + 2.0169, + 2.02783, + 2.04017, + 2.0208, + 2.01728, + 2.03693, + 2.03491, + 2.00363, + 2.01592, + 2.02132, + 1.99621, + 2.01636, + 2.03577, + 2.05908, + 2.03387, + 2.00804, + 2.01834, + 2.01652, + 2.01748, + 2.02298, + 2.01874, + 2.00515, + 2.01887, + 2.04895, + 2.02251, + 2.01912, + 2.01777, + 2.02806, + 2.0269, + 2.02511, + 2.00423, + 2.0156, + 2.04654, + 2.02458, + 2.0275, + 2.01452, + 2.05435, + 1.99932, + 2.01555, + 2.00119, + 2.0053, + 2.00118, + 2.01676, + 2.03184, + 2.02566, + 2.01218, + 2.04158, + 2.01946, + 2.02495, + 2.00391, + 2.02647, + 2.04178, + 2.03745, + 2.01808, + 2.02752, + 2.03446, + 2.02934, + 2.02554, + 2.03386, + 2.03394, + 2.04926, + 2.02909, + 2.01161, + 2.03058, + 2.02171, + 2.02723, + 2.00443, + 2.03198, + 2.01503, + 2.03542, + 2.00337, + 2.02797, + 
2.02077, + 2.04468, + 2.02087, + 2.03417, + 2.02033, + 1.99726, + 2.0323, + 2.02571, + 2.00141, + 2.00281, + 2.02224, + 2.01187, + 2.01136, + 1.9966, + 2.02486, + 2.0454, + 1.99753, + 2.03451, + 2.00934, + 1.99168, + 2.02524, + 1.99821, + 2.00111, + 2.03213, + 2.02918, + 2.00051, + 2.00875, + 2.01081, + 2.02113, + 1.99404, + 2.01046, + 2.01033, + 2.01276, + 2.0307, + 2.0092, + 2.00691, + 2.01202, + 2.04273, + 2.00016, + 2.01178, + 2.03478, + 2.02252, + 2.03838, + 1.99518, + 2.02079, + 2.04536, + 1.98687, + 2.02205, + 2.00979, + 2.04894, + 2.01404, + 2.03524, + 2.00443, + 2.02494, + 2.04453, + 2.00302, + 2.04026, + 2.03446, + 2.02769, + 2.01116, + 2.03618, + 2.061, + 2.02197, + 2.02747, + 2.03101, + 2.00854, + 2.02438, + 2.05939, + 2.02841, + 2.02124, + 2.00556, + 1.99604, + 2.02265, + 2.03088, + 2.00321, + 2.03285, + 2.01809, + 1.99459, + 2.02022, + 2.0229, + 2.01434, + 2.01916, + 2.02617, + 2.02603, + 2.01054, + 2.03832, + 1.98517, + 1.99417, + 2.01887, + 2.01682, + 2.02548, + 2.00015, + 2.03368, + 2.00086, + 2.01037, + 2.01429, + 2.00769, + 2.01118, + 2.00724, + 1.99551, + 2.01562, + 2.01609, + 2.00438, + 2.00593, + 2.02104, + 1.99666, + 2.01457, + 2.02156, + 1.9999, + 2.01153, + 2.00066, + 2.01639, + 2.02296, + 2.03506, + 2.00573, + 2.02935, + 2.04206, + 1.9967, + 2.02594, + 2.01435, + 2.0098, + 1.99997, + 2.01668, + 2.01697, + 2.01821, + 2.01434, + 2.01171, + 2.0176, + 2.00208, + 1.99654, + 2.00702, + 2.04028, + 2.01667, + 2.0269, + 2.01935, + 2.00899, + 2.01318, + 2.00988, + 2.0243, + 2.02081, + 2.00014, + 2.00777, + 2.03004, + 2.03963, + 2.03199, + 2.01695, + 1.99405, + 2.02884, + 2.02228, + 2.0097, + 2.02368, + 2.00031, + 1.97936, + 2.03661, + 1.99792, + 2.01396, + 2.00069, + 2.00372, + 2.01857, + 1.99959, + 2.00549, + 2.00833, + 2.00331, + 2.01386, + 2.01692, + 2.01799, + 2.0099, + 2.01079, + 2.03109, + 2.01696, + 2.01297, + 2.02409, + 2.02104, + 2.00718, + 2.01694, + 2.03406, + 2.01178, + 2.02006, + 1.99202, + 2.03438, + 2.01452, + 2.01791, + 2.00299, + 2.02679, + 2.00163, + 1.99945, + 2.00887, + 2.00057, + 2.00117, + 2.01481, + 2.0096, + 2.01508, + 2.00965, + 2.0271, + 2.00588, + 2.01586, + 2.0164, + 1.9802, + 2.01347, + 2.00002, + 2.00323, + 2.00534, + 2.01073, + 2.02406, + 2.02117, + 2.03012, + 2.00444, + 2.02137, + 1.99835, + 2.0141, + 1.98976, + 2.00178, + 2.02313, + 1.99839, + 2.03356, + 2.00942, + 2.02542, + 2.02327, + 1.99888, + 2.0115, + 1.99114, + 2.00245, + 1.99929, + 2.0199, + 2.03375, + 2.00886, + 2.02669, + 2.00426, + 2.02167, + 2.01747, + 2.01655, + 2.02242, + 2.02559, + 2.03004, + 2.02225, + 2.00754, + 1.97787, + 2.01462, + 1.99438, + 2.00506, + 2.02177, + 2.02731, + 1.9834, + 1.99755, + 1.99039, + 1.99425, + 2.01127, + 1.99564, + 2.00543, + 2.00145, + 2.0029, + 2.02316, + 2.01676, + 2.02277, + 2.01266, + 2.02716, + 1.99984, + 2.01757, + 2.00437, + 2.02128, + 2.0105, + 1.98912, + 2.00272, + 2.00987, + 2.01566, + 2.00122, + 1.98888, + 2.02972, + 2.02648, + 2.00617, + 2.0047, + 2.00636, + 2.02052, + 1.97765, + 1.9983, + 2.01733, + 2.01399, + 1.98946, + 2.05508, + 1.98109, + 1.98817, + 1.98658, + 1.99598, + 2.02788, + 1.99796, + 1.99547, + 2.02652, + 1.98941, + 1.99852, + 1.99472, + 2.00705, + 1.98575, + 1.99383, + 2.03304, + 1.99509, + 1.98603, + 2.00891, + 1.99476, + 2.00099, + 2.00052, + 2.01095, + 1.98485, + 2.02779, + 2.01766, + 2.00527, + 2.00705, + 1.99733, + 1.99805, + 1.99989, + 2.03851, + 2.00999, + 2.00448, + 2.0579, + 2.02868, + 2.02933, + 2.01409, + 2.00733, + 1.99399, + 1.98921, + 2.02756, + 1.98632, + 1.99522, + 1.98417, + 2.03794, + 1.98576, + 2.00464, 
+ 2.02554, + 1.99239, + 2.00178, + 2.02655, + 2.00645, + 1.99684, + 2.01606, + 2.01443, + 1.9893, + 1.99015, + 1.99984, + 1.99745, + 2.0214, + 2.00721, + 1.99406, + 2.00279, + 2.02279, + 2.01922, + 2.01888, + 1.99817, + 2.00661, + 2.00941, + 2.00641, + 2.02468, + 1.99389, + 2.02113, + 1.99036, + 1.99003, + 2.01775, + 1.97272, + 2.01412, + 2.01143, + 2.00612, + 2.0146, + 2.00421, + 1.97847, + 2.01189, + 2.00629, + 1.98394, + 1.98192, + 1.98684, + 2.02731, + 2.00926, + 1.98187, + 2.00506, + 1.99795, + 2.00851, + 1.98334, + 1.98238, + 2.04913, + 2.01102, + 2.02372, + 2.02041, + 2.01756, + 1.99475, + 1.99402, + 1.96987, + 2.00352, + 1.98591, + 2.01374, + 2.00922, + 2.04849, + 1.99265, + 2.02093, + 2.0265, + 2.01523, + 1.98564, + 2.00247, + 1.98999, + 1.98939, + 2.01501, + 1.9914, + 2.00423, + 2.00071, + 2.02579, + 1.99256, + 1.99939, + 1.98541, + 1.99062, + 1.99484, + 2.00761, + 1.98857, + 2.0126, + 2.02232, + 2.01144, + 1.99891, + 2.00123, + 1.98839, + 2.00482, + 2.01331, + 1.9949, + 2.01185, + 1.99291, + 1.987, + 1.99669, + 2.01233, + 1.995, + 1.99357, + 1.99618, + 2.00486, + 2.00775, + 2.01924, + 2.00946, + 1.99399, + 2.00289, + 1.99571, + 1.98544, + 1.98196, + 2.01932, + 2.00375, + 2.00328, + 2.01648, + 2.00601, + 2.00308, + 1.98958, + 1.98415, + 2.02451, + 1.97622, + 1.99278, + 2.00709, + 1.9868, + 1.99317, + 2.0123, + 1.97666, + 1.97333, + 1.98052, + 1.98892, + 1.98048, + 2.02524, + 2.01807, + 1.97017, + 1.99807, + 1.9883, + 1.99095, + 2.00642, + 2.00431, + 2.01061, + 2.0326, + 2.00601, + 1.99722, + 1.99716, + 2.0085, + 2.00989, + 2.0007, + 2.00165, + 2.0141, + 1.99425, + 2.01475, + 1.9979, + 1.9876, + 2.02655, + 1.98569, + 1.98635, + 1.97076, + 1.98299, + 1.99767, + 2.0068, + 2.00752, + 2.01987, + 2.00339, + 2.01815, + 1.9816, + 1.99435, + 2.01083, + 2.01796, + 2.01531, + 2.03965, + 2.00477, + 2.01696, + 1.99056, + 1.98327, + 1.97754, + 1.99461, + 2.00059, + 2.00292, + 2.00937, + 2.02811, + 1.99617, + 1.99303, + 1.98569, + 2.00092, + 2.00718, + 2.00535, + 2.004, + 2.00416, + 2.00602, + 1.99007, + 1.98861, + 2.01652, + 1.99676, + 1.99282, + 2.01531, + 2.01286, + 2.00251, + 1.9917, + 1.98763, + 1.99212, + 2.00956, + 1.99525, + 2.01498, + 1.99689, + 2.01323, + 1.99353, + 2.00582, + 1.9922, + 2.00139, + 1.99641, + 1.99755, + 2.00076, + 2.00369, + 2.00498, + 2.00312, + 1.98471, + 2.0274, + 2.00147, + 1.9983, + 1.98119, + 2.01039, + 2.00926, + 2.00267, + 2.00749, + 2.00973, + 1.99064, + 1.98996, + 2.02164, + 1.9959, + 1.98124, + 2.00078, + 1.97757, + 1.98484, + 2.03268, + 1.99141, + 2.00327, + 1.98188, + 1.98364, + 2.01089, + 1.9924, + 2.00753, + 1.98206, + 1.98813, + 2.00954, + 1.97593, + 1.9745, + 2.01673, + 1.98959, + 2.02987, + 1.99085, + 2.02622, + 1.99347, + 2.00147, + 1.9956, + 1.99497, + 2.00223, + 2.00453, + 1.98743, + 1.98802, + 2.00409, + 2.00746, + 2.00977, + 2.00103, + 1.988, + 2.01477, + 1.99461, + 1.97404, + 1.98651, + 1.99028, + 1.99109, + 1.96326, + 1.99836, + 2.01111, + 2.01581, + 1.99938, + 1.98806, + 2.00891, + 1.99398, + 1.97624, + 1.99773, + 2.00823, + 1.99673, + 2.00302, + 1.99769, + 2.00555, + 2.03036, + 1.98132, + 1.99229, + 1.99362, + 2.0112, + 1.98501, + 1.9797, + 2.02853, + 1.98163, + 1.96786, + 2.0283, + 1.99061, + 1.99207, + 1.99668, + 1.9965, + 1.99253, + 1.98392, + 2.01956, + 2.01446, + 1.97614, + 1.98919, + 2.00085, + 1.97105, + 1.98078, + 2.00407, + 1.99237, + 1.98181, + 1.99109, + 1.97399, + 1.98097, + 1.98522, + 2.01025, + 2.01331, + 1.9859, + 1.99829, + 2.01144, + 2.00631, + 1.98287, + 1.99957, + 1.98278, + 1.9945, + 1.99219, + 2.00339, + 2.02496, + 
1.98643, + 1.98436, + 1.9627, + 2.00079, + 2.00263, + 1.99184, + 1.99782, + 1.96953, + 1.98637, + 2.01861, + 1.97249, + 2.00423, + 1.99863, + 1.9702, + 1.98323, + 2.00875, + 1.98979, + 2.00072, + 2.01774, + 1.97834, + 1.99512, + 2.01396, + 1.97102, + 1.95655, + 1.99876, + 1.97568, + 1.98228, + 2.01858, + 2.01429, + 2.00076, + 1.98709, + 1.98613, + 2.01134, + 1.9852, + 1.97227, + 1.98728, + 1.98726, + 1.99978, + 1.98708, + 2.00129, + 1.98729, + 1.99865, + 1.98798, + 1.97864, + 1.98159, + 1.97724, + 1.99481, + 1.97354, + 2.00312, + 1.96164, + 1.97868, + 1.97595, + 1.99928, + 1.99311, + 2.01131, + 1.97432, + 1.99207, + 1.98909, + 1.99246, + 1.96602, + 1.97762, + 1.99757, + 2.00961, + 1.9767, + 1.97187, + 1.96383, + 1.99208, + 1.99792, + 1.98571, + 1.98426, + 2.0025, + 1.9886, + 1.99308, + 1.99431, + 1.97669, + 1.97736, + 1.98303, + 1.98092, + 2.00043, + 1.98022, + 2.01022, + 2.01455, + 1.99816, + 1.98871, + 1.98828, + 2.00851, + 1.96608, + 1.98804, + 1.98792, + 2.00853, + 1.98868, + 2.01477, + 1.97169, + 1.99693, + 1.98185, + 1.99157, + 2.00689, + 1.98726, + 1.97279, + 1.97607, + 1.99306, + 1.95529, + 2.01146, + 1.98777, + 1.98887, + 1.99853, + 1.98238, + 1.98201, + 2.00866, + 1.98484, + 1.97555, + 1.98664, + 1.97711, + 1.97722, + 2.00163, + 1.96501, + 1.97489, + 1.95798, + 1.99451, + 2.00438, + 1.97202, + 1.96737, + 1.98471, + 1.99732, + 1.98041, + 1.98379, + 1.98053, + 1.99641, + 1.9982, + 2.01328, + 1.98576, + 2.0032, + 1.99804, + 1.98635, + 1.9723, + 2.00564, + 2.00397, + 1.98169, + 1.99382, + 1.98857, + 1.98617, + 1.99168, + 1.97545, + 2.0027, + 2.00172, + 1.97751, + 1.98791, + 1.9923, + 1.99519, + 1.98804, + 1.9836, + 1.97195, + 1.97929, + 2.00433, + 1.98983, + 1.99124, + 1.98435, + 1.98178, + 1.9847, + 1.97866, + 1.96976, + 2.00239, + 1.95769, + 1.98415, + 1.99727, + 1.97566, + 1.98747, + 1.99506, + 1.98033, + 1.99536, + 1.99391, + 1.98904, + 1.99856, + 1.97625, + 2.00373, + 1.97841, + 1.97855, + 1.98864, + 1.9855, + 2.00417, + 1.99105, + 1.98511, + 1.98772, + 1.96643, + 2.00789, + 1.99686, + 2.0118, + 1.98208, + 1.99895, + 1.97595, + 1.98534, + 1.99223, + 2.00952, + 2.01319, + 1.98188, + 1.98363, + 1.98229, + 1.98778, + 1.97717, + 1.98371, + 1.98789, + 1.96225, + 1.9968, + 1.98601, + 1.99461, + 1.98586, + 1.99986, + 1.98264, + 1.98036, + 1.969, + 1.97158, + 1.9879, + 2.00237, + 1.99451, + 1.98611, + 1.96552, + 1.99081, + 1.99038, + 1.99089, + 2.00337, + 1.96334, + 1.983, + 1.95732, + 2.00282, + 1.99067, + 1.98402, + 1.9872, + 1.9902, + 1.9943, + 1.9717, + 2.00013, + 1.98988, + 1.99439, + 2.00095, + 1.98589, + 1.9919, + 1.98123, + 1.97352, + 1.97565, + 1.99066, + 1.9955, + 1.98609, + 2.00386, + 1.97897, + 1.99454, + 1.98226, + 1.98498, + 1.96271, + 2.00686, + 2.00453, + 1.9649, + 2.00981, + 1.97186, + 1.99293, + 1.97264, + 1.99619, + 2.02632, + 1.97267, + 1.96717, + 1.98792, + 1.99683, + 1.99289, + 1.99649, + 1.97657, + 1.97365, + 1.98683, + 1.97917, + 2.00608, + 2.01071, + 2.0069, + 2.00026, + 2.0043, + 1.99967, + 1.9832, + 1.96642, + 2.00364, + 1.97538, + 1.98045, + 1.99331, + 2.00766, + 2.01853, + 1.97273, + 2.01051, + 1.99416, + 2.00261, + 2.00741, + 1.97464, + 1.97467, + 1.97655, + 1.9756, + 1.95839, + 1.99758, + 1.97169, + 2.00909, + 2.0063, + 1.98495, + 2.00171, + 1.99286, + 1.97807, + 1.98479, + 1.9771, + 1.9943, + 1.97175, + 2.00013, + 1.98967, + 1.99431, + 2.00086, + 1.98579, + 1.99182, + 1.98115, + 1.97357, + 1.97528, + 1.99092, + 1.99548, + 1.98627, + 2.00394, + 1.97918, + 1.99447, + 1.98197, + 1.98489, + 1.96278, + 2.00684, + 2.0045, + 1.96498, + 2.00965, + 1.97172, + 
1.99271, + 1.97253, + 1.99606, + 2.02626, + 1.97262, + 1.96719, + 1.98802, + 1.99651, + 1.99298, + 1.99652, + 1.97639, + 1.97329, + 1.987, + 1.97916, + 2.00615, + 2.01054, + 2.0072, + 1.9998, + 2.00422, + 1.99935, + 1.9831, + 1.96587, + 2.00294, + 1.97508, + 1.98032, + 1.99288, + 2.00712, + 2.0182, + 1.97226, + 2.01042, + 1.99371, + 2.00243, + 2.00727, + 1.97448, + 1.97464, + 1.97609, + 1.97561, + 1.95871, + 1.99913, + 1.9729, + 2.00971, + 2.00666, + 1.98505, + 1.98455, + 1.99249, + 1.97757, + 1.98489, + 1.97755, + 1.99165, + 2.00795, + 1.97903, + 1.99561, + 1.99716, + 1.97597, + 1.98804, + 1.97229, + 1.98554, + 1.98359, + 1.96783, + 1.99351, + 1.99628, + 2.00636, + 1.97529, + 1.9645, + 1.9795, + 1.99802, + 1.98153, + 2.01646, + 2.00502, + 1.97651, + 1.96467, + 1.98538, + 1.97484, + 1.97258, + 1.99876, + 1.97798, + 1.95536, + 1.9648, + 1.9662, + 1.99113, + 1.97484, + 1.9693, + 1.9735, + 1.98358, + 1.98638, + 2.00481, + 1.98793, + 2.00433, + 1.98754, + 2.00651, + 1.97492, + 1.98932, + 1.96623, + 1.98071, + 1.99392, + 1.98575, + 1.98861, + 1.96117, + 2.00127, + 1.98909, + 1.98382, + 1.9622, + 2.00328, + 1.97404, + 1.97576, + 1.96676, + 1.97996, + 1.97118, + 1.98848, + 2.00312, + 1.97302, + 1.98437, + 1.96605, + 1.98589, + 1.97225, + 1.99622, + 1.9936, + 1.97503, + 1.99069, + 1.99038, + 1.9771, + 2.00708, + 1.96959, + 1.98315, + 1.99011, + 1.95911, + 1.98614, + 1.98645, + 2.00538, + 1.97181, + 1.98426, + 1.99817, + 1.9744, + 1.98926, + 1.95839, + 1.982, + 1.98206, + 1.97567, + 1.98474, + 1.9855, + 1.98157, + 1.9813, + 1.97829, + 1.98378, + 2.00878, + 1.98318, + 1.99073, + 1.99813, + 1.98265, + 1.97987, + 1.98524, + 1.99257, + 1.97869, + 1.98485, + 2.00174, + 1.98818, + 1.98683, + 1.9736, + 1.97434, + 1.99292, + 1.98882, + 1.96963, + 1.97404, + 1.98262, + 1.97464, + 1.98076, + 2.00526, + 1.9995, + 1.98502, + 1.99879, + 1.9635, + 1.97154, + 1.98464, + 1.9755, + 1.9701, + 1.97747, + 1.96825, + 1.97191, + 1.95972, + 1.97326, + 1.96545, + 1.99198, + 1.99267, + 1.97666, + 1.99272, + 1.98163, + 1.98814, + 1.97387, + 1.9937, + 1.99245, + 1.98775, + 1.97258, + 2.00928, + 1.98538, + 1.99269, + 1.95022, + 1.9893, + 1.97631, + 1.99963, + 1.95413, + 1.96557, + 1.99451, + 1.9618, + 1.98107, + 1.98544, + 1.97545, + 1.96815, + 2.00798, + 1.98341, + 1.96386, + 1.96991, + 1.9771, + 1.96925, + 1.98404, + 1.98587, + 1.96237, + 1.95556, + 2.01202, + 1.98558, + 1.96215, + 1.97795, + 1.96097, + 1.96226, + 1.97746, + 1.96483, + 2.0027, + 1.98065, + 1.96986, + 1.98146, + 1.95507, + 1.96814, + 1.95787, + 1.9922, + 2.00465, + 1.99461, + 1.96622, + 1.97541, + 1.9582, + 1.96199, + 1.95646, + 1.98649, + 1.97577, + 1.96806, + 1.99681, + 1.98368, + 1.97493, + 1.96493, + 1.98542, + 2.0028, + 1.98204, + 1.97053, + 1.97051, + 1.96748, + 1.95835, + 1.971, + 1.95626, + 1.98603, + 1.97422, + 2.00138, + 1.95297, + 1.97297, + 1.98101, + 1.99482, + 1.99712, + 1.96936, + 1.99282, + 1.96858, + 1.98167, + 1.97467, + 1.96191, + 1.99738, + 1.95675, + 1.9749, + 1.95954, + 1.98859, + 1.99459, + 1.99903, + 1.96739, + 1.98151, + 1.9794, + 1.97253, + 1.99918, + 1.97579, + 1.97503, + 1.96025, + 1.96986, + 1.96948, + 1.98609, + 1.97586, + 1.97815, + 1.99705, + 1.97278, + 1.95803, + 1.98839, + 1.97515, + 1.97986, + 1.98236, + 1.96523, + 1.94251, + 1.99873, + 1.98118, + 1.97671, + 1.98255, + 1.96328, + 1.98177, + 1.98727, + 2.01537, + 1.9762, + 1.98885, + 1.98333, + 1.98675, + 1.97591, + 1.98025, + 1.96073, + 1.96238, + 1.98245, + 1.9725, + 2.00569, + 1.98257, + 1.97134, + 1.96917, + 1.99463, + 1.99105, + 1.97196, + 1.98023, + 1.9641, + 1.96138, 
+ 1.98619, + 1.98262, + 1.99244, + 1.99036, + 1.99788, + 1.98222, + 1.98048, + 1.99969, + 1.9594, + 1.9809, + 1.9755, + 1.97206, + 1.99469, + 1.98807, + 1.99204, + 1.99401, + 1.95878, + 1.99493, + 1.96649, + 1.97731, + 1.9754, + 1.9754, + 1.97617, + 1.9744, + 1.98489, + 1.96886, + 2.00684, + 1.99592, + 1.9705, + 1.93113, + 1.9588, + 1.98189, + 1.96977, + 1.97269, + 1.98538, + 2.01774, + 1.97998, + 2.00738, + 1.97844, + 1.9572, + 1.98586, + 1.97157, + 1.97045, + 1.97222, + 1.98839, + 1.9772, + 1.95744, + 1.98938, + 1.97459, + 1.99735, + 1.95376, + 1.961, + 1.99066, + 1.95808, + 1.96907, + 1.98435, + 1.9809, + 1.97695, + 2.00311, + 1.9777, + 1.96266, + 1.97628, + 1.97564, + 1.99391, + 1.9793, + 1.94884, + 1.95541, + 1.97429, + 1.9392, + 1.99286, + 2.00065, + 1.97458, + 1.97711, + 1.9856, + 1.99472, + 1.9714, + 1.97708, + 1.97306, + 1.97078, + 1.99141, + 1.96657, + 1.97138, + 1.97852, + 1.96772, + 1.98967, + 2.00586, + 1.98355, + 1.98048, + 1.99165, + 1.99138, + 1.99213, + 1.97628, + 1.96309, + 2.0017, + 1.9599, + 1.95549, + 1.99777, + 1.96126, + 1.99871, + 1.97656, + 1.98567, + 1.9758, + 1.99049, + 1.98399, + 1.9758, + 1.97488, + 1.97796, + 1.97353, + 1.96161, + 1.96738, + 1.98444, + 1.98228, + 1.94666, + 1.97055, + 1.97462, + 1.99476, + 1.97612, + 2.00026, + 1.97502, + 1.95661, + 1.96336, + 1.98773, + 1.9851, + 1.97208, + 1.98689, + 1.97892, + 1.97377, + 1.97999, + 2.01994, + 1.98484, + 1.97806, + 1.98171, + 1.98249, + 1.97804, + 1.98512, + 1.99712, + 1.95851, + 1.97592, + 1.98949, + 1.9661, + 1.99311, + 1.98943, + 2.00002, + 1.98275, + 1.98982, + 1.96812, + 1.9881, + 1.96642, + 1.97642, + 1.96986, + 1.96485, + 1.98819, + 1.95736, + 1.98679, + 1.97612, + 1.9838, + 1.9883, + 1.97728 ] }, "mem-allocated-bytes": { "start_step": 0, - "end_step": 502, + "end_step": 25809, "step_interval": 5, "values": [ 17448312832.0, @@ -212,12 +5273,5073 @@ 17448286208.0, 17448269824.0, 17448267776.0, - 17448247296.0 + 17448247296.0, + 17447884800.0, + 17447876608.0, + 17447878656.0, + 17447907328.0, + 17447874560.0, + 17447862272.0, + 17447847936.0, + 17447882752.0, + 17447886848.0, + 17447886848.0, + 17447870464.0, + 17447862272.0, + 17447862272.0, + 17447835648.0, + 17447903232.0, + 17447911424.0, + 17447843840.0, + 17447915520.0, + 17447847936.0, + 17447886848.0, + 17447897088.0, + 17447876608.0, + 17447890944.0, + 17447874560.0, + 17447892992.0, + 17447895040.0, + 17447860224.0, + 17447899136.0, + 17447892992.0, + 17447845888.0, + 17448572928.0, + 17447882752.0, + 17447907328.0, + 17447892992.0, + 17447866368.0, + 17447903232.0, + 17447886848.0, + 17447903232.0, + 17447864320.0, + 17447866368.0, + 17447880704.0, + 17447864320.0, + 17447856128.0, + 17447874560.0, + 17447854080.0, + 17447878656.0, + 17447892992.0, + 17447874560.0, + 17447892992.0, + 17447886848.0, + 17447876608.0, + 17447870464.0, + 17447878656.0, + 17447897088.0, + 17447907328.0, + 17447890944.0, + 17447866368.0, + 17447901184.0, + 17447886848.0, + 17447886848.0, + 17447895040.0, + 17447876608.0, + 17447854080.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447890944.0, + 17447886848.0, + 17447886848.0, + 17447890944.0, + 17447868416.0, + 17447888896.0, + 17447895040.0, + 17447890944.0, + 17447870464.0, + 17447862272.0, + 17447876608.0, + 17447870464.0, + 17447870464.0, + 17447882752.0, + 17447886848.0, + 17447878656.0, + 17447876608.0, + 17447874560.0, + 17447874560.0, + 17448663040.0, + 17447874560.0, + 17447886848.0, + 17447872512.0, + 17447899136.0, + 17447907328.0, + 17447868416.0, + 17447886848.0, + 17447874560.0, + 
17447858176.0, + 17447880704.0, + 17447895040.0, + 17447870464.0, + 17447868416.0, + 17447884800.0, + 17447874560.0, + 17447882752.0, + 17447890944.0, + 17447862272.0, + 17447890944.0, + 17447901184.0, + 17448677376.0, + 17447895040.0, + 17447866368.0, + 17447890944.0, + 17447870464.0, + 17447895040.0, + 17447874560.0, + 17447854080.0, + 17447870464.0, + 17447890944.0, + 17447892992.0, + 17447940096.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447880704.0, + 17447868416.0, + 17447888896.0, + 17447890944.0, + 17447890944.0, + 17447862272.0, + 17447882752.0, + 17447876608.0, + 17448890368.0, + 17448923136.0, + 17448880128.0, + 17448890368.0, + 17448894464.0, + 17448882176.0, + 17448914944.0, + 17448886272.0, + 17448892416.0, + 17448890368.0, + 17448878080.0, + 17448871936.0, + 17448890368.0, + 17448906752.0, + 17448863744.0, + 17448886272.0, + 17448894464.0, + 17448884224.0, + 17448869888.0, + 17448898560.0, + 17448890368.0, + 17448890368.0, + 17448892416.0, + 17448906752.0, + 17448871936.0, + 17448853504.0, + 17448892416.0, + 17449691136.0, + 17448900608.0, + 17448970240.0, + 17448902656.0, + 17448876032.0, + 17448873984.0, + 17448869888.0, + 17448861696.0, + 17448906752.0, + 17448904704.0, + 17448904704.0, + 17448894464.0, + 17448853504.0, + 17448845312.0, + 17448865792.0, + 17448869888.0, + 17448896512.0, + 17448886272.0, + 17448882176.0, + 17448869888.0, + 17448882176.0, + 17448894464.0, + 17448888320.0, + 17448884224.0, + 17448890368.0, + 17448902656.0, + 17448896512.0, + 17448890368.0, + 17448880128.0, + 17448898560.0, + 17448878080.0, + 17448880128.0, + 17448896512.0, + 17448888320.0, + 17448900608.0, + 17448884224.0, + 17448892416.0, + 17448906752.0, + 17448888320.0, + 17448890368.0, + 17448890368.0, + 17448873984.0, + 17448898560.0, + 17448921088.0, + 17448910848.0, + 17448898560.0, + 17448867840.0, + 17448884224.0, + 17448886272.0, + 17448894464.0, + 17448906752.0, + 17448898560.0, + 17448890368.0, + 17448886272.0, + 17448896512.0, + 17448902656.0, + 17448888320.0, + 17448888320.0, + 17448878080.0, + 17448890368.0, + 17448902656.0, + 17448890368.0, + 17448921088.0, + 17448873984.0, + 17448894464.0, + 17448878080.0, + 17448904704.0, + 17448849408.0, + 17448890368.0, + 17448890368.0, + 17448894464.0, + 17448890368.0, + 17448882176.0, + 17448900608.0, + 17448882176.0, + 17448878080.0, + 17448898560.0, + 17448902656.0, + 17448894464.0, + 17448900608.0, + 17448890368.0, + 17448882176.0, + 17448902656.0, + 17448867840.0, + 17448906752.0, + 17448886272.0, + 17447884800.0, + 17447849984.0, + 17447870464.0, + 17447923712.0, + 17447845888.0, + 17447735296.0, + 17447874560.0, + 17447929856.0, + 17447868416.0, + 17447895040.0, + 17447890944.0, + 17447890944.0, + 17447880704.0, + 17447901184.0, + 17447888896.0, + 17447890944.0, + 17447884800.0, + 17447866368.0, + 17447899136.0, + 17448316928.0, + 17447872512.0, + 17447880704.0, + 17447897088.0, + 17447903232.0, + 17447880704.0, + 17447862272.0, + 17447884800.0, + 17447895040.0, + 17447888896.0, + 17447890944.0, + 17447876608.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447870464.0, + 17447872512.0, + 17447942144.0, + 17447886848.0, + 17447868416.0, + 17447874560.0, + 17447868416.0, + 17447878656.0, + 17447886848.0, + 17447880704.0, + 17447862272.0, + 17447888896.0, + 17447864320.0, + 17447890944.0, + 17447880704.0, + 17447892992.0, + 17447888896.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447897088.0, + 17447870464.0, + 17447878656.0, + 17447882752.0, + 17447856128.0, + 17447858176.0, + 17447899136.0, + 
17447897088.0, + 17447858176.0, + 17447862272.0, + 17447864320.0, + 17447872512.0, + 17447868416.0, + 17447895040.0, + 17447880704.0, + 17447886848.0, + 17447927808.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447890944.0, + 17447872512.0, + 17447882752.0, + 17447874560.0, + 17447888896.0, + 17447874560.0, + 17447874560.0, + 17447886848.0, + 17447870464.0, + 17447884800.0, + 17447880704.0, + 17447888896.0, + 17447862272.0, + 17447895040.0, + 17447882752.0, + 17448146944.0, + 17447880704.0, + 17447872512.0, + 17447888896.0, + 17447888896.0, + 17447886848.0, + 17447890944.0, + 17447880704.0, + 17447903232.0, + 17447890944.0, + 17447874560.0, + 17447899136.0, + 17447874560.0, + 17447868416.0, + 17447901184.0, + 17447876608.0, + 17447866368.0, + 17447880704.0, + 17447874560.0, + 17447866368.0, + 17447903232.0, + 17447882752.0, + 17447862272.0, + 17447860224.0, + 17447860224.0, + 17447882752.0, + 17447895040.0, + 17447866368.0, + 17447878656.0, + 17447890944.0, + 17447870464.0, + 17447870464.0, + 17447890944.0, + 17447862272.0, + 17447884800.0, + 17447852032.0, + 17447874560.0, + 17447882752.0, + 17447895040.0, + 17447915520.0, + 17447903232.0, + 17447890944.0, + 17447862272.0, + 17447882752.0, + 17447886848.0, + 17447878656.0, + 17447895040.0, + 17447890944.0, + 17447874560.0, + 17447872512.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447792640.0, + 17447829504.0, + 17447892992.0, + 17447876608.0, + 17447870464.0, + 17447882752.0, + 17447876608.0, + 17447899136.0, + 17447858176.0, + 17447886848.0, + 17447886848.0, + 17447864320.0, + 17447862272.0, + 17447860224.0, + 17447852032.0, + 17447899136.0, + 17447845888.0, + 17447886848.0, + 17447888896.0, + 17447886848.0, + 17448161280.0, + 17447890944.0, + 17447878656.0, + 17447882752.0, + 17447872512.0, + 17447886848.0, + 17447872512.0, + 17447886848.0, + 17447886848.0, + 17447870464.0, + 17448452096.0, + 17447876608.0, + 17447892992.0, + 17447882752.0, + 17447854080.0, + 17447882752.0, + 17447888896.0, + 17447880704.0, + 17447890944.0, + 17447886848.0, + 17447872512.0, + 17447882752.0, + 17447884800.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447874560.0, + 17447888896.0, + 17447895040.0, + 17447870464.0, + 17447919616.0, + 17447888896.0, + 17447880704.0, + 17447882752.0, + 17447854080.0, + 17447899136.0, + 17447882752.0, + 17447858176.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447870464.0, + 17447886848.0, + 17447862272.0, + 17447876608.0, + 17447876608.0, + 17447890944.0, + 17447884800.0, + 17447878656.0, + 17447905280.0, + 17447864320.0, + 17447886848.0, + 17447919616.0, + 17447888896.0, + 17447858176.0, + 17447868416.0, + 17447876608.0, + 17448615936.0, + 17447897088.0, + 17447872512.0, + 17447884800.0, + 17447868416.0, + 17447903232.0, + 17447880704.0, + 17447882752.0, + 17447872512.0, + 17447864320.0, + 17447880704.0, + 17447882752.0, + 17447868416.0, + 17447878656.0, + 17447888896.0, + 17447890944.0, + 17447890944.0, + 17447882752.0, + 17447901184.0, + 17447892992.0, + 17447890944.0, + 17447878656.0, + 17447872512.0, + 17447878656.0, + 17447884800.0, + 17447884800.0, + 17447882752.0, + 17447886848.0, + 17447882752.0, + 17447866368.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447886848.0, + 17447872512.0, + 17447911424.0, + 17447878656.0, + 17447849984.0, + 17447911424.0, + 17447854080.0, + 17447876608.0, + 17447884800.0, + 17447876608.0, + 17447880704.0, + 17447880704.0, + 17447876608.0, + 17447888896.0, + 17447864320.0, + 17447870464.0, + 
17447878656.0, + 17447862272.0, + 17447876608.0, + 17447886848.0, + 17447874560.0, + 17447880704.0, + 17447878656.0, + 17447874560.0, + 17447866368.0, + 17447872512.0, + 17447878656.0, + 17447899136.0, + 17447878656.0, + 17447870464.0, + 17447862272.0, + 17447890944.0, + 17447870464.0, + 17447866368.0, + 17448325120.0, + 17447874560.0, + 17447890944.0, + 17447888896.0, + 17447892992.0, + 17447886848.0, + 17447890944.0, + 17447895040.0, + 17447895040.0, + 17447864320.0, + 17447895040.0, + 17447864320.0, + 17447874560.0, + 17447878656.0, + 17447878656.0, + 17447874560.0, + 17447862272.0, + 17447880704.0, + 17447868416.0, + 17447882752.0, + 17447870464.0, + 17447895040.0, + 17447866368.0, + 17447888896.0, + 17447872512.0, + 17447886848.0, + 17447878656.0, + 17447862272.0, + 17447856128.0, + 17447880704.0, + 17447880704.0, + 17447886848.0, + 17447862272.0, + 17447876608.0, + 17447882752.0, + 17447870464.0, + 17447882752.0, + 17447880704.0, + 17447874560.0, + 17447868416.0, + 17447882752.0, + 17447864320.0, + 17447860224.0, + 17447882752.0, + 17447874560.0, + 17447858176.0, + 17447888896.0, + 17447872512.0, + 17447886848.0, + 17447845888.0, + 17448595456.0, + 17448609792.0, + 17448605696.0, + 17448591360.0, + 17448609792.0, + 17448603648.0, + 17448595456.0, + 17448615936.0, + 17448593408.0, + 17448611840.0, + 17448617984.0, + 17448599552.0, + 17448601600.0, + 17448622080.0, + 17448607744.0, + 17448611840.0, + 17448611840.0, + 17448611840.0, + 17448620032.0, + 17448599552.0, + 17448601600.0, + 17448603648.0, + 17448628224.0, + 17448611840.0, + 17448607744.0, + 17448611840.0, + 17448609792.0, + 17448607744.0, + 17448605696.0, + 17448574976.0, + 17448615936.0, + 17448607744.0, + 17448617984.0, + 17448628224.0, + 17448611840.0, + 17448615936.0, + 17448609792.0, + 17448587264.0, + 17448603648.0, + 17448624128.0, + 17448611840.0, + 17448615936.0, + 17448617984.0, + 17448620032.0, + 17448601600.0, + 17448624128.0, + 17448595456.0, + 17448611840.0, + 17448620032.0, + 17448605696.0, + 17448581120.0, + 17448605696.0, + 17448591360.0, + 17448607744.0, + 17449242624.0, + 17448583168.0, + 17448615936.0, + 17448607744.0, + 17448617984.0, + 17448589312.0, + 17448591360.0, + 17448603648.0, + 17448624128.0, + 17448609792.0, + 17448654848.0, + 17448609792.0, + 17448601600.0, + 17448615936.0, + 17448607744.0, + 17448622080.0, + 17448630272.0, + 17448615936.0, + 17448620032.0, + 17448562688.0, + 17448544256.0, + 17448611840.0, + 17448603648.0, + 17448611840.0, + 17448609792.0, + 17448617984.0, + 17448630272.0, + 17448605696.0, + 17448599552.0, + 17448615936.0, + 17448615936.0, + 17448626176.0, + 17448615936.0, + 17448599552.0, + 17448611840.0, + 17448628224.0, + 17448603648.0, + 17448624128.0, + 17448611840.0, + 17448597504.0, + 17448607744.0, + 17448603648.0, + 17448613888.0, + 17448591360.0, + 17448615936.0, + 17448603648.0, + 17448624128.0, + 17448620032.0, + 17448617984.0, + 17448595456.0, + 17448601600.0, + 17448605696.0, + 17448613888.0, + 17448599552.0, + 17448609792.0, + 17448624128.0, + 17448622080.0, + 17448601600.0, + 17448605696.0, + 17447880704.0, + 17447874560.0, + 17447890944.0, + 17447890944.0, + 17447849984.0, + 17447856128.0, + 17447903232.0, + 17447874560.0, + 17447884800.0, + 17447874560.0, + 17447868416.0, + 17447868416.0, + 17447878656.0, + 17447872512.0, + 17447866368.0, + 17447858176.0, + 17447874560.0, + 17447884800.0, + 17447882752.0, + 17447890944.0, + 17447876608.0, + 17447870464.0, + 17447884800.0, + 17447886848.0, + 17447870464.0, + 17447890944.0, + 17447895040.0, + 17447886848.0, + 
17447878656.0, + 17447862272.0, + 17447890944.0, + 17447874560.0, + 17447876608.0, + 17447880704.0, + 17447890944.0, + 17447895040.0, + 17447874560.0, + 17447852032.0, + 17447892992.0, + 17447878656.0, + 17447874560.0, + 17447878656.0, + 17447866368.0, + 17447870464.0, + 17447892992.0, + 17447874560.0, + 17447866368.0, + 17447870464.0, + 17447872512.0, + 17447890944.0, + 17447880704.0, + 17447870464.0, + 17447882752.0, + 17447872512.0, + 17447880704.0, + 17447874560.0, + 17447888896.0, + 17447884800.0, + 17447874560.0, + 17447866368.0, + 17447886848.0, + 17447888896.0, + 17447872512.0, + 17447878656.0, + 17447878656.0, + 17447880704.0, + 17447862272.0, + 17447866368.0, + 17447878656.0, + 17447858176.0, + 17447890944.0, + 17447876608.0, + 17447866368.0, + 17447874560.0, + 17447892992.0, + 17447864320.0, + 17447876608.0, + 17447888896.0, + 17447882752.0, + 17447886848.0, + 17447872512.0, + 17447991296.0, + 17447878656.0, + 17447890944.0, + 17447882752.0, + 17447890944.0, + 17447880704.0, + 17447880704.0, + 17447874560.0, + 17447876608.0, + 17447870464.0, + 17447876608.0, + 17447890944.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447882752.0, + 17447874560.0, + 17447890944.0, + 17447874560.0, + 17447878656.0, + 17447878656.0, + 17447874560.0, + 17447862272.0, + 17447886848.0, + 17447870464.0, + 17447880704.0, + 17447862272.0, + 17447874560.0, + 17447868416.0, + 17447880704.0, + 17447878656.0, + 17447882752.0, + 17447874560.0, + 17447888896.0, + 17447895040.0, + 17447872512.0, + 17447872512.0, + 17447895040.0, + 17447868416.0, + 17447878656.0, + 17447872512.0, + 17447886848.0, + 17447880704.0, + 17447890944.0, + 17447872512.0, + 17447874560.0, + 17447895040.0, + 17447858176.0, + 17447899136.0, + 17448153088.0, + 17447874560.0, + 17447886848.0, + 17447866368.0, + 17447895040.0, + 17447872512.0, + 17447882752.0, + 17447870464.0, + 17447882752.0, + 17447868416.0, + 17447886848.0, + 17447878656.0, + 17447870464.0, + 17447870464.0, + 17447876608.0, + 17447870464.0, + 17448894464.0, + 17448910848.0, + 17448882176.0, + 17448910848.0, + 17448894464.0, + 17448886272.0, + 17448902656.0, + 17448876032.0, + 17448910848.0, + 17448890368.0, + 17448906752.0, + 17448884224.0, + 17448902656.0, + 17448886272.0, + 17448900608.0, + 17448894464.0, + 17448882176.0, + 17448890368.0, + 17448892416.0, + 17448900608.0, + 17448894464.0, + 17448902656.0, + 17448892416.0, + 17448910848.0, + 17448894464.0, + 17448882176.0, + 17448890368.0, + 17448890368.0, + 17449883648.0, + 17448886272.0, + 17448908800.0, + 17448900608.0, + 17448898560.0, + 17448894464.0, + 17448894464.0, + 17448894464.0, + 17448882176.0, + 17448894464.0, + 17448910848.0, + 17448888320.0, + 17448898560.0, + 17448896512.0, + 17448896512.0, + 17448910848.0, + 17448886272.0, + 17448902656.0, + 17448906752.0, + 17448884224.0, + 17448906752.0, + 17448892416.0, + 17448894464.0, + 17448890368.0, + 17448904704.0, + 17448890368.0, + 17448894464.0, + 17448890368.0, + 17448900608.0, + 17448896512.0, + 17448894464.0, + 17448892416.0, + 17448890368.0, + 17448898560.0, + 17448878080.0, + 17448890368.0, + 17448892416.0, + 17448898560.0, + 17448873984.0, + 17448894464.0, + 17448886272.0, + 17448878080.0, + 17448894464.0, + 17448906752.0, + 17448888320.0, + 17448871936.0, + 17448904704.0, + 17448894464.0, + 17448898560.0, + 17448898560.0, + 17448892416.0, + 17448906752.0, + 17448896512.0, + 17448902656.0, + 17448894464.0, + 17449725952.0, + 17448894464.0, + 17448892416.0, + 17448896512.0, + 17448910848.0, + 17448888320.0, + 17448884224.0, + 17448878080.0, + 
17448898560.0, + 17448884224.0, + 17448890368.0, + 17448898560.0, + 17448900608.0, + 17448882176.0, + 17448892416.0, + 17448904704.0, + 17448892416.0, + 17448894464.0, + 17448892416.0, + 17448900608.0, + 17448902656.0, + 17448910848.0, + 17448880128.0, + 17448906752.0, + 17448890368.0, + 17448906752.0, + 17448896512.0, + 17448890368.0, + 17448902656.0, + 17448900608.0, + 17448906752.0, + 17447888896.0, + 17447872512.0, + 17447888896.0, + 17447880704.0, + 17447878656.0, + 17447878656.0, + 17447888896.0, + 17447870464.0, + 17447878656.0, + 17447872512.0, + 17447878656.0, + 17447866368.0, + 17447880704.0, + 17447880704.0, + 17447880704.0, + 17447876608.0, + 17447868416.0, + 17447878656.0, + 17447895040.0, + 17447872512.0, + 17447888896.0, + 17447866368.0, + 17447878656.0, + 17447882752.0, + 17447884800.0, + 17447874560.0, + 17447862272.0, + 17447874560.0, + 17447880704.0, + 17447862272.0, + 17447878656.0, + 17447890944.0, + 17447874560.0, + 17447876608.0, + 17447890944.0, + 17447886848.0, + 17447884800.0, + 17447876608.0, + 17447870464.0, + 17447892992.0, + 17447886848.0, + 17447884800.0, + 17447866368.0, + 17447874560.0, + 17447874560.0, + 17447884800.0, + 17447892992.0, + 17447878656.0, + 17447870464.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447897088.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447858176.0, + 17447874560.0, + 17447890944.0, + 17447874560.0, + 17447901184.0, + 17448857600.0, + 17447874560.0, + 17447872512.0, + 17447878656.0, + 17447911424.0, + 17447878656.0, + 17447890944.0, + 17447876608.0, + 17447874560.0, + 17447868416.0, + 17447876608.0, + 17447874560.0, + 17447862272.0, + 17447870464.0, + 17447888896.0, + 17447884800.0, + 17447886848.0, + 17447874560.0, + 17447874560.0, + 17447892992.0, + 17447878656.0, + 17447888896.0, + 17447880704.0, + 17447878656.0, + 17447880704.0, + 17447870464.0, + 17447886848.0, + 17447876608.0, + 17447884800.0, + 17447874560.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447872512.0, + 17447866368.0, + 17447895040.0, + 17447874560.0, + 17447876608.0, + 17447874560.0, + 17447878656.0, + 17447882752.0, + 17447884800.0, + 17447870464.0, + 17447884800.0, + 17447884800.0, + 17447892992.0, + 17447888896.0, + 17447870464.0, + 17447870464.0, + 17447880704.0, + 17447878656.0, + 17447876608.0, + 17447874560.0, + 17447864320.0, + 17447890944.0, + 17447876608.0, + 17447884800.0, + 17447872512.0, + 17447884800.0, + 17447874560.0, + 17447872512.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447882752.0, + 17447878656.0, + 17447884800.0, + 17447870464.0, + 17447872512.0, + 17447892992.0, + 17447886848.0, + 17447878656.0, + 17447888896.0, + 17447870464.0, + 17447882752.0, + 17447903232.0, + 17447882752.0, + 17447886848.0, + 17447868416.0, + 17447886848.0, + 17447872512.0, + 17447888896.0, + 17447872512.0, + 17447876608.0, + 17447878656.0, + 17447888896.0, + 17447868416.0, + 17447895040.0, + 17447876608.0, + 17447870464.0, + 17447882752.0, + 17447876608.0, + 17447874560.0, + 17447868416.0, + 17447870464.0, + 17447882752.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447870464.0, + 17447874560.0, + 17447899136.0, + 17447876608.0, + 17447878656.0, + 17447876608.0, + 17447880704.0, + 17447880704.0, + 17447878656.0, + 17447878656.0, + 17447897088.0, + 17447880704.0, + 17447882752.0, + 17447874560.0, + 17447872512.0, + 17447876608.0, + 17447870464.0, + 17447886848.0, + 17447872512.0, + 17447880704.0, + 17447878656.0, + 17447882752.0, + 17447884800.0, + 17447874560.0, + 
17447886848.0, + 17447874560.0, + 17447876608.0, + 17447878656.0, + 17448779776.0, + 17447890944.0, + 17447866368.0, + 17447870464.0, + 17447874560.0, + 17447987200.0, + 17447878656.0, + 17447895040.0, + 17447874560.0, + 17447886848.0, + 17447866368.0, + 17447884800.0, + 17447895040.0, + 17447884800.0, + 17447888896.0, + 17447874560.0, + 17447880704.0, + 17447868416.0, + 17447895040.0, + 17447880704.0, + 17447872512.0, + 17447852032.0, + 17447890944.0, + 17447890944.0, + 17447868416.0, + 17447892992.0, + 17447876608.0, + 17447890944.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447895040.0, + 17447888896.0, + 17447874560.0, + 17447886848.0, + 17447878656.0, + 17447886848.0, + 17447870464.0, + 17447890944.0, + 17447874560.0, + 17447862272.0, + 17447880704.0, + 17447886848.0, + 17447890944.0, + 17447890944.0, + 17447880704.0, + 17447884800.0, + 17447890944.0, + 17447886848.0, + 17447862272.0, + 17447882752.0, + 17447876608.0, + 17447874560.0, + 17447880704.0, + 17447882752.0, + 17447880704.0, + 17447878656.0, + 17447895040.0, + 17447876608.0, + 17447866368.0, + 17447886848.0, + 17447882752.0, + 17447886848.0, + 17447874560.0, + 17447866368.0, + 17447886848.0, + 17447886848.0, + 17447884800.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17447890944.0, + 17447878656.0, + 17447897088.0, + 17447897088.0, + 17447876608.0, + 17447901184.0, + 17447890944.0, + 17447866368.0, + 17447874560.0, + 17447862272.0, + 17447890944.0, + 17447878656.0, + 17447870464.0, + 17447878656.0, + 17447876608.0, + 17447870464.0, + 17447880704.0, + 17447876608.0, + 17447888896.0, + 17447882752.0, + 17447899136.0, + 17447870464.0, + 17447876608.0, + 17447882752.0, + 17447866368.0, + 17447878656.0, + 17447868416.0, + 17447886848.0, + 17447870464.0, + 17447890944.0, + 17447880704.0, + 17447874560.0, + 17447878656.0, + 17447886848.0, + 17447876608.0, + 17447880704.0, + 17447880704.0, + 17447876608.0, + 17447880704.0, + 17447882752.0, + 17447880704.0, + 17447882752.0, + 17447897088.0, + 17447874560.0, + 17447878656.0, + 17447870464.0, + 17447880704.0, + 17447864320.0, + 17447872512.0, + 17447876608.0, + 17447878656.0, + 17447878656.0, + 17447884800.0, + 17447890944.0, + 17447870464.0, + 17447874560.0, + 17447890944.0, + 17447882752.0, + 17447868416.0, + 17447876608.0, + 17447870464.0, + 17447864320.0, + 17447870464.0, + 17447880704.0, + 17447880704.0, + 17447862272.0, + 17447892992.0, + 17447870464.0, + 17447872512.0, + 17447884800.0, + 17447878656.0, + 17447878656.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17447870464.0, + 17447890944.0, + 17447997440.0, + 17447997440.0, + 17448005632.0, + 17448007680.0, + 17448001536.0, + 17448013824.0, + 17448017920.0, + 17447997440.0, + 17448005632.0, + 17448019968.0, + 17447989248.0, + 17448001536.0, + 17448017920.0, + 17447985152.0, + 17448003584.0, + 17447991296.0, + 17448003584.0, + 17447997440.0, + 17448009728.0, + 17448009728.0, + 17447997440.0, + 17448001536.0, + 17448007680.0, + 17447983104.0, + 17448017920.0, + 17448001536.0, + 17448007680.0, + 17448005632.0, + 17448005632.0, + 17447999488.0, + 17448003584.0, + 17448009728.0, + 17448005632.0, + 17448009728.0, + 17448003584.0, + 17447993344.0, + 17448011776.0, + 17448001536.0, + 17448017920.0, + 17448007680.0, + 17448019968.0, + 17448009728.0, + 17447995392.0, + 17447997440.0, + 17448005632.0, + 17448052736.0, + 17448017920.0, + 17447985152.0, + 17447999488.0, + 17447997440.0, + 17448013824.0, + 17447993344.0, + 17447997440.0, + 17448017920.0, + 17447995392.0, + 17447993344.0, + 17448022016.0, + 
17447997440.0, + 17448005632.0, + 17447993344.0, + 17448001536.0, + 17448009728.0, + 17448011776.0, + 17448009728.0, + 17448005632.0, + 17448005632.0, + 17448007680.0, + 17447987200.0, + 17447999488.0, + 17447993344.0, + 17448011776.0, + 17448005632.0, + 17447995392.0, + 17448001536.0, + 17447989248.0, + 17448005632.0, + 17448228864.0, + 17448007680.0, + 17447999488.0, + 17448001536.0, + 17447997440.0, + 17448007680.0, + 17447999488.0, + 17447985152.0, + 17448005632.0, + 17447995392.0, + 17448013824.0, + 17448003584.0, + 17448013824.0, + 17447995392.0, + 17447991296.0, + 17448017920.0, + 17448009728.0, + 17447989248.0, + 17448001536.0, + 17448007680.0, + 17447976960.0, + 17448009728.0, + 17448017920.0, + 17448001536.0, + 17448001536.0, + 17448005632.0, + 17448007680.0, + 17448007680.0, + 17448005632.0, + 17448005632.0, + 17448005632.0, + 17447997440.0, + 17448005632.0, + 17448009728.0, + 17448007680.0, + 17448017920.0, + 17448005632.0, + 17448009728.0, + 17448122368.0, + 17448122368.0, + 17448114176.0, + 17448110080.0, + 17448114176.0, + 17448132608.0, + 17448122368.0, + 17448112128.0, + 17448103936.0, + 17448110080.0, + 17448118272.0, + 17448118272.0, + 17448118272.0, + 17448103936.0, + 17448124416.0, + 17448134656.0, + 17448120320.0, + 17448114176.0, + 17448118272.0, + 17448103936.0, + 17448134656.0, + 17448128512.0, + 17448116224.0, + 17448120320.0, + 17448118272.0, + 17448120320.0, + 17448120320.0, + 17448116224.0, + 17448120320.0, + 17448118272.0, + 17448118272.0, + 17448108032.0, + 17448112128.0, + 17448116224.0, + 17448140800.0, + 17448110080.0, + 17448116224.0, + 17448118272.0, + 17448128512.0, + 17448091648.0, + 17448128512.0, + 17448116224.0, + 17448118272.0, + 17448112128.0, + 17448105984.0, + 17448120320.0, + 17448128512.0, + 17448114176.0, + 17448116224.0, + 17448128512.0, + 17448108032.0, + 17448116224.0, + 17448124416.0, + 17448103936.0, + 17448097792.0, + 17448122368.0, + 17448116224.0, + 17448112128.0, + 17448122368.0, + 17448114176.0, + 17448130560.0, + 17448636416.0, + 17448116224.0, + 17448120320.0, + 17448134656.0, + 17448116224.0, + 17448108032.0, + 17448128512.0, + 17448116224.0, + 17448120320.0, + 17448120320.0, + 17448108032.0, + 17448130560.0, + 17448122368.0, + 17448118272.0, + 17448124416.0, + 17448114176.0, + 17448116224.0, + 17448116224.0, + 17448128512.0, + 17448118272.0, + 17448099840.0, + 17448114176.0, + 17448116224.0, + 17448112128.0, + 17448118272.0, + 17448112128.0, + 17448116224.0, + 17448116224.0, + 17448126464.0, + 17448112128.0, + 17448112128.0, + 17448120320.0, + 17448118272.0, + 17448120320.0, + 17448132608.0, + 17448103936.0, + 17448116224.0, + 17448124416.0, + 17448118272.0, + 17448112128.0, + 17448132608.0, + 17448118272.0, + 17448116224.0, + 17448108032.0, + 17448114176.0, + 17448120320.0, + 17448122368.0, + 17448114176.0, + 17448126464.0, + 17448114176.0, + 17448114176.0, + 17448124416.0, + 17447862272.0, + 17447880704.0, + 17447876608.0, + 17447880704.0, + 17447872512.0, + 17447884800.0, + 17447864320.0, + 17447895040.0, + 17447876608.0, + 17447866368.0, + 17447886848.0, + 17447880704.0, + 17447874560.0, + 17447862272.0, + 17447870464.0, + 17447868416.0, + 17447864320.0, + 17447876608.0, + 17447858176.0, + 17447870464.0, + 17447866368.0, + 17447870464.0, + 17447890944.0, + 17447895040.0, + 17447876608.0, + 17447884800.0, + 17447872512.0, + 17447870464.0, + 17447878656.0, + 17447892992.0, + 17447870464.0, + 17447872512.0, + 17447878656.0, + 17447880704.0, + 17447890944.0, + 17447888896.0, + 17447872512.0, + 17447874560.0, + 17447878656.0, + 
17447886848.0, + 17447878656.0, + 17447876608.0, + 17447884800.0, + 17447868416.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447876608.0, + 17447878656.0, + 17447878656.0, + 17448871936.0, + 17447880704.0, + 17447880704.0, + 17447866368.0, + 17447886848.0, + 17447876608.0, + 17447882752.0, + 17447876608.0, + 17447886848.0, + 17447886848.0, + 17447882752.0, + 17447886848.0, + 17447886848.0, + 17447876608.0, + 17447866368.0, + 17447874560.0, + 17447884800.0, + 17447882752.0, + 17447882752.0, + 17447890944.0, + 17447858176.0, + 17447895040.0, + 17447872512.0, + 17447874560.0, + 17447886848.0, + 17447878656.0, + 17447886848.0, + 17447870464.0, + 17447876608.0, + 17447882752.0, + 17447880704.0, + 17447870464.0, + 17447866368.0, + 17447874560.0, + 17447897088.0, + 17447874560.0, + 17447897088.0, + 17447880704.0, + 17447874560.0, + 17447895040.0, + 17447878656.0, + 17447895040.0, + 17447866368.0, + 17447880704.0, + 17447876608.0, + 17447876608.0, + 17447882752.0, + 17447876608.0, + 17447872512.0, + 17447874560.0, + 17447876608.0, + 17448566784.0, + 17447866368.0, + 17447874560.0, + 17447886848.0, + 17448607744.0, + 17447886848.0, + 17447872512.0, + 17447862272.0, + 17447884800.0, + 17447876608.0, + 17447890944.0, + 17447890944.0, + 17447868416.0, + 17447895040.0, + 17447882752.0, + 17447864320.0, + 17447890944.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447895040.0, + 17447886848.0, + 17447872512.0, + 17447874560.0, + 17447886848.0, + 17447862272.0, + 17447884800.0, + 17447874560.0, + 17447882752.0, + 17447866368.0, + 17447919616.0, + 17447876608.0, + 17447886848.0, + 17447923712.0, + 17447880704.0, + 17447892992.0, + 17447878656.0, + 17447878656.0, + 17447884800.0, + 17447884800.0, + 17447878656.0, + 17447884800.0, + 17447876608.0, + 17447880704.0, + 17447874560.0, + 17447888896.0, + 17447870464.0, + 17447886848.0, + 17447868416.0, + 17447884800.0, + 17447880704.0, + 17447884800.0, + 17447868416.0, + 17447872512.0, + 17447890944.0, + 17447870464.0, + 17447874560.0, + 17447874560.0, + 17447890944.0, + 17447880704.0, + 17447886848.0, + 17447878656.0, + 17447870464.0, + 17447876608.0, + 17447880704.0, + 17447895040.0, + 17447849984.0, + 17447876608.0, + 17447876608.0, + 17447876608.0, + 17447890944.0, + 17447878656.0, + 17447874560.0, + 17447858176.0, + 17447948288.0, + 17447870464.0, + 17447870464.0, + 17447876608.0, + 17447874560.0, + 17447880704.0, + 17448407040.0, + 17447874560.0, + 17447890944.0, + 17447870464.0, + 17447878656.0, + 17447868416.0, + 17447874560.0, + 17447874560.0, + 17447899136.0, + 17447880704.0, + 17447878656.0, + 17447888896.0, + 17447882752.0, + 17447866368.0, + 17447882752.0, + 17447878656.0, + 17447870464.0, + 17447888896.0, + 17447870464.0, + 17447882752.0, + 17447872512.0, + 17447854080.0, + 17447892992.0, + 17447886848.0, + 17447903232.0, + 17447878656.0, + 17447888896.0, + 17447876608.0, + 17447862272.0, + 17447884800.0, + 17447874560.0, + 17447882752.0, + 17447890944.0, + 17447872512.0, + 17447888896.0, + 17447884800.0, + 17447886848.0, + 17447870464.0, + 17447886848.0, + 17447868416.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447872512.0, + 17447876608.0, + 17447890944.0, + 17447870464.0, + 17447872512.0, + 17447868416.0, + 17447878656.0, + 17447882752.0, + 17447882752.0, + 17447886848.0, + 17447868416.0, + 17447872512.0, + 17447878656.0, + 17447897088.0, + 17447854080.0, + 17447866368.0, + 17447870464.0, + 17447874560.0, + 17447892992.0, + 17447874560.0, + 17447866368.0, + 17447874560.0, + 
17447905280.0, + 17447866368.0, + 17447878656.0, + 17447878656.0, + 17447872512.0, + 17447878656.0, + 17448136704.0, + 17447882752.0, + 17447884800.0, + 17447866368.0, + 17447884800.0, + 17447866368.0, + 17447866368.0, + 17447878656.0, + 17447892992.0, + 17447872512.0, + 17447882752.0, + 17447886848.0, + 17447872512.0, + 17447866368.0, + 17447868416.0, + 17447884800.0, + 17447878656.0, + 17447878656.0, + 17447860224.0, + 17447892992.0, + 17448552448.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447886848.0, + 17447874560.0, + 17448427520.0, + 17447872512.0, + 17447872512.0, + 17447870464.0, + 17447870464.0, + 17447872512.0, + 17447899136.0, + 17447880704.0, + 17447882752.0, + 17447888896.0, + 17447870464.0, + 17447880704.0, + 17447862272.0, + 17447884800.0, + 17447884800.0, + 17447886848.0, + 17448183808.0, + 17447864320.0, + 17447882752.0, + 17447895040.0, + 17447878656.0, + 17447882752.0, + 17447886848.0, + 17447882752.0, + 17447874560.0, + 17447892992.0, + 17447866368.0, + 17447880704.0, + 17447860224.0, + 17447882752.0, + 17447870464.0, + 17447878656.0, + 17447876608.0, + 17447878656.0, + 17447876608.0, + 17447868416.0, + 17447888896.0, + 17447868416.0, + 17447878656.0, + 17447876608.0, + 17447882752.0, + 17447866368.0, + 17447897088.0, + 17447888896.0, + 17447890944.0, + 17447880704.0, + 17447886848.0, + 17447862272.0, + 17447892992.0, + 17447874560.0, + 17447880704.0, + 17447874560.0, + 17447886848.0, + 17447878656.0, + 17447872512.0, + 17447874560.0, + 17447878656.0, + 17447892992.0, + 17447874560.0, + 17447872512.0, + 17447874560.0, + 17447888896.0, + 17447886848.0, + 17447886848.0, + 17447882752.0, + 17447878656.0, + 17447864320.0, + 17447892992.0, + 17447878656.0, + 17447878656.0, + 17447892992.0, + 17447872512.0, + 17447862272.0, + 17447886848.0, + 17447872512.0, + 17447876608.0, + 17447878656.0, + 17447882752.0, + 17447888896.0, + 17447874560.0, + 17447866368.0, + 17447866368.0, + 17447874560.0, + 17447866368.0, + 17447895040.0, + 17447882752.0, + 17447882752.0, + 17447895040.0, + 17447878656.0, + 17447876608.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447895040.0, + 17447882752.0, + 17448458240.0, + 17447884800.0, + 17447886848.0, + 17447874560.0, + 17447876608.0, + 17447874560.0, + 17447882752.0, + 17447884800.0, + 17447884800.0, + 17447882752.0, + 17447880704.0, + 17447878656.0, + 17447886848.0, + 17447872512.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447884800.0, + 17447876608.0, + 17447874560.0, + 17447888896.0, + 17447878656.0, + 17447870464.0, + 17447876608.0, + 17447872512.0, + 17447874560.0, + 17447872512.0, + 17447866368.0, + 17447874560.0, + 17447870464.0, + 17447882752.0, + 17447886848.0, + 17447878656.0, + 17447878656.0, + 17447876608.0, + 17447880704.0, + 17447878656.0, + 17447876608.0, + 17447876608.0, + 17447872512.0, + 17447884800.0, + 17447882752.0, + 17447876608.0, + 17447870464.0, + 17447886848.0, + 17447868416.0, + 17447901184.0, + 17447886848.0, + 17447886848.0, + 17447878656.0, + 17447874560.0, + 17447886848.0, + 17447880704.0, + 17447868416.0, + 17447890944.0, + 17447878656.0, + 17447874560.0, + 17447874560.0, + 17447876608.0, + 17447872512.0, + 17447878656.0, + 17447892992.0, + 17447864320.0, + 17447880704.0, + 17447892992.0, + 17447870464.0, + 17447884800.0, + 17447874560.0, + 17447876608.0, + 17447876608.0, + 17447892992.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447890944.0, + 17447882752.0, + 17447876608.0, + 17447878656.0, + 17447886848.0, + 17447876608.0, + 
17447858176.0, + 17447868416.0, + 17447866368.0, + 17447874560.0, + 17447882752.0, + 17447878656.0, + 17447880704.0, + 17447884800.0, + 17447874560.0, + 17447872512.0, + 17447884800.0, + 17447890944.0, + 17447886848.0, + 17447874560.0, + 17447882752.0, + 17447895040.0, + 17447862272.0, + 17447868416.0, + 17447864320.0, + 17448421376.0, + 17447876608.0, + 17447876608.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447878656.0, + 17447880704.0, + 17447897088.0, + 17447880704.0, + 17447874560.0, + 17447890944.0, + 17447880704.0, + 17447899136.0, + 17448837120.0, + 17447870464.0, + 17447890944.0, + 17447856128.0, + 17447890944.0, + 17447878656.0, + 17447886848.0, + 17447874560.0, + 17447878656.0, + 17447868416.0, + 17447876608.0, + 17447888896.0, + 17447882752.0, + 17447872512.0, + 17447880704.0, + 17447907328.0, + 17447876608.0, + 17447886848.0, + 17447878656.0, + 17447876608.0, + 17447874560.0, + 17447892992.0, + 17447886848.0, + 17447878656.0, + 17447874560.0, + 17447892992.0, + 17447882752.0, + 17447886848.0, + 17447874560.0, + 17447890944.0, + 17447878656.0, + 17447874560.0, + 17447854080.0, + 17447862272.0, + 17447882752.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447856128.0, + 17447866368.0, + 17447890944.0, + 17447880704.0, + 17447872512.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447890944.0, + 17447878656.0, + 17447849984.0, + 17447878656.0, + 17447882752.0, + 17447886848.0, + 17447874560.0, + 17447882752.0, + 17447870464.0, + 17447895040.0, + 17447878656.0, + 17447899136.0, + 17447895040.0, + 17447872512.0, + 17447880704.0, + 17447874560.0, + 17447886848.0, + 17447876608.0, + 17447878656.0, + 17447882752.0, + 17447882752.0, + 17447866368.0, + 17447878656.0, + 17447888896.0, + 17447874560.0, + 17447878656.0, + 17447882752.0, + 17447874560.0, + 17447884800.0, + 17447884800.0, + 17447866368.0, + 17447895040.0, + 17447991296.0, + 17447886848.0, + 17447888896.0, + 17447866368.0, + 17447872512.0, + 17447884800.0, + 17448570880.0, + 17447890944.0, + 17447884800.0, + 17447874560.0, + 17447880704.0, + 17447890944.0, + 17447882752.0, + 17447868416.0, + 17447880704.0, + 17447882752.0, + 17447886848.0, + 17447880704.0, + 17447892992.0, + 17447886848.0, + 17447890944.0, + 17447874560.0, + 17447880704.0, + 17447874560.0, + 17447876608.0, + 17447870464.0, + 17447886848.0, + 17447870464.0, + 17447882752.0, + 17447884800.0, + 17447892992.0, + 17447880704.0, + 17447882752.0, + 17447890944.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447876608.0, + 17447876608.0, + 17447874560.0, + 17447878656.0, + 17447870464.0, + 17447870464.0, + 17447870464.0, + 17447892992.0, + 17447876608.0, + 17447878656.0, + 17447870464.0, + 17447878656.0, + 17447880704.0, + 17447870464.0, + 17447890944.0, + 17447888896.0, + 17447872512.0, + 17447878656.0, + 17447884800.0, + 17447878656.0, + 17447878656.0, + 17447876608.0, + 17447888896.0, + 17447874560.0, + 17447866368.0, + 17447876608.0, + 17447868416.0, + 17447886848.0, + 17447872512.0, + 17447870464.0, + 17447878656.0, + 17447878656.0, + 17447886848.0, + 17447860224.0, + 17447874560.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17447884800.0, + 17448579072.0, + 17447886848.0, + 17447874560.0, + 17447876608.0, + 17447886848.0, + 17447886848.0, + 17447872512.0, + 17447878656.0, + 17447886848.0, + 17447870464.0, + 17447874560.0, + 17447878656.0, + 17447874560.0, + 17447868416.0, + 17447888896.0, + 17447886848.0, + 17447866368.0, + 17447886848.0, + 17447884800.0, + 
17447858176.0, + 17447878656.0, + 17447880704.0, + 17448126464.0, + 17447878656.0, + 17447890944.0, + 17447880704.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447870464.0, + 17447872512.0, + 17447892992.0, + 17447878656.0, + 17447868416.0, + 17447888896.0, + 17447884800.0, + 17447882752.0, + 17447858176.0, + 17447892992.0, + 17447882752.0, + 17448316928.0, + 17447882752.0, + 17447864320.0, + 17447876608.0, + 17447880704.0, + 17447874560.0, + 17447864320.0, + 17447876608.0, + 17447874560.0, + 17447872512.0, + 17447882752.0, + 17447892992.0, + 17447890944.0, + 17447880704.0, + 17447892992.0, + 17447870464.0, + 17447874560.0, + 17447870464.0, + 17447870464.0, + 17447888896.0, + 17447878656.0, + 17447876608.0, + 17447866368.0, + 17447862272.0, + 17447884800.0, + 17447890944.0, + 17447864320.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447866368.0, + 17447870464.0, + 17447886848.0, + 17447878656.0, + 17447880704.0, + 17447880704.0, + 17447878656.0, + 17447860224.0, + 17447874560.0, + 17447868416.0, + 17447876608.0, + 17447886848.0, + 17447874560.0, + 17447886848.0, + 17447878656.0, + 17447864320.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447866368.0, + 17447888896.0, + 17447876608.0, + 17447874560.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447884800.0, + 17447878656.0, + 17447874560.0, + 17447874560.0, + 17447876608.0, + 17447880704.0, + 17447870464.0, + 17447876608.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17447884800.0, + 17447897088.0, + 17447874560.0, + 17447860224.0, + 17447903232.0, + 17447899136.0, + 17447921664.0, + 17447915520.0, + 17447905280.0, + 17447901184.0, + 17447903232.0, + 17447905280.0, + 17447899136.0, + 17447919616.0, + 17447911424.0, + 17447903232.0, + 17447886848.0, + 17447915520.0, + 17447903232.0, + 17447890944.0, + 17447913472.0, + 17447890944.0, + 17447909376.0, + 17447913472.0, + 17447905280.0, + 17447911424.0, + 17447909376.0, + 17447903232.0, + 17447913472.0, + 17447897088.0, + 17447907328.0, + 17447911424.0, + 17447901184.0, + 17447903232.0, + 17447909376.0, + 17447899136.0, + 17447911424.0, + 17447897088.0, + 17447915520.0, + 17447899136.0, + 17447911424.0, + 17447899136.0, + 17447907328.0, + 17447907328.0, + 17447911424.0, + 17447911424.0, + 17447903232.0, + 17447915520.0, + 17447919616.0, + 17447903232.0, + 17447895040.0, + 17447911424.0, + 17447915520.0, + 17447899136.0, + 17447899136.0, + 17447911424.0, + 17447907328.0, + 17447905280.0, + 17447909376.0, + 17447915520.0, + 17447905280.0, + 17447892992.0, + 17447925760.0, + 17447913472.0, + 17447907328.0, + 17448826880.0, + 17447892992.0, + 17447901184.0, + 17447921664.0, + 17447907328.0, + 17447915520.0, + 17447903232.0, + 17447919616.0, + 17447909376.0, + 17447921664.0, + 17447899136.0, + 17447895040.0, + 17447909376.0, + 17447903232.0, + 17447913472.0, + 17447919616.0, + 17447917568.0, + 17447905280.0, + 17447905280.0, + 17447913472.0, + 17447899136.0, + 17447911424.0, + 17447909376.0, + 17447915520.0, + 17447913472.0, + 17447905280.0, + 17447909376.0, + 17447897088.0, + 17447909376.0, + 17447890944.0, + 17447899136.0, + 17447919616.0, + 17447913472.0, + 17447913472.0, + 17447915520.0, + 17447919616.0, + 17447913472.0, + 17447901184.0, + 17447895040.0, + 17447903232.0, + 17447899136.0, + 17447892992.0, + 17447909376.0, + 17447909376.0, + 17447905280.0, + 17447903232.0, + 17447909376.0, + 17447907328.0, + 17447909376.0, + 17447895040.0, + 17447919616.0, + 17447907328.0, + 17447868416.0, + 17447870464.0, + 
17447868416.0, + 17447870464.0, + 17447864320.0, + 17447874560.0, + 17447878656.0, + 17447876608.0, + 17447876608.0, + 17447874560.0, + 17447876608.0, + 17447888896.0, + 17447866368.0, + 17447876608.0, + 17447874560.0, + 17447878656.0, + 17447886848.0, + 17447878656.0, + 17447868416.0, + 17447886848.0, + 17447862272.0, + 17447888896.0, + 17447882752.0, + 17447884800.0, + 17447886848.0, + 17447880704.0, + 17447897088.0, + 17447882752.0, + 17447882752.0, + 17447878656.0, + 17447874560.0, + 17447872512.0, + 17447888896.0, + 17447884800.0, + 17447876608.0, + 17447882752.0, + 17447890944.0, + 17447876608.0, + 17447886848.0, + 17447895040.0, + 17447876608.0, + 17447884800.0, + 17447870464.0, + 17447886848.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447882752.0, + 17447866368.0, + 17447886848.0, + 17447890944.0, + 17447868416.0, + 17447876608.0, + 17447882752.0, + 17448462336.0, + 17447886848.0, + 17447868416.0, + 17447864320.0, + 17447882752.0, + 17447890944.0, + 17447878656.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447878656.0, + 17447862272.0, + 17447874560.0, + 17447882752.0, + 17447864320.0, + 17447886848.0, + 17447874560.0, + 17447882752.0, + 17447886848.0, + 17447878656.0, + 17447870464.0, + 17447866368.0, + 17447882752.0, + 17447882752.0, + 17447866368.0, + 17447892992.0, + 17447890944.0, + 17447886848.0, + 17447882752.0, + 17447901184.0, + 17447862272.0, + 17447876608.0, + 17447878656.0, + 17447870464.0, + 17447878656.0, + 17447874560.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447876608.0, + 17447878656.0, + 17448341504.0, + 17447870464.0, + 17447872512.0, + 17447882752.0, + 17447876608.0, + 17447901184.0, + 17447868416.0, + 17447888896.0, + 17447892992.0, + 17447868416.0, + 17447878656.0, + 17447899136.0, + 17447878656.0, + 17447880704.0, + 17447870464.0, + 17447868416.0, + 17447874560.0, + 17447882752.0, + 17447862272.0, + 17447886848.0, + 17447882752.0, + 17447899136.0, + 17447874560.0, + 17447866368.0, + 17447878656.0, + 17447878656.0, + 17447880704.0, + 17447870464.0, + 17447862272.0, + 17447884800.0, + 17447876608.0, + 17447876608.0, + 17447886848.0, + 17447884800.0, + 17447882752.0, + 17447874560.0, + 17447876608.0, + 17447878656.0, + 17448806400.0, + 17448820736.0, + 17448804352.0, + 17448808448.0, + 17448816640.0, + 17448816640.0, + 17448835072.0, + 17448810496.0, + 17448826880.0, + 17448804352.0, + 17448812544.0, + 17448814592.0, + 17448806400.0, + 17448826880.0, + 17448824832.0, + 17448798208.0, + 17448814592.0, + 17448816640.0, + 17448804352.0, + 17448818688.0, + 17448816640.0, + 17448810496.0, + 17448820736.0, + 17448822784.0, + 17448806400.0, + 17448794112.0, + 17448794112.0, + 17448828928.0, + 17448808448.0, + 17448802304.0, + 17448800256.0, + 17448820736.0, + 17448816640.0, + 17448808448.0, + 17448808448.0, + 17448812544.0, + 17448804352.0, + 17448796160.0, + 17448822784.0, + 17448818688.0, + 17448833024.0, + 17448804352.0, + 17448796160.0, + 17448800256.0, + 17448802304.0, + 17448820736.0, + 17448806400.0, + 17448814592.0, + 17449668608.0, + 17448792064.0, + 17448816640.0, + 17448808448.0, + 17448792064.0, + 17448804352.0, + 17448820736.0, + 17448812544.0, + 17448812544.0, + 17448806400.0, + 17448808448.0, + 17448814592.0, + 17448820736.0, + 17448816640.0, + 17448802304.0, + 17448802304.0, + 17448810496.0, + 17448812544.0, + 17448808448.0, + 17448802304.0, + 17448824832.0, + 17448806400.0, + 17448802304.0, + 17449644032.0, + 17448826880.0, + 17448808448.0, + 17448794112.0, + 17448820736.0, + 17448812544.0, + 17448808448.0, + 
17448800256.0, + 17448814592.0, + 17448810496.0, + 17448810496.0, + 17448808448.0, + 17448814592.0, + 17448824832.0, + 17448804352.0, + 17448808448.0, + 17448806400.0, + 17448802304.0, + 17448804352.0, + 17448816640.0, + 17448804352.0, + 17448812544.0, + 17448810496.0, + 17448810496.0, + 17448812544.0, + 17448792064.0, + 17448816640.0, + 17448796160.0, + 17448816640.0, + 17448800256.0, + 17448812544.0, + 17448816640.0, + 17448812544.0, + 17448816640.0, + 17448816640.0, + 17448814592.0, + 17448792064.0, + 17448816640.0, + 17447880704.0, + 17447888896.0, + 17447882752.0, + 17447852032.0, + 17447882752.0, + 17447874560.0, + 17447888896.0, + 17447880704.0, + 17447866368.0, + 17448683520.0, + 17447882752.0, + 17447880704.0, + 17447878656.0, + 17447866368.0, + 17447874560.0, + 17447866368.0, + 17447882752.0, + 17447884800.0, + 17447876608.0, + 17447866368.0, + 17447856128.0, + 17447888896.0, + 17447897088.0, + 17447878656.0, + 17447864320.0, + 17447888896.0, + 17447882752.0, + 17447872512.0, + 17447880704.0, + 17447880704.0, + 17447890944.0, + 17447870464.0, + 17447872512.0, + 17447878656.0, + 17447866368.0, + 17447886848.0, + 17447892992.0, + 17447878656.0, + 17447872512.0, + 17447866368.0, + 17447874560.0, + 17447864320.0, + 17448878080.0, + 17447870464.0, + 17447882752.0, + 17447878656.0, + 17447864320.0, + 17447880704.0, + 17447884800.0, + 17447878656.0, + 17447886848.0, + 17447878656.0, + 17447886848.0, + 17447866368.0, + 17447876608.0, + 17447872512.0, + 17447886848.0, + 17447858176.0, + 17447874560.0, + 17447886848.0, + 17447892992.0, + 17447868416.0, + 17447878656.0, + 17447886848.0, + 17447878656.0, + 17447866368.0, + 17447866368.0, + 17447880704.0, + 17447876608.0, + 17447878656.0, + 17447886848.0, + 17447901184.0, + 17447882752.0, + 17447878656.0, + 17447884800.0, + 17447892992.0, + 17447874560.0, + 17447880704.0, + 17447874560.0, + 17447872512.0, + 17447886848.0, + 17447880704.0, + 17447866368.0, + 17447886848.0, + 17447862272.0, + 17447880704.0, + 17447884800.0, + 17447874560.0, + 17447890944.0, + 17447880704.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447886848.0, + 17447878656.0, + 17447880704.0, + 17447884800.0, + 17447897088.0, + 17447878656.0, + 17447872512.0, + 17447845888.0, + 17447870464.0, + 17447876608.0, + 17447882752.0, + 17447880704.0, + 17447866368.0, + 17447886848.0, + 17447862272.0, + 17447886848.0, + 17447882752.0, + 17447880704.0, + 17447882752.0, + 17447882752.0, + 17447870464.0, + 17447882752.0, + 17447890944.0, + 17447866368.0, + 17447880704.0, + 17447862272.0, + 17447868416.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17447862272.0, + 17447876608.0, + 17447882752.0, + 17447880704.0, + 17447872512.0, + 17447888896.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447864320.0, + 17447872512.0, + 17447882752.0, + 17447874560.0, + 17447884800.0, + 17447882752.0, + 17447876608.0, + 17447874560.0, + 17447886848.0, + 17447886848.0, + 17447878656.0, + 17447878656.0, + 17447868416.0, + 17447862272.0, + 17447876608.0, + 17447878656.0, + 17447882752.0, + 17447864320.0, + 17447882752.0, + 17447876608.0, + 17447878656.0, + 17447874560.0, + 17447872512.0, + 17447888896.0, + 17447874560.0, + 17447870464.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447874560.0, + 17447868416.0, + 17447880704.0, + 17447878656.0, + 17448001536.0, + 17447868416.0, + 17447874560.0, + 17447884800.0, + 17447870464.0, + 17447884800.0, + 17447895040.0, + 17447892992.0, + 17447870464.0, + 17447872512.0, + 
17447870464.0, + 17447866368.0, + 17447886848.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447886848.0, + 17447872512.0, + 17447882752.0, + 17447878656.0, + 17447880704.0, + 17447868416.0, + 17447878656.0, + 17447886848.0, + 17447876608.0, + 17447911424.0, + 17447884800.0, + 17447876608.0, + 17447888896.0, + 17447880704.0, + 17447880704.0, + 17447882752.0, + 17447882752.0, + 17447878656.0, + 17447870464.0, + 17447874560.0, + 17447886848.0, + 17447868416.0, + 17447874560.0, + 17447876608.0, + 17447878656.0, + 17447882752.0, + 17447862272.0, + 17447888896.0, + 17447874560.0, + 17447886848.0, + 17448714240.0, + 17447895040.0, + 17447880704.0, + 17447878656.0, + 17447884800.0, + 17447864320.0, + 17448050688.0, + 17447882752.0, + 17447886848.0, + 17447876608.0, + 17447866368.0, + 17447882752.0, + 17447895040.0, + 17447866368.0, + 17447890944.0, + 17447880704.0, + 17447890944.0, + 17447872512.0, + 17447878656.0, + 17447880704.0, + 17447882752.0, + 17447870464.0, + 17447892992.0, + 17447888896.0, + 17447880704.0, + 17447882752.0, + 17447884800.0, + 17447880704.0, + 17447882752.0, + 17447888896.0, + 17447888896.0, + 17447890944.0, + 17447878656.0, + 17447886848.0, + 17447886848.0, + 17447870464.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447872512.0, + 17447882752.0, + 17447886848.0, + 17447874560.0, + 17447880704.0, + 17447884800.0, + 17447872512.0, + 17447882752.0, + 17447874560.0, + 17447884800.0, + 17447876608.0, + 17447895040.0, + 17447874560.0, + 17447872512.0, + 17447880704.0, + 17447882752.0, + 17447882752.0, + 17447890944.0, + 17447892992.0, + 17447878656.0, + 17447876608.0, + 17447870464.0, + 17447866368.0, + 17447876608.0, + 17447882752.0, + 17447872512.0, + 17447878656.0, + 17447872512.0, + 17447895040.0, + 17447882752.0, + 17447876608.0, + 17447874560.0, + 17447888896.0, + 17447884800.0, + 17447880704.0, + 17447872512.0, + 17447874560.0, + 17447878656.0, + 17447874560.0, + 17447876608.0, + 17447888896.0, + 17447866368.0, + 17447880704.0, + 17447895040.0, + 17447884800.0, + 17447872512.0, + 17447884800.0, + 17447874560.0, + 17447876608.0, + 17447876608.0, + 17447874560.0, + 17447876608.0, + 17447897088.0, + 17447872512.0, + 17447874560.0, + 17447878656.0, + 17447866368.0, + 17447897088.0, + 17447870464.0, + 17447862272.0, + 17447890944.0, + 17447874560.0, + 17447886848.0, + 17447864320.0, + 17447888896.0, + 17447882752.0, + 17447882752.0, + 17447890944.0, + 17447886848.0, + 17447876608.0, + 17447890944.0, + 17447854080.0, + 17447878656.0, + 17447870464.0, + 17447888896.0, + 17447884800.0, + 17447878656.0, + 17447884800.0, + 17447854080.0, + 17447878656.0, + 17447882752.0, + 17447882752.0, + 17447876608.0, + 17447882752.0, + 17447872512.0, + 17447878656.0, + 17447870464.0, + 17447874560.0, + 17447886848.0, + 17447890944.0, + 17447882752.0, + 17447878656.0, + 17447866368.0, + 17447878656.0, + 17447866368.0, + 17447884800.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447874560.0, + 17447876608.0, + 17447868416.0, + 17447882752.0, + 17447882752.0, + 17447876608.0, + 17447876608.0, + 17447968768.0, + 17447892992.0, + 17447882752.0, + 17447862272.0, + 17447878656.0, + 17447878656.0, + 17447862272.0, + 17447886848.0, + 17447868416.0, + 17447876608.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447872512.0, + 17447878656.0, + 17447868416.0, + 17447884800.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447880704.0, + 17447886848.0, + 17447882752.0, + 17447866368.0, + 17447880704.0, + 17447886848.0, + 
17447884800.0, + 17447878656.0, + 17447890944.0, + 17447884800.0, + 17447880704.0, + 17447890944.0, + 17447874560.0, + 17447876608.0, + 17447880704.0, + 17447886848.0, + 17447884800.0, + 17447866368.0, + 17447882752.0, + 17447874560.0, + 17447862272.0, + 17447878656.0, + 17447878656.0, + 17447882752.0, + 17447864320.0, + 17447890944.0, + 17447890944.0, + 17447874560.0, + 17447878656.0, + 17447880704.0, + 17447878656.0, + 17447880704.0, + 17447862272.0, + 17447882752.0, + 17447878656.0, + 17447884800.0, + 17447882752.0, + 17447884800.0, + 17447886848.0, + 17447882752.0, + 17447870464.0, + 17447880704.0, + 17447884800.0, + 17447878656.0, + 17447878656.0, + 17447895040.0, + 17447884800.0, + 17447880704.0, + 17447866368.0, + 17447880704.0, + 17447882752.0, + 17447882752.0, + 17447886848.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447876608.0, + 17448185856.0, + 17447886848.0, + 17447858176.0, + 17447870464.0, + 17447890944.0, + 17447864320.0, + 17447864320.0, + 17447876608.0, + 17447874560.0, + 17447882752.0, + 17447882752.0, + 17447876608.0, + 17447882752.0, + 17447876608.0, + 17447890944.0, + 17447876608.0, + 17447882752.0, + 17447878656.0, + 17447872512.0, + 17447886848.0, + 17447870464.0, + 17447868416.0, + 17447882752.0, + 17447874560.0, + 17447860224.0, + 17447868416.0, + 17447878656.0, + 17447866368.0, + 17447892992.0, + 17447878656.0, + 17447878656.0, + 17447876608.0, + 17447895040.0, + 17447880704.0, + 17447866368.0, + 17447874560.0, + 17447897088.0, + 17447868416.0, + 17447901184.0, + 17447880704.0, + 17447862272.0, + 17447874560.0, + 17447886848.0, + 17447876608.0, + 17447872512.0, + 17447878656.0, + 17447882752.0, + 17447886848.0, + 17447882752.0, + 17447876608.0, + 17447874560.0, + 17447880704.0, + 17447868416.0, + 17447876608.0, + 17447874560.0, + 17447878656.0, + 17447888896.0, + 17447866368.0, + 17447876608.0, + 17447886848.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447899136.0, + 17447884800.0, + 17447878656.0, + 17447870464.0, + 17447860224.0, + 17447878656.0, + 17447886848.0, + 17447870464.0, + 17447882752.0, + 17447870464.0, + 17447882752.0, + 17447878656.0, + 17447888896.0, + 17447876608.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447872512.0, + 17447870464.0, + 17447872512.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447880704.0, + 17447878656.0, + 17447860224.0, + 17447886848.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447872512.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447886848.0, + 17447882752.0, + 17447868416.0, + 17447874560.0, + 17447890944.0, + 17447866368.0, + 17447882752.0, + 17447872512.0, + 17447874560.0, + 17447868416.0, + 17447886848.0, + 17447876608.0, + 17447870464.0, + 17447874560.0, + 17447882752.0, + 17447886848.0, + 17447868416.0, + 17447878656.0, + 17447866368.0, + 17447876608.0, + 17447878656.0, + 17447868416.0, + 17447874560.0, + 17447862272.0, + 17447864320.0, + 17447862272.0, + 17447864320.0, + 17447884800.0, + 17447872512.0, + 17447886848.0, + 17447880704.0, + 17447876608.0, + 17447868416.0, + 17447874560.0, + 17448923136.0, + 17447866368.0, + 17447874560.0, + 17447878656.0, + 17447890944.0, + 17447888896.0, + 17447876608.0, + 17447884800.0, + 17447897088.0, + 17447876608.0, + 17447868416.0, + 17447888896.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17448142848.0, + 17447884800.0, + 17447874560.0, + 17447874560.0, + 17447884800.0, + 17447878656.0, + 17447897088.0, + 17447895040.0, + 17448318976.0, + 17447899136.0, + 17447886848.0, + 
17447895040.0, + 17447890944.0, + 17447886848.0, + 17447888896.0, + 17447882752.0, + 17447890944.0, + 17447907328.0, + 17447884800.0, + 17447890944.0, + 17447882752.0, + 17447886848.0, + 17447895040.0, + 17447874560.0, + 17447880704.0, + 17447888896.0, + 17447895040.0, + 17447895040.0, + 17447903232.0, + 17447868416.0, + 17447892992.0, + 17447888896.0, + 17447890944.0, + 17448044544.0, + 17447890944.0, + 17447897088.0, + 17447886848.0, + 17447890944.0, + 17447907328.0, + 17447876608.0, + 17447892992.0, + 17447882752.0, + 17447880704.0, + 17447899136.0, + 17447888896.0, + 17447882752.0, + 17447907328.0, + 17447892992.0, + 17447911424.0, + 17447895040.0, + 17448478720.0, + 17447882752.0, + 17447899136.0, + 17447878656.0, + 17447880704.0, + 17447903232.0, + 17447892992.0, + 17447901184.0, + 17447895040.0, + 17447882752.0, + 17447899136.0, + 17447899136.0, + 17447888896.0, + 17447890944.0, + 17447886848.0, + 17447899136.0, + 17447880704.0, + 17447878656.0, + 17447876608.0, + 17447892992.0, + 17447895040.0, + 17447890944.0, + 17447892992.0, + 17447905280.0, + 17447888896.0, + 17447892992.0, + 17447890944.0, + 17447890944.0, + 17447888896.0, + 17447907328.0, + 17447899136.0, + 17447897088.0, + 17447890944.0, + 17447886848.0, + 17447886848.0, + 17447903232.0, + 17447899136.0, + 17447888896.0, + 17447897088.0, + 17447895040.0, + 17447892992.0, + 17447884800.0, + 17447890944.0, + 17447897088.0, + 17447876608.0, + 17447907328.0, + 17447882752.0, + 17447903232.0, + 17447903232.0, + 17447907328.0, + 17447888896.0, + 17447890944.0, + 17447876608.0, + 17447886848.0, + 17447882752.0, + 17447897088.0, + 17447895040.0, + 17447890944.0, + 17447895040.0, + 17447890944.0, + 17447878656.0, + 17447901184.0, + 17447903232.0, + 17447888896.0, + 17447884800.0, + 17447886848.0, + 17447888896.0, + 17447890944.0, + 17447895040.0, + 17447888896.0, + 17447913472.0, + 17448865792.0, + 17448259584.0, + 17448257536.0, + 17448278016.0, + 17448267776.0, + 17448269824.0, + 17448263680.0, + 17448278016.0, + 17448269824.0, + 17448278016.0, + 17448275968.0, + 17448271872.0, + 17448280064.0, + 17448259584.0, + 17448261632.0, + 17448284160.0, + 17448263680.0, + 17448259584.0, + 17448275968.0, + 17448271872.0, + 17448261632.0, + 17448267776.0, + 17448259584.0, + 17448284160.0, + 17448267776.0, + 17448280064.0, + 17448269824.0, + 17448462336.0, + 17448275968.0, + 17448263680.0, + 17448271872.0, + 17448280064.0, + 17448284160.0, + 17448286208.0, + 17448267776.0, + 17448271872.0, + 17448257536.0, + 17448275968.0, + 17448267776.0, + 17448267776.0, + 17448263680.0, + 17448271872.0, + 17448269824.0, + 17448282112.0, + 17448280064.0, + 17448280064.0, + 17448271872.0, + 17448267776.0, + 17448282112.0, + 17448275968.0, + 17448269824.0, + 17448267776.0, + 17448273920.0, + 17448278016.0, + 17448267776.0, + 17448275968.0, + 17448271872.0, + 17448280064.0, + 17448265728.0, + 17448273920.0, + 17448269824.0, + 17448265728.0, + 17448267776.0, + 17448265728.0, + 17448265728.0, + 17448275968.0, + 17448269824.0, + 17448263680.0, + 17448261632.0, + 17448267776.0, + 17448267776.0, + 17448269824.0, + 17448271872.0, + 17448271872.0, + 17448275968.0, + 17448284160.0, + 17448263680.0, + 17448275968.0, + 17448271872.0, + 17448280064.0, + 17448273920.0, + 17448282112.0, + 17448292352.0, + 17448271872.0, + 17448255488.0, + 17448269824.0, + 17448280064.0, + 17448263680.0, + 17448275968.0, + 17448278016.0, + 17448271872.0, + 17448255488.0, + 17448282112.0, + 17448280064.0, + 17448284160.0, + 17448265728.0, + 17448280064.0, + 17448261632.0, + 17448255488.0, + 
17448263680.0, + 17448275968.0, + 17448280064.0, + 17448280064.0, + 17448273920.0, + 17448265728.0, + 17448271872.0, + 17448273920.0, + 17448280064.0, + 17448296448.0, + 17448280064.0, + 17448275968.0, + 17448261632.0, + 17448251392.0, + 17448247296.0, + 17448263680.0, + 17447874560.0, + 17447874560.0, + 17447880704.0, + 17447876608.0, + 17447874560.0, + 17447862272.0, + 17447884800.0, + 17447878656.0, + 17447886848.0, + 17447864320.0, + 17447876608.0, + 17447888896.0, + 17447876608.0, + 17447868416.0, + 17447872512.0, + 17447888896.0, + 17447882752.0, + 17447878656.0, + 17447872512.0, + 17447899136.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447864320.0, + 17447882752.0, + 17447874560.0, + 17447890944.0, + 17447874560.0, + 17447890944.0, + 17447872512.0, + 17447878656.0, + 17447890944.0, + 17447866368.0, + 17447872512.0, + 17447882752.0, + 17447876608.0, + 17447876608.0, + 17447872512.0, + 17447892992.0, + 17447880704.0, + 17447870464.0, + 17447888896.0, + 17447874560.0, + 17447858176.0, + 17447890944.0, + 17447878656.0, + 17447872512.0, + 17447884800.0, + 17447866368.0, + 17447880704.0, + 17448083456.0, + 17447870464.0, + 17447882752.0, + 17448239104.0, + 17447872512.0, + 17447870464.0, + 17447880704.0, + 17447884800.0, + 17447895040.0, + 17447866368.0, + 17447884800.0, + 17447862272.0, + 17447878656.0, + 17447876608.0, + 17447874560.0, + 17447882752.0, + 17447884800.0, + 17447880704.0, + 17447876608.0, + 17447890944.0, + 17447878656.0, + 17447874560.0, + 17447890944.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447886848.0, + 17447876608.0, + 17447880704.0, + 17447874560.0, + 17447874560.0, + 17447876608.0, + 17447880704.0, + 17447882752.0, + 17447870464.0, + 17447876608.0, + 17447862272.0, + 17447870464.0, + 17447868416.0, + 17447876608.0, + 17447886848.0, + 17447880704.0, + 17447882752.0, + 17447868416.0, + 17447876608.0, + 17447882752.0, + 17447870464.0, + 17447882752.0, + 17447860224.0, + 17447876608.0, + 17447864320.0, + 17447884800.0, + 17447874560.0, + 17447878656.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447870464.0, + 17447888896.0, + 17447880704.0, + 17447874560.0, + 17447866368.0, + 17447890944.0, + 17447864320.0, + 17447878656.0, + 17447858176.0, + 17447878656.0, + 17447872512.0, + 17447876608.0, + 17447880704.0, + 17447876608.0, + 17447882752.0, + 17447872512.0, + 17447884800.0, + 17447886848.0, + 17447870464.0, + 17447870464.0, + 17447882752.0, + 17447866368.0, + 17447886848.0, + 17447878656.0, + 17447870464.0, + 17447890944.0, + 17447876608.0, + 17447880704.0, + 17447870464.0, + 17447884800.0, + 17447886848.0, + 17447884800.0, + 17447882752.0, + 17447880704.0, + 17447872512.0, + 17447886848.0, + 17447866368.0, + 17447864320.0, + 17447870464.0, + 17447878656.0, + 17447886848.0, + 17447886848.0, + 17447886848.0, + 17447870464.0, + 17447874560.0, + 17447870464.0, + 17448024064.0, + 17447890944.0, + 17447878656.0, + 17447884800.0, + 17447874560.0, + 17447882752.0, + 17447862272.0, + 17447860224.0, + 17447868416.0, + 17447890944.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447874560.0, + 17447874560.0, + 17447872512.0, + 17447874560.0, + 17447874560.0, + 17447880704.0, + 17447878656.0, + 17447874560.0, + 17447884800.0, + 17447874560.0, + 17447878656.0, + 17447895040.0, + 17447870464.0, + 17447874560.0, + 17447886848.0, + 17447888896.0, + 17447878656.0, + 17447870464.0, + 17447880704.0, + 17447880704.0, + 17447876608.0, + 17447870464.0, + 17447878656.0, + 17447890944.0, + 17447880704.0, + 17447862272.0, + 
17447878656.0, + 17447888896.0, + 17447882752.0, + 17447864320.0, + 17447874560.0, + 17447882752.0, + 17447868416.0, + 17447892992.0, + 17447876608.0, + 17447878656.0, + 17447886848.0, + 17447866368.0, + 17447868416.0, + 17447874560.0, + 17447874560.0, + 17447882752.0, + 17447878656.0, + 17447870464.0, + 17447903232.0, + 17447874560.0, + 17447890944.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447878656.0, + 17447884800.0, + 17447876608.0, + 17447882752.0, + 17447876608.0, + 17447890944.0, + 17447872512.0, + 17447874560.0, + 17447882752.0, + 17447890944.0, + 17447874560.0, + 17447888896.0, + 17447890944.0, + 17447860224.0, + 17447862272.0, + 17447884800.0, + 17447864320.0, + 17447890944.0, + 17447878656.0, + 17447862272.0, + 17448318976.0, + 17447886848.0, + 17447892992.0, + 17447876608.0, + 17447862272.0, + 17447872512.0, + 17447870464.0, + 17447890944.0, + 17447876608.0, + 17447872512.0, + 17447868416.0, + 17447872512.0, + 17447880704.0, + 17447882752.0, + 17447886848.0, + 17447882752.0, + 17447866368.0, + 17447874560.0, + 17447874560.0, + 17447874560.0, + 17447892992.0, + 17448849408.0, + 17447882752.0, + 17447874560.0, + 17447895040.0, + 17447876608.0, + 17447880704.0, + 17447892992.0, + 17447882752.0, + 17447862272.0, + 17447882752.0, + 17447876608.0, + 17447886848.0, + 17447888896.0, + 17447884800.0, + 17447878656.0, + 17447866368.0, + 17447884800.0, + 17447882752.0, + 17447876608.0, + 17447897088.0, + 17447895040.0, + 17447858176.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447886848.0, + 17447884800.0, + 17447890944.0, + 17447884800.0, + 17447870464.0, + 17447862272.0, + 17447876608.0, + 17447886848.0, + 17447884800.0, + 17447880704.0, + 17447870464.0, + 17447874560.0, + 17447890944.0, + 17447878656.0, + 17447882752.0, + 17447880704.0, + 17448357888.0, + 17447876608.0, + 17447874560.0, + 17447878656.0, + 17447874560.0, + 17447878656.0, + 17447884800.0, + 17447876608.0, + 17447874560.0, + 17447882752.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447870464.0, + 17447884800.0, + 17447868416.0, + 17447874560.0, + 17447901184.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17447895040.0, + 17447876608.0, + 17447880704.0, + 17447872512.0, + 17448165376.0, + 17447876608.0, + 17448275968.0, + 17447872512.0, + 17447878656.0, + 17447880704.0, + 17447882752.0, + 17447892992.0, + 17447874560.0, + 17447874560.0, + 17447880704.0, + 17447888896.0, + 17447880704.0, + 17447876608.0, + 17447882752.0, + 17447884800.0, + 17447872512.0, + 17447876608.0, + 17447874560.0, + 17447880704.0, + 17448116224.0, + 17447888896.0, + 17447907328.0, + 17447872512.0, + 17447895040.0, + 17447872512.0, + 17447862272.0, + 17447876608.0, + 17447870464.0, + 17447874560.0, + 17447882752.0, + 17447878656.0, + 17448624128.0, + 17448597504.0, + 17447878656.0, + 17447884800.0, + 17447886848.0, + 17447874560.0, + 17447862272.0, + 17447876608.0, + 17447878656.0, + 17447872512.0, + 17447876608.0, + 17447884800.0, + 17447886848.0, + 17447880704.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447878656.0, + 17447890944.0, + 17447878656.0, + 17447882752.0, + 17447884800.0, + 17447862272.0, + 17447884800.0, + 17447878656.0, + 17447872512.0, + 17447888896.0, + 17447878656.0, + 17447886848.0, + 17447878656.0, + 17447874560.0, + 17447870464.0, + 17447907328.0, + 17447884800.0, + 17447890944.0, + 17447862272.0, + 17447864320.0, + 17447882752.0, + 17447868416.0, + 17447882752.0, + 17447878656.0, + 17447874560.0, + 17447876608.0, + 17447876608.0, + 17447866368.0, + 17447882752.0, + 
17447858176.0, + 17447874560.0, + 17447874560.0, + 17447864320.0, + 17447880704.0, + 17447886848.0, + 17447892992.0, + 17447874560.0, + 17447866368.0, + 17447880704.0, + 17447868416.0, + 17447888896.0, + 17447886848.0, + 17447878656.0, + 17447892992.0, + 17447888896.0, + 17447890944.0, + 17447886848.0, + 17447886848.0, + 17447890944.0, + 17447892992.0, + 17447874560.0, + 17447880704.0, + 17447878656.0, + 17447874560.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447876608.0, + 17448359936.0, + 17447886848.0, + 17447870464.0, + 17447870464.0, + 17447878656.0, + 17447876608.0, + 17447880704.0, + 17447868416.0, + 17447880704.0, + 17447870464.0, + 17447882752.0, + 17447890944.0, + 17447872512.0, + 17447882752.0, + 17447876608.0, + 17447872512.0, + 17447882752.0, + 17447882752.0, + 17447886848.0, + 17447886848.0, + 17447874560.0, + 17447866368.0, + 17447880704.0, + 17447878656.0, + 17447876608.0, + 17448390656.0, + 17448382464.0, + 17448382464.0, + 17448380416.0, + 17448769536.0, + 17448390656.0, + 17448386560.0, + 17448394752.0, + 17448384512.0, + 17448388608.0, + 17449306112.0, + 17448386560.0, + 17448396800.0, + 17448402944.0, + 17448390656.0, + 17448392704.0, + 17448392704.0, + 17448398848.0, + 17448372224.0, + 17448384512.0, + 17448378368.0, + 17448390656.0, + 17448390656.0, + 17448396800.0, + 17448378368.0, + 17448384512.0, + 17448388608.0, + 17448390656.0, + 17448384512.0, + 17448378368.0, + 17448372224.0, + 17448402944.0, + 17448374272.0, + 17448388608.0, + 17448384512.0, + 17448400896.0, + 17448390656.0, + 17448384512.0, + 17448388608.0, + 17448386560.0, + 17448398848.0, + 17448372224.0, + 17448374272.0, + 17448400896.0, + 17448380416.0, + 17448398848.0, + 17448386560.0, + 17448378368.0, + 17449261056.0, + 17448382464.0, + 17448392704.0, + 17448392704.0, + 17448390656.0, + 17448380416.0, + 17448382464.0, + 17448394752.0, + 17448384512.0, + 17448378368.0, + 17448390656.0, + 17448380416.0, + 17448382464.0, + 17448388608.0, + 17448382464.0, + 17448382464.0, + 17448382464.0, + 17448394752.0, + 17448382464.0, + 17448378368.0, + 17448390656.0, + 17448388608.0, + 17448394752.0, + 17448394752.0, + 17448386560.0, + 17448382464.0, + 17448374272.0, + 17448376320.0, + 17448382464.0, + 17448384512.0, + 17448392704.0, + 17448964096.0, + 17448386560.0, + 17448374272.0, + 17448382464.0, + 17448394752.0, + 17448364032.0, + 17448394752.0, + 17448392704.0, + 17448392704.0, + 17448390656.0, + 17448390656.0, + 17448378368.0, + 17448382464.0, + 17448390656.0, + 17448382464.0, + 17448390656.0, + 17448386560.0, + 17448382464.0, + 17448394752.0, + 17448390656.0, + 17448390656.0, + 17448388608.0, + 17448398848.0, + 17448384512.0, + 17448386560.0, + 17448394752.0, + 17448386560.0, + 17448402944.0, + 17448386560.0, + 17448388608.0, + 17448396800.0, + 17448388608.0, + 17448390656.0, + 17448382464.0, + 17448386560.0, + 17447870464.0, + 17447878656.0, + 17447888896.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447888896.0, + 17447884800.0, + 17447870464.0, + 17447874560.0, + 17447872512.0, + 17447874560.0, + 17447878656.0, + 17447872512.0, + 17447880704.0, + 17447876608.0, + 17447874560.0, + 17447876608.0, + 17447868416.0, + 17447882752.0, + 17447882752.0, + 17447868416.0, + 17447886848.0, + 17447872512.0, + 17447886848.0, + 17447882752.0, + 17447880704.0, + 17447890944.0, + 17447876608.0, + 17447878656.0, + 17448468480.0, + 17447880704.0, + 17447886848.0, + 17447878656.0, + 17447874560.0, + 17447868416.0, + 17447870464.0, + 17447874560.0, + 17447874560.0, + 17447884800.0, + 17447880704.0, + 
17447882752.0, + 17447864320.0, + 17447862272.0, + 17447878656.0, + 17447870464.0, + 17447862272.0, + 17447888896.0, + 17447880704.0, + 17447874560.0, + 17447901184.0, + 17447870464.0, + 17447882752.0, + 17447882752.0, + 17447886848.0, + 17447880704.0, + 17447874560.0, + 17447868416.0, + 17447878656.0, + 17447872512.0, + 17447884800.0, + 17447886848.0, + 17447864320.0, + 17447901184.0, + 17447880704.0, + 17447862272.0, + 17447876608.0, + 17447880704.0, + 17447876608.0, + 17447886848.0, + 17447868416.0, + 17447876608.0, + 17447880704.0, + 17447880704.0, + 17447878656.0, + 17447880704.0, + 17447890944.0, + 17447882752.0, + 17447870464.0, + 17447870464.0, + 17447888896.0, + 17447870464.0, + 17447876608.0, + 17447878656.0, + 17447864320.0, + 17447884800.0, + 17447870464.0, + 17447888896.0, + 17447882752.0, + 17447890944.0, + 17447882752.0, + 17447895040.0, + 17447874560.0, + 17447884800.0, + 17447888896.0, + 17447882752.0, + 17447872512.0, + 17447882752.0, + 17447870464.0, + 17447886848.0, + 17447870464.0, + 17447874560.0, + 17447866368.0, + 17447878656.0, + 17447876608.0, + 17447870464.0, + 17447876608.0, + 17447866368.0, + 17447878656.0, + 17447888896.0, + 17447874560.0, + 17447884800.0, + 17447874560.0, + 17447890944.0, + 17447878656.0, + 17447882752.0, + 17447866368.0, + 17447880704.0, + 17447884800.0, + 17447882752.0, + 17447872512.0, + 17447876608.0, + 17447886848.0, + 17447882752.0, + 17447878656.0, + 17447874560.0, + 17447890944.0, + 17447882752.0, + 17447886848.0, + 17447874560.0, + 17447876608.0, + 17447874560.0, + 17447884800.0, + 17447878656.0, + 17447864320.0, + 17447884800.0, + 17447874560.0, + 17447872512.0, + 17447880704.0, + 17447878656.0, + 17448693760.0, + 17447878656.0, + 17447890944.0, + 17447868416.0, + 17447878656.0, + 17447882752.0, + 17447892992.0, + 17447884800.0, + 17447888896.0, + 17447880704.0, + 17447880704.0, + 17447878656.0, + 17447868416.0, + 17447876608.0, + 17447890944.0, + 17447886848.0, + 17447876608.0, + 17447872512.0, + 17447888896.0, + 17447890944.0, + 17447866368.0, + 17447880704.0, + 17447864320.0, + 17447890944.0, + 17447886848.0, + 17447870464.0, + 17447878656.0, + 17447903232.0, + 17447876608.0, + 17447892992.0, + 17447866368.0, + 17447884800.0, + 17447852032.0, + 17447880704.0, + 17447882752.0, + 17447874560.0, + 17447866368.0, + 17447899136.0, + 17447872512.0, + 17447878656.0, + 17447880704.0, + 17447874560.0, + 17447856128.0, + 17447886848.0, + 17447895040.0, + 17447866368.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447862272.0, + 17447870464.0, + 17448798208.0, + 17447878656.0, + 17447870464.0, + 17447870464.0, + 17447864320.0, + 17447886848.0, + 17447874560.0, + 17447878656.0, + 17447888896.0, + 17447899136.0, + 17447886848.0, + 17447882752.0, + 17447878656.0, + 17447864320.0, + 17447888896.0, + 17447882752.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447874560.0, + 17447868416.0, + 17447876608.0, + 17447888896.0, + 17447874560.0, + 17447884800.0, + 17447882752.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447870464.0, + 17447874560.0, + 17447882752.0, + 17447886848.0, + 17447876608.0, + 17447878656.0, + 17447870464.0, + 17448114176.0, + 17447884800.0, + 17447878656.0, + 17447884800.0, + 17447874560.0, + 17447878656.0, + 17448140800.0, + 17447878656.0, + 17447870464.0, + 17447892992.0, + 17447870464.0, + 17447892992.0, + 17447890944.0, + 17447870464.0, + 17447890944.0, + 17447888896.0, + 17447878656.0, + 17447874560.0, + 17447880704.0, + 17447895040.0, + 
17447872512.0, + 17447878656.0, + 17447874560.0, + 17447886848.0, + 17448515584.0, + 17448247296.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447872512.0, + 17447878656.0, + 17447878656.0, + 17447876608.0, + 17447884800.0, + 17447878656.0, + 17447866368.0, + 17447878656.0, + 17447864320.0, + 17447884800.0, + 17447878656.0, + 17447880704.0, + 17447878656.0, + 17447892992.0, + 17447870464.0, + 17447876608.0, + 17447878656.0, + 17447880704.0, + 17447880704.0, + 17447884800.0, + 17447876608.0, + 17447895040.0, + 17447870464.0, + 17447874560.0, + 17447872512.0, + 17447868416.0, + 17447890944.0, + 17447882752.0, + 17447892992.0, + 17447899136.0, + 17447866368.0, + 17447878656.0, + 17447868416.0, + 17447866368.0, + 17447890944.0, + 17447878656.0, + 17447866368.0, + 17447878656.0, + 17447876608.0, + 17447876608.0, + 17447874560.0, + 17447895040.0, + 17447866368.0, + 17447890944.0, + 17447882752.0, + 17447882752.0, + 17447868416.0, + 17447870464.0, + 17447880704.0, + 17447884800.0, + 17447876608.0, + 17447886848.0, + 17447870464.0, + 17447905280.0, + 17447884800.0, + 17447880704.0, + 17447878656.0, + 17447882752.0, + 17447870464.0, + 17447874560.0, + 17447870464.0, + 17447878656.0, + 17447878656.0, + 17447874560.0, + 17447862272.0, + 17447886848.0, + 17447884800.0, + 17447874560.0, + 17447884800.0, + 17447890944.0, + 17447872512.0, + 17447876608.0, + 17447878656.0, + 17447882752.0, + 17447878656.0, + 17447876608.0, + 17447895040.0, + 17447884800.0, + 17447882752.0, + 17447870464.0, + 17447872512.0, + 17447874560.0, + 17447878656.0, + 17447862272.0, + 17447892992.0, + 17447882752.0, + 17447872512.0, + 17447890944.0, + 17447870464.0, + 17447878656.0, + 17447874560.0, + 17447882752.0, + 17447878656.0, + 17447880704.0, + 17448763392.0, + 17447878656.0, + 17447878656.0, + 17447890944.0, + 17447862272.0, + 17447876608.0, + 17447884800.0, + 17447888896.0, + 17447895040.0, + 17447870464.0, + 17447878656.0, + 17447868416.0, + 17447872512.0, + 17447866368.0, + 17447880704.0, + 17447870464.0, + 17447864320.0, + 17447890944.0, + 17447872512.0, + 17447870464.0, + 17447884800.0, + 17447882752.0, + 17447890944.0, + 17447976960.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447878656.0, + 17447866368.0, + 17447890944.0, + 17447870464.0, + 17447888896.0, + 17447890944.0, + 17447878656.0, + 17447882752.0, + 17447886848.0, + 17447886848.0, + 17447878656.0, + 17447880704.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447878656.0, + 17447886848.0, + 17447868416.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447890944.0, + 17447878656.0, + 17447866368.0, + 17447888896.0, + 17447878656.0, + 17447874560.0, + 17447892992.0, + 17447874560.0, + 17447886848.0, + 17447870464.0, + 17447880704.0, + 17447876608.0, + 17447886848.0, + 17447872512.0, + 17447884800.0, + 17447884800.0, + 17447888896.0, + 17447878656.0, + 17447862272.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17447882752.0, + 17447866368.0, + 17447880704.0, + 17447890944.0, + 17447876608.0, + 17447882752.0, + 17447868416.0, + 17447878656.0, + 17448085504.0, + 17447882752.0, + 17447882752.0, + 17447882752.0, + 17447880704.0, + 17447866368.0, + 17447886848.0, + 17447866368.0, + 17447858176.0, + 17447876608.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447878656.0, + 17447888896.0, + 17447872512.0, + 17447866368.0, + 17447886848.0, + 17447876608.0, + 17447886848.0, + 17447870464.0, + 17447866368.0, + 17447882752.0, + 17447870464.0, + 17447892992.0, + 17447872512.0, + 17447882752.0, + 17447878656.0, + 
17447862272.0, + 17447880704.0, + 17447886848.0, + 17447882752.0, + 17447872512.0, + 17447878656.0, + 17447872512.0, + 17447884800.0, + 17447884800.0, + 17447874560.0, + 17447872512.0, + 17447890944.0, + 17447886848.0, + 17447876608.0, + 17447878656.0, + 17447895040.0, + 17447880704.0, + 17447872512.0, + 17447884800.0, + 17447876608.0, + 17447884800.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447874560.0, + 17447882752.0, + 17447882752.0, + 17448259584.0, + 17447880704.0, + 17447876608.0, + 17447864320.0, + 17447882752.0, + 17447874560.0, + 17447878656.0, + 17447882752.0, + 17447870464.0, + 17447878656.0, + 17447882752.0, + 17447880704.0, + 17447878656.0, + 17447899136.0, + 17447884800.0, + 17447872512.0, + 17448570880.0, + 17447866368.0, + 17447888896.0, + 17447878656.0, + 17447866368.0, + 17447882752.0, + 17447895040.0, + 17447878656.0, + 17447878656.0, + 17447888896.0, + 17447884800.0, + 17447880704.0, + 17447874560.0, + 17447901184.0, + 17447878656.0, + 17447874560.0, + 17447878656.0, + 17447872512.0, + 17447880704.0, + 17447880704.0, + 17447872512.0, + 17447878656.0, + 17447868416.0, + 17447886848.0, + 17447870464.0, + 17447872512.0, + 17447890944.0, + 17447870464.0, + 17447882752.0, + 17447882752.0, + 17447862272.0, + 17447878656.0, + 17447886848.0, + 17447882752.0, + 17447874560.0, + 17447878656.0, + 17447874560.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17448110080.0, + 17447890944.0, + 17447886848.0, + 17447874560.0, + 17447878656.0, + 17447892992.0, + 17447878656.0, + 17447872512.0, + 17447886848.0, + 17447874560.0, + 17447886848.0, + 17447884800.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447880704.0, + 17447876608.0, + 17447880704.0, + 17447882752.0, + 17447874560.0, + 17447862272.0, + 17447882752.0, + 17447876608.0, + 17447878656.0, + 17447876608.0, + 17447876608.0, + 17447876608.0, + 17447876608.0, + 17448497152.0, + 17447876608.0, + 17447899136.0, + 17447884800.0, + 17447870464.0, + 17447876608.0, + 17447862272.0, + 17447890944.0, + 17447874560.0, + 17447870464.0, + 17447882752.0, + 17447895040.0, + 17447876608.0, + 17447882752.0, + 17447888896.0, + 17447884800.0, + 17447880704.0, + 17447878656.0, + 17447897088.0, + 17447878656.0, + 17447872512.0, + 17447868416.0, + 17447872512.0, + 17447876608.0, + 17447878656.0, + 17447874560.0, + 17447870464.0, + 17447872512.0, + 17447890944.0, + 17447874560.0, + 17447864320.0, + 17447878656.0, + 17447870464.0, + 17448939520.0, + 17447858176.0, + 17447874560.0, + 17447882752.0, + 17447878656.0, + 17447866368.0, + 17447882752.0, + 17447864320.0, + 17447882752.0, + 17447862272.0, + 17447874560.0, + 17447882752.0, + 17447886848.0, + 17447872512.0, + 17447880704.0, + 17447862272.0, + 17447880704.0, + 17447868416.0, + 17447862272.0, + 17447874560.0, + 17448544256.0, + 17447895040.0, + 17447886848.0, + 17447895040.0, + 17447880704.0, + 17447874560.0, + 17447890944.0, + 17447882752.0, + 17447870464.0, + 17447870464.0, + 17447890944.0, + 17447882752.0, + 17447870464.0, + 17447880704.0, + 17447882752.0, + 17447895040.0, + 17447878656.0, + 17447886848.0, + 17447872512.0, + 17447886848.0, + 17447872512.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447878656.0, + 17447878656.0, + 17447897088.0, + 17447872512.0, + 17447886848.0, + 17447870464.0, + 17447886848.0, + 17447866368.0, + 17447886848.0, + 17447874560.0, + 17447888896.0, + 17447870464.0, + 17447874560.0, + 17447878656.0, + 17447882752.0, + 17447868416.0, + 17447880704.0, + 17447872512.0, + 17447880704.0, + 17447882752.0, + 17447878656.0, + 
17447878656.0, + 17447874560.0, + 17447880704.0, + 17447880704.0, + 17447876608.0, + 17447888896.0, + 17447878656.0, + 17447868416.0, + 17447878656.0, + 17447874560.0, + 17447870464.0, + 17447866368.0, + 17447890944.0, + 17447872512.0, + 17447874560.0, + 17447880704.0, + 17447888896.0, + 17447874560.0, + 17447878656.0, + 17447872512.0, + 17447872512.0, + 17447878656.0, + 17447878656.0, + 17447876608.0, + 17447878656.0, + 17447884800.0, + 17447878656.0, + 17447880704.0, + 17447866368.0, + 17447874560.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447866368.0, + 17447886848.0, + 17447888896.0, + 17447882752.0, + 17447874560.0, + 17447882752.0, + 17447884800.0, + 17447882752.0, + 17447897088.0, + 17447878656.0, + 17447895040.0, + 17447886848.0, + 17447882752.0, + 17447870464.0, + 17447882752.0, + 17447868416.0, + 17447884800.0, + 17447882752.0, + 17447882752.0, + 17447864320.0, + 17447868416.0, + 17447880704.0, + 17447890944.0, + 17447876608.0, + 17447886848.0, + 17447886848.0, + 17447868416.0, + 17447874560.0, + 17447884800.0, + 17447866368.0, + 17447866368.0, + 17447872512.0, + 17447872512.0, + 17447868416.0, + 17447878656.0, + 17447874560.0, + 17447888896.0, + 17447880704.0, + 17447872512.0, + 17447886848.0, + 17447872512.0, + 17447890944.0, + 17447874560.0, + 17447888896.0, + 17447866368.0, + 17447880704.0, + 17447882752.0, + 17447878656.0, + 17447876608.0, + 17447878656.0, + 17447884800.0, + 17447876608.0, + 17447888896.0, + 17447870464.0, + 17447892992.0, + 17447870464.0, + 17447868416.0, + 17447886848.0, + 17447882752.0, + 17447884800.0, + 17447880704.0, + 17447882752.0, + 17447874560.0, + 17447886848.0, + 17447878656.0, + 17447862272.0, + 17447876608.0, + 17447878656.0, + 17447872512.0, + 17447882752.0, + 17447895040.0, + 17447886848.0, + 17447874560.0, + 17447860224.0, + 17447880704.0, + 17447882752.0, + 17447874560.0, + 17447874560.0, + 17447878656.0, + 17447876608.0, + 17447880704.0, + 17447878656.0, + 17447882752.0, + 17447874560.0, + 17447888896.0, + 17447886848.0, + 17447872512.0, + 17447882752.0, + 17447880704.0, + 17447880704.0, + 17447870464.0, + 17447866368.0, + 17447882752.0, + 17447874560.0, + 17447878656.0, + 17447884800.0, + 17447882752.0, + 17447874560.0, + 17447878656.0, + 17447878656.0, + 17447866368.0, + 17447880704.0, + 17447876608.0, + 17447874560.0, + 17447870464.0, + 17447880704.0, + 17447870464.0, + 17447884800.0, + 17447897088.0, + 17447878656.0, + 17447888896.0, + 17447870464.0, + 17447876608.0, + 17447874560.0, + 17447878656.0, + 17447886848.0, + 17447872512.0, + 17447868416.0, + 17447878656.0, + 17447884800.0, + 17447886848.0, + 17447872512.0, + 17447874560.0, + 17447874560.0, + 17447886848.0, + 17447872512.0, + 17447878656.0, + 17447876608.0, + 17447886848.0, + 17447870464.0, + 17447872512.0, + 17447872512.0, + 17447864320.0, + 17447880704.0, + 17447890944.0, + 17447884800.0, + 17447878656.0, + 17447907328.0, + 17447870464.0, + 17447870464.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447899136.0, + 17447882752.0, + 17448333312.0, + 17447874560.0, + 17447892992.0, + 17447874560.0, + 17447882752.0, + 17447878656.0, + 17447870464.0, + 17447874560.0, + 17447870464.0, + 17447874560.0, + 17447888896.0, + 17447878656.0, + 17447878656.0, + 17447886848.0, + 17447878656.0, + 17447882752.0, + 17447876608.0, + 17447936000.0, + 17447878656.0, + 17447884800.0, + 17447876608.0, + 17447880704.0, + 17447888896.0, + 17447866368.0, + 17447872512.0, + 17447874560.0, + 17447872512.0, + 17447882752.0, + 17447876608.0, + 17447862272.0, + 
17448724480.0, + 17447878656.0, + 17447876608.0, + 17447876608.0, + 17447872512.0, + 17447880704.0, + 17447884800.0, + 17447882752.0, + 17447878656.0, + 17447880704.0, + 17447878656.0, + 17447864320.0, + 17447878656.0, + 17447880704.0, + 17447882752.0, + 17447878656.0, + 17447878656.0, + 17447870464.0, + 17447866368.0, + 17447878656.0, + 17447878656.0, + 17447876608.0, + 17447882752.0, + 17447880704.0, + 17447886848.0, + 17447895040.0, + 17447890944.0, + 17447862272.0, + 17447878656.0, + 17447878656.0, + 17447866368.0, + 17447876608.0, + 17447888896.0, + 17447884800.0, + 17447872512.0, + 17447882752.0, + 17447870464.0, + 17447892992.0, + 17447866368.0, + 17447878656.0, + 17447880704.0, + 17447870464.0, + 17447866368.0, + 17447876608.0, + 17447880704.0, + 17447892992.0, + 17447882752.0, + 17447884800.0, + 17447882752.0, + 17447874560.0, + 17447890944.0, + 17447895040.0, + 17447890944.0, + 17447886848.0, + 17447872512.0, + 17447882752.0, + 17447884800.0, + 17447882752.0, + 17447874560.0, + 17447882752.0, + 17447872512.0, + 17447888896.0, + 17447868416.0, + 17447878656.0, + 17447870464.0, + 17447880704.0, + 17447874560.0, + 17448169472.0, + 17447878656.0, + 17447880704.0, + 17447878656.0, + 17447882752.0, + 17447882752.0, + 17447874560.0, + 17447876608.0, + 17447880704.0, + 17447868416.0, + 17447878656.0, + 17447878656.0, + 17447878656.0, + 17447868416.0, + 17447880704.0, + 17447882752.0, + 17447878656.0, + 17447876608.0, + 17447878656.0, + 17447874560.0, + 17447884800.0, + 17447880704.0, + 17447882752.0, + 17447872512.0, + 17447880704.0, + 17447878656.0, + 17447870464.0, + 17447872512.0, + 17447886848.0, + 17448013824.0, + 17447872512.0, + 17447884800.0, + 17447880704.0, + 17447862272.0, + 17447886848.0, + 17447874560.0, + 17447890944.0, + 17447866368.0, + 17447884800.0, + 17447878656.0, + 17447864320.0, + 17447876608.0, + 17447870464.0, + 17447872512.0, + 17447882752.0, + 17447876608.0, + 17447882752.0, + 17447878656.0, + 17447880704.0, + 17447872512.0, + 17447874560.0, + 17447872512.0, + 17447876608.0, + 17447895040.0, + 17447874560.0, + 17447874560.0, + 17447870464.0, + 17447876608.0, + 17447872512.0, + 17447868416.0, + 17447878656.0, + 17447862272.0, + 17447878656.0, + 17447876608.0, + 17447880704.0, + 17447870464.0, + 17447876608.0, + 17447890944.0, + 17447874560.0, + 17447886848.0, + 17447882752.0, + 17447888896.0, + 17447880704.0, + 17448466432.0, + 17447882752.0, + 17447876608.0, + 17447868416.0, + 17447872512.0, + 17447890944.0, + 17447897088.0, + 17447876608.0, + 17447874560.0, + 17447890944.0, + 17447878656.0, + 17447870464.0, + 17447882752.0, + 17447872512.0, + 17447886848.0, + 17447888896.0, + 17447882752.0, + 17447872512.0, + 17447866368.0, + 17447878656.0 ] }, "iteration-time": { "start_step": 0, - "end_step": 502, + "end_step": 25809, "step_interval": 5, "values": [ 105.86866, @@ -320,7 +10442,5068 @@ 22.69756, 22.35847, 22.84454, - 22.16427 + 22.16427, + 22.42444, + 22.43595, + 22.46487, + 22.40865, + 22.44312, + 22.45533, + 22.71659, + 22.41388, + 22.36669, + 22.49695, + 22.49306, + 22.65398, + 22.64062, + 22.85151, + 22.6082, + 22.72738, + 22.56372, + 22.70258, + 22.43925, + 101.14027, + 22.5031, + 22.76764, + 22.67679, + 22.41643, + 22.6319, + 22.398, + 22.86879, + 22.67579, + 22.62794, + 22.53665, + 22.67882, + 22.5028, + 22.52929, + 23.00784, + 22.50065, + 22.44123, + 22.76723, + 22.51196, + 22.7051, + 22.76956, + 22.52012, + 22.43069, + 22.40474, + 22.38156, + 22.47368, + 22.32673, + 22.40841, + 22.2759, + 22.51299, + 22.3214, + 22.86805, + 22.57032, + 22.37732, + 
22.69439, + 22.65036, + 34.68773, + 25.7873, + 23.00085, + 22.46626, + 22.42371, + 23.02043, + 22.3282, + 22.45572, + 23.16323, + 22.28081, + 22.40856, + 23.19218, + 22.47156, + 23.06928, + 23.54648, + 22.44444, + 22.51854, + 23.50013, + 25.00345, + 32.67469, + 23.51427, + 22.31341, + 22.34525, + 22.84754, + 22.49431, + 22.44482, + 23.15204, + 22.29314, + 22.3289, + 22.44074, + 22.36134, + 23.06536, + 22.62574, + 22.56191, + 22.75284, + 22.55342, + 22.49709, + 22.30702, + 23.17389, + 22.35194, + 22.47066, + 22.50252, + 22.38508, + 22.32332, + 22.29499, + 22.64989, + 25.34019, + 26.20888, + 34.42688, + 22.71979, + 22.34598, + 22.32874, + 22.40121, + 22.29541, + 22.49414, + 22.34285, + 22.72862, + 22.65599, + 22.53123, + 22.3385, + 22.85989, + 22.42258, + 22.65887, + 23.03068, + 22.46347, + 22.4894, + 22.7975, + 22.94465, + 22.49659, + 23.17386, + 22.3175, + 22.39908, + 23.28626, + 22.32511, + 109.73788, + 22.4802, + 22.72729, + 22.61836, + 22.47513, + 22.44307, + 22.47037, + 22.40571, + 22.39138, + 22.51142, + 22.45977, + 22.42165, + 22.36773, + 22.32747, + 22.62535, + 22.35597, + 22.31357, + 22.87909, + 22.61735, + 22.3368, + 22.48093, + 22.49195, + 22.29134, + 22.46662, + 22.28344, + 22.48509, + 22.3982, + 22.31272, + 22.54745, + 22.79593, + 22.66751, + 22.7888, + 22.44623, + 22.90924, + 22.94298, + 22.70551, + 22.59248, + 22.44114, + 23.25265, + 22.6757, + 22.81174, + 22.79008, + 22.40932, + 22.52846, + 22.74684, + 22.64011, + 22.24557, + 22.44391, + 22.22307, + 22.20709, + 22.96877, + 22.22865, + 22.5563, + 22.75453, + 22.27962, + 22.35249, + 22.90046, + 22.31525, + 22.21288, + 22.95827, + 22.21294, + 22.43736, + 22.93256, + 22.69221, + 22.29764, + 22.3734, + 22.82716, + 22.44497, + 22.37052, + 22.33652, + 22.42637, + 22.30613, + 22.42651, + 22.4247, + 22.33259, + 22.30497, + 22.42634, + 22.2886, + 22.26643, + 22.23274, + 22.21864, + 22.64359, + 22.24904, + 22.36227, + 22.47831, + 22.39154, + 22.28922, + 22.68583, + 22.69337, + 22.33331, + 22.66439, + 22.29401, + 22.32352, + 22.75153, + 22.30951, + 22.38224, + 22.95873, + 22.35417, + 22.30513, + 23.46101, + 22.2886, + 22.24117, + 23.07443, + 22.4121, + 22.32479, + 22.83049, + 22.32771, + 22.36772, + 22.60619, + 22.26879, + 22.70377, + 22.97411, + 22.54233, + 22.6727, + 22.69834, + 23.01474, + 23.07424, + 23.89499, + 23.76587, + 23.45024, + 23.09168, + 22.51823, + 22.40998, + 22.32227, + 22.41722, + 22.23259, + 22.38729, + 22.3257, + 22.41275, + 22.21203, + 22.39303, + 22.17919, + 22.20379, + 22.2459, + 22.23867, + 22.36098, + 22.4702, + 22.32046, + 22.27016, + 22.31057, + 22.24971, + 22.25786, + 22.61771, + 22.22671, + 22.34153, + 22.38118, + 22.26394, + 22.24669, + 22.44497, + 22.23526, + 23.25095, + 22.23099, + 25.221, + 23.47947, + 22.21563, + 22.54813, + 23.25688, + 22.40309, + 22.19077, + 23.20723, + 22.24504, + 22.50768, + 22.69789, + 22.26528, + 22.24601, + 22.57661, + 22.22315, + 22.30669, + 22.64958, + 22.19949, + 22.19627, + 22.16858, + 22.27374, + 22.22293, + 22.6518, + 22.50734, + 22.323, + 22.29104, + 22.24173, + 22.55919, + 22.26411, + 22.46166, + 22.28385, + 22.47416, + 22.31791, + 22.2841, + 22.59189, + 22.30555, + 22.46978, + 22.16957, + 22.3074, + 22.19136, + 22.15528, + 22.18854, + 22.2021, + 22.14673, + 22.60293, + 22.6481, + 22.46713, + 23.36876, + 22.39404, + 22.22066, + 23.27526, + 22.17223, + 22.62513, + 23.22205, + 22.2436, + 22.2744, + 22.87858, + 22.22367, + 22.19553, + 22.74681, + 22.33299, + 22.39791, + 22.68906, + 22.62132, + 22.23763, + 22.31749, + 22.23967, + 22.26274, + 22.16136, + 22.4197, + 22.49426, + 
22.14672, + 22.28955, + 22.19643, + 22.3853, + 22.41279, + 22.23421, + 22.30954, + 22.26539, + 22.31166, + 22.32302, + 22.26051, + 22.51379, + 22.29998, + 22.31581, + 22.28776, + 22.21906, + 22.34208, + 22.24649, + 22.37438, + 22.30338, + 22.44025, + 22.29842, + 22.4917, + 22.25071, + 22.22369, + 22.37264, + 22.26021, + 22.22922, + 22.9261, + 22.55762, + 22.29391, + 23.25415, + 22.6554, + 22.46727, + 23.43125, + 22.33364, + 22.32415, + 23.30188, + 22.3106, + 22.30622, + 23.30781, + 22.29728, + 22.29022, + 22.5379, + 22.30253, + 22.36467, + 22.38128, + 22.44048, + 22.31472, + 22.48322, + 22.266, + 22.33748, + 22.36523, + 22.4067, + 22.24718, + 22.27639, + 22.26624, + 22.23374, + 22.46478, + 22.27094, + 22.24064, + 22.20455, + 22.28345, + 22.27359, + 22.22132, + 22.34988, + 22.26994, + 22.50601, + 22.34611, + 22.30626, + 22.33995, + 22.2312, + 22.27587, + 22.23085, + 22.54672, + 22.25329, + 22.43076, + 22.96232, + 22.36468, + 22.37718, + 23.43173, + 22.27805, + 23.78584, + 24.4831, + 22.90033, + 22.81812, + 23.65196, + 56.45613, + 22.51331, + 23.30863, + 22.29567, + 22.25118, + 22.94326, + 22.21761, + 22.17075, + 22.74069, + 22.27514, + 22.15032, + 22.50908, + 22.19934, + 22.55052, + 22.82322, + 22.28077, + 22.36117, + 22.44909, + 22.4424, + 22.22169, + 22.22557, + 22.22998, + 22.16221, + 22.38628, + 22.30353, + 22.23189, + 22.24877, + 22.3081, + 22.20495, + 22.2328, + 22.3289, + 22.26328, + 22.16943, + 22.22003, + 22.18421, + 22.13651, + 22.19386, + 22.33811, + 75.57841, + 22.83766, + 22.49433, + 22.90823, + 22.10073, + 22.17331, + 22.91005, + 22.0739, + 38.58989, + 23.2531, + 22.19735, + 22.1543, + 23.24873, + 22.21465, + 22.16186, + 23.30331, + 22.10781, + 22.24317, + 22.22847, + 22.15637, + 22.49435, + 22.30383, + 22.74896, + 22.72693, + 22.34111, + 22.2892, + 22.26019, + 22.18476, + 22.17116, + 22.27654, + 22.09598, + 22.25638, + 22.55965, + 22.13537, + 22.12425, + 22.12707, + 22.25503, + 22.3358, + 22.29519, + 22.13488, + 22.26938, + 22.19761, + 22.4934, + 22.24306, + 22.11744, + 22.28918, + 22.45942, + 22.64582, + 22.23536, + 22.71051, + 22.12984, + 22.15548, + 22.87831, + 22.04995, + 22.14385, + 23.33722, + 22.32115, + 22.13066, + 23.09654, + 22.25108, + 22.21047, + 23.01985, + 22.24864, + 22.14587, + 22.42055, + 22.24742, + 22.20138, + 22.66302, + 22.25027, + 22.321, + 22.18202, + 22.13944, + 22.08795, + 22.13778, + 22.72377, + 22.09366, + 22.25969, + 22.13122, + 22.12656, + 22.50283, + 22.11498, + 22.22658, + 22.11015, + 22.10616, + 22.53533, + 22.44845, + 22.11857, + 22.13022, + 22.2749, + 22.37151, + 22.15915, + 22.15242, + 22.27226, + 22.09876, + 22.40813, + 22.34806, + 22.06896, + 22.11633, + 22.45255, + 22.56616, + 22.19688, + 22.91029, + 22.23645, + 22.17638, + 22.39302, + 22.16422, + 22.13814, + 22.22944, + 22.15951, + 22.36833, + 22.11834, + 22.19846, + 22.15721, + 22.14138, + 22.24758, + 22.18874, + 22.29269, + 22.15148, + 22.5053, + 22.13033, + 22.1671, + 22.16595, + 22.51783, + 22.22311, + 22.13156, + 22.58138, + 22.57103, + 22.22161, + 23.10209, + 22.36046, + 22.2058, + 23.24473, + 22.1824, + 22.18779, + 23.21699, + 22.30294, + 22.32474, + 23.0402, + 22.13272, + 22.10887, + 22.34825, + 22.17337, + 22.08873, + 22.1289, + 22.69025, + 22.13729, + 22.16747, + 22.11914, + 22.22668, + 22.29111, + 22.32997, + 22.97981, + 22.32437, + 22.34959, + 22.32594, + 22.42304, + 22.26817, + 22.16518, + 22.24685, + 22.25327, + 22.2315, + 22.15087, + 22.75643, + 22.09856, + 22.23405, + 22.18762, + 22.08163, + 22.14593, + 22.31931, + 22.0885, + 22.1177, + 22.85615, + 22.06519, + 22.02122, + 
23.03752, + 22.14087, + 22.17897, + 25.75191, + 22.93589, + 22.30614, + 23.35775, + 22.1795, + 22.19582, + 22.8428, + 22.08013, + 22.13661, + 22.37544, + 22.09806, + 22.17831, + 22.20607, + 22.09212, + 22.23389, + 22.07772, + 22.18924, + 22.0577, + 22.19938, + 22.09173, + 22.31145, + 22.36939, + 22.04991, + 22.18527, + 22.10738, + 22.18981, + 22.11068, + 22.07264, + 22.25061, + 22.12102, + 22.13982, + 22.15264, + 22.44484, + 22.07088, + 22.20173, + 22.14096, + 22.10879, + 22.71354, + 22.10233, + 96.94515, + 22.27471, + 22.32662, + 22.37228, + 22.32926, + 22.41883, + 22.3726, + 22.45572, + 22.3245, + 22.48049, + 22.32897, + 22.28501, + 22.26884, + 22.26314, + 22.35017, + 22.28479, + 22.25477, + 22.27602, + 22.41632, + 22.23596, + 22.30393, + 22.42352, + 22.2961, + 22.25686, + 22.29131, + 22.67199, + 22.26909, + 22.44259, + 22.23191, + 22.83599, + 22.25297, + 22.24627, + 22.22356, + 22.2168, + 22.34749, + 22.52471, + 22.71684, + 22.39006, + 22.88928, + 22.28347, + 22.25723, + 22.72161, + 22.28623, + 22.3949, + 22.99483, + 22.20708, + 22.2303, + 23.13258, + 22.29917, + 22.18401, + 23.22085, + 22.2282, + 22.2045, + 23.05483, + 22.23938, + 22.49996, + 23.0514, + 22.22065, + 22.25204, + 22.26876, + 22.25576, + 22.28014, + 22.73024, + 22.23362, + 22.21972, + 22.24227, + 22.33502, + 22.33718, + 22.22531, + 22.43032, + 22.18942, + 22.30852, + 22.20391, + 22.22912, + 22.5215, + 22.18131, + 22.70087, + 22.2394, + 22.24933, + 22.17265, + 22.22171, + 22.31515, + 22.21229, + 22.25623, + 22.53603, + 22.33367, + 22.28302, + 22.48313, + 22.32134, + 22.22671, + 22.57547, + 22.23061, + 22.52828, + 22.75087, + 22.20845, + 22.62729, + 23.00921, + 22.21634, + 22.29214, + 23.26728, + 22.21111, + 22.16872, + 23.18336, + 22.33585, + 22.19185, + 22.62865, + 22.20496, + 22.23197, + 23.11489, + 22.47825, + 22.53148, + 22.51105, + 22.22266, + 22.25352, + 22.14376, + 22.0836, + 22.17412, + 22.11997, + 22.19344, + 22.05511, + 22.41642, + 22.08454, + 22.05458, + 22.09809, + 22.04645, + 22.07869, + 22.46114, + 22.34058, + 22.19998, + 22.10085, + 22.14581, + 22.07247, + 22.06751, + 22.07777, + 22.02308, + 22.06044, + 22.08314, + 22.03106, + 22.04277, + 22.03313, + 22.04535, + 22.03092, + 22.06435, + 22.50131, + 22.04072, + 22.06748, + 22.81533, + 22.42007, + 23.23182, + 22.72823, + 22.48266, + 23.12468, + 22.27155, + 22.17339, + 22.59993, + 22.10201, + 22.43105, + 22.87855, + 22.1498, + 22.15655, + 22.61607, + 22.18304, + 22.16694, + 22.84842, + 22.18667, + 22.20254, + 22.13703, + 22.1425, + 22.61908, + 22.13857, + 22.28426, + 22.12005, + 22.24491, + 22.49138, + 22.13086, + 22.149, + 22.17474, + 22.31891, + 22.19635, + 22.27147, + 22.245, + 22.15662, + 22.15245, + 22.14748, + 22.31566, + 22.22819, + 22.0779, + 22.12848, + 22.07462, + 22.24551, + 22.30577, + 22.48118, + 22.14043, + 22.24871, + 22.18597, + 22.12547, + 22.45964, + 22.08512, + 22.19704, + 22.53797, + 22.15965, + 22.17251, + 22.9695, + 22.12164, + 22.0741, + 23.49174, + 22.13247, + 22.14514, + 23.55108, + 22.4328, + 22.1622, + 23.46092, + 22.09899, + 22.17376, + 22.93211, + 22.28347, + 22.24711, + 22.58224, + 22.12082, + 22.12964, + 22.19894, + 22.17617, + 22.31262, + 22.23008, + 22.22007, + 22.0912, + 22.12377, + 22.43474, + 22.12168, + 22.24844, + 22.11504, + 22.1172, + 22.1757, + 22.11972, + 22.25583, + 22.13457, + 22.483, + 22.20644, + 22.07216, + 22.2421, + 22.1586, + 22.14987, + 22.45692, + 22.07339, + 22.16737, + 22.97819, + 22.14034, + 22.24947, + 22.5672, + 22.13059, + 22.11391, + 23.27428, + 22.30972, + 22.14038, + 23.33258, + 22.14281, + 22.10126, + 
23.25173, + 22.12643, + 22.11474, + 24.79832, + 36.35246, + 23.34236, + 22.45186, + 22.3505, + 24.35035, + 44.27159, + 24.09615, + 22.9735, + 22.12124, + 22.46562, + 23.01711, + 22.21056, + 22.13922, + 22.85934, + 22.16744, + 22.21346, + 23.04249, + 22.16884, + 22.16901, + 23.10603, + 22.17805, + 22.22349, + 22.6018, + 22.62306, + 22.13406, + 22.16456, + 22.21091, + 22.96232, + 22.16914, + 22.1363, + 22.90742, + 22.18831, + 22.17849, + 22.24841, + 22.12546, + 22.14582, + 22.17622, + 22.46786, + 22.13009, + 22.23982, + 22.50402, + 22.19722, + 22.17025, + 22.14417, + 22.46392, + 22.14668, + 22.16472, + 22.16134, + 22.15765, + 22.22708, + 22.27921, + 22.35847, + 22.30508, + 22.16849, + 22.11531, + 22.42502, + 22.2297, + 22.16406, + 22.99023, + 22.19672, + 22.12043, + 22.78069, + 22.14125, + 22.39803, + 22.86991, + 22.12276, + 22.0988, + 22.83719, + 22.18489, + 22.30305, + 23.35031, + 22.13494, + 22.18387, + 23.73687, + 22.18075, + 22.15899, + 23.37286, + 22.37316, + 22.30837, + 22.8721, + 22.16494, + 22.11476, + 22.16614, + 22.19855, + 22.444, + 22.15477, + 22.17651, + 22.27273, + 22.17506, + 22.20785, + 22.15306, + 22.1285, + 22.1735, + 22.12963, + 22.4039, + 22.16245, + 22.32606, + 22.15952, + 22.16066, + 22.07468, + 22.17447, + 22.16543, + 22.15152, + 22.39188, + 22.29308, + 22.44995, + 22.13458, + 22.11372, + 22.16205, + 22.11089, + 22.25243, + 22.23583, + 22.44207, + 22.20432, + 22.33517, + 22.16782, + 22.50783, + 22.2033, + 22.19896, + 22.22855, + 22.22321, + 22.25639, + 22.29443, + 22.37464, + 22.23139, + 22.22269, + 22.30537, + 22.44663, + 22.19866, + 22.16419, + 22.16455, + 22.18301, + 22.32632, + 22.31321, + 22.27201, + 22.19892, + 22.30745, + 22.34024, + 22.17171, + 22.39589, + 22.18993, + 22.46068, + 22.25658, + 24.16375, + 23.92321, + 22.30729, + 22.13935, + 23.24818, + 22.11272, + 22.10558, + 23.38726, + 22.22758, + 22.10861, + 23.46488, + 22.10426, + 22.20886, + 22.9758, + 22.32598, + 22.20423, + 30.33943, + 22.15539, + 22.1042, + 22.45416, + 22.11073, + 22.268, + 22.69603, + 22.0952, + 22.11685, + 22.07027, + 22.10584, + 22.15115, + 22.30869, + 22.11352, + 23.48902, + 22.14596, + 22.149, + 22.16693, + 22.11947, + 22.11702, + 22.13901, + 22.10284, + 22.06163, + 22.09249, + 22.75618, + 22.20965, + 22.08725, + 22.26911, + 22.1724, + 22.08987, + 22.11494, + 22.18181, + 22.11005, + 22.19859, + 22.25121, + 22.23181, + 22.16117, + 22.4684, + 22.37384, + 22.13467, + 22.68775, + 22.09272, + 22.5173, + 22.99537, + 22.1063, + 22.27278, + 23.52777, + 22.10268, + 22.24326, + 23.17265, + 22.24969, + 22.26817, + 22.77222, + 22.26385, + 22.27297, + 22.24592, + 22.08224, + 22.23805, + 22.12017, + 22.10214, + 22.47179, + 22.08924, + 22.10815, + 22.13634, + 22.27741, + 104.73205, + 22.60669, + 22.28951, + 22.27221, + 22.25025, + 22.25406, + 22.23855, + 22.22173, + 22.46257, + 22.23242, + 22.32552, + 22.68991, + 22.19059, + 22.31979, + 22.82085, + 22.2321, + 22.32698, + 23.67177, + 22.3209, + 22.2611, + 23.40699, + 22.24295, + 22.20141, + 23.44636, + 22.30075, + 22.34236, + 22.58054, + 22.26764, + 22.32465, + 22.37762, + 22.3666, + 22.19189, + 22.31503, + 22.20973, + 22.43682, + 22.42813, + 22.23632, + 22.34831, + 22.22889, + 22.2004, + 22.3289, + 26.72219, + 22.20693, + 22.24854, + 22.29241, + 23.95484, + 22.32646, + 24.94179, + 22.45592, + 22.32752, + 22.23483, + 22.27381, + 22.1432, + 22.36125, + 22.16894, + 22.19653, + 22.33387, + 22.23896, + 22.30297, + 22.19481, + 22.22981, + 22.16392, + 22.17665, + 22.64811, + 22.47699, + 22.30692, + 22.83654, + 22.20083, + 22.23779, + 23.31463, + 
22.35145, + 22.37234, + 23.6638, + 22.19647, + 22.33292, + 23.40368, + 22.21014, + 22.26415, + 23.00915, + 22.19072, + 22.2352, + 23.30064, + 22.20064, + 22.17496, + 22.65209, + 22.27287, + 22.16402, + 22.45403, + 22.20753, + 22.47796, + 22.37768, + 22.29129, + 22.19474, + 22.35811, + 22.25567, + 22.52566, + 22.34757, + 22.21695, + 22.29704, + 22.18918, + 22.19948, + 22.16968, + 22.24769, + 22.35874, + 22.18427, + 22.18135, + 22.18106, + 22.36706, + 22.20303, + 22.70529, + 22.22367, + 22.34332, + 22.85867, + 99.16663, + 22.14855, + 22.30119, + 22.16039, + 22.15292, + 22.12516, + 22.12736, + 22.4271, + 22.08621, + 22.17026, + 22.0794, + 22.20969, + 22.07803, + 22.39676, + 22.27253, + 22.08304, + 22.14433, + 22.26805, + 22.17376, + 22.19201, + 22.80214, + 22.13867, + 22.13145, + 22.4191, + 22.39882, + 22.45801, + 22.73377, + 22.09249, + 22.09398, + 22.94902, + 22.07003, + 22.14707, + 23.43768, + 22.07171, + 22.23931, + 22.98679, + 22.05136, + 22.17919, + 22.69357, + 22.17714, + 22.06069, + 22.31436, + 22.85199, + 22.02283, + 22.05677, + 22.05839, + 22.21271, + 22.08224, + 22.02952, + 22.14142, + 22.04819, + 22.08117, + 22.0568, + 22.14012, + 22.04499, + 22.02592, + 22.04916, + 22.0291, + 22.26844, + 22.00714, + 22.5877, + 22.08651, + 22.07325, + 22.16063, + 22.53217, + 22.33549, + 22.34411, + 22.34349, + 22.13511, + 22.7202, + 22.03777, + 22.06087, + 22.8264, + 22.09564, + 22.105, + 22.78717, + 22.07502, + 22.04396, + 23.41358, + 22.17254, + 22.31907, + 23.13572, + 22.06482, + 22.05608, + 22.54637, + 22.05076, + 22.32453, + 22.32633, + 22.04345, + 22.03181, + 22.68133, + 22.23248, + 22.04517, + 22.44096, + 22.02191, + 22.05021, + 22.9038, + 22.13408, + 22.22483, + 22.1612, + 22.01901, + 22.06094, + 22.04995, + 22.00261, + 22.03177, + 22.33237, + 22.06599, + 22.18676, + 22.27066, + 22.06088, + 22.10319, + 22.3554, + 22.43029, + 22.08364, + 101.82247, + 22.26788, + 22.41176, + 22.31658, + 22.22171, + 22.26953, + 22.38897, + 22.35295, + 22.26078, + 22.38658, + 22.22511, + 22.23323, + 22.19975, + 22.21646, + 22.20002, + 22.21175, + 22.22125, + 22.23533, + 22.22544, + 22.21968, + 22.38773, + 22.25294, + 22.29129, + 22.19592, + 22.56338, + 22.1982, + 22.50022, + 22.22738, + 22.17314, + 22.58518, + 22.20907, + 22.56643, + 22.95884, + 22.17963, + 22.17697, + 22.86739, + 22.26982, + 22.19184, + 23.14527, + 22.61316, + 22.19651, + 23.51628, + 22.3513, + 22.21668, + 23.052, + 22.21562, + 22.69276, + 22.84265, + 22.26288, + 22.36787, + 22.3193, + 22.24286, + 22.27066, + 22.45911, + 22.17954, + 22.20463, + 22.20747, + 22.43776, + 22.22131, + 22.20975, + 22.31592, + 22.1724, + 22.27687, + 22.1971, + 22.18341, + 22.44957, + 22.30224, + 22.41065, + 22.26056, + 22.22036, + 36.63224, + 22.20904, + 22.62301, + 22.2281, + 22.24924, + 22.23617, + 22.26707, + 22.18614, + 22.38173, + 22.68426, + 22.2443, + 22.467, + 22.23016, + 22.2359, + 22.74637, + 22.36831, + 22.48382, + 23.08908, + 22.20741, + 22.19456, + 23.7286, + 22.42771, + 22.27004, + 23.24859, + 22.28664, + 22.23396, + 23.71086, + 22.33778, + 22.20401, + 22.92546, + 22.28126, + 22.27238, + 22.53488, + 22.45289, + 22.26193, + 22.18085, + 22.23294, + 22.20978, + 22.24332, + 22.23108, + 22.27663, + 22.22038, + 22.66624, + 27.24293, + 52.30522, + 23.02974, + 22.1045, + 22.12346, + 22.54548, + 22.10596, + 22.08834, + 22.92914, + 22.13263, + 22.07696, + 23.18525, + 22.0615, + 22.07617, + 23.05637, + 22.54091, + 22.06504, + 23.16941, + 22.22867, + 22.09883, + 23.03754, + 22.07617, + 22.29193, + 22.07632, + 22.06766, + 22.09401, + 22.08058, + 22.5305, + 
22.23272, + 22.20265, + 22.05807, + 22.10015, + 22.09801, + 22.04708, + 22.12919, + 22.03309, + 22.19255, + 22.06617, + 22.15741, + 22.14409, + 22.10266, + 22.14514, + 22.06529, + 22.03475, + 22.36857, + 22.51011, + 22.07271, + 22.43132, + 22.13092, + 22.07945, + 22.88389, + 22.02914, + 22.0468, + 23.04355, + 22.06601, + 22.32512, + 23.21267, + 22.05052, + 22.115, + 22.91224, + 22.02027, + 22.43867, + 23.37655, + 23.97474, + 71.25984, + 41.91306, + 22.15816, + 22.07058, + 22.80718, + 22.19788, + 22.10942, + 22.20605, + 22.14482, + 22.13974, + 22.17241, + 22.13096, + 22.08317, + 22.04396, + 22.08633, + 22.12318, + 22.08804, + 22.3781, + 22.09858, + 22.08912, + 22.06697, + 22.05695, + 22.06694, + 22.20087, + 22.27139, + 22.01606, + 22.16132, + 22.06047, + 22.09811, + 22.24228, + 22.24337, + 22.22391, + 22.36936, + 22.18073, + 22.05798, + 22.66177, + 22.03016, + 22.05562, + 22.4316, + 22.13376, + 22.04187, + 22.69404, + 22.06206, + 22.03522, + 23.21941, + 22.19, + 22.18488, + 23.02859, + 22.24261, + 22.46124, + 22.22919, + 22.21079, + 22.23019, + 22.1716, + 22.417, + 22.23801, + 22.19394, + 22.18927, + 22.16575, + 22.41394, + 22.33403, + 22.41359, + 22.25564, + 22.6107, + 22.2107, + 22.25703, + 22.24578, + 22.21567, + 22.43124, + 22.16546, + 22.26442, + 22.15163, + 22.23296, + 22.16571, + 22.15903, + 22.33734, + 22.22511, + 22.15729, + 22.28251, + 22.22234, + 22.15715, + 22.19457, + 22.41853, + 22.1707, + 22.16528, + 22.90154, + 22.104, + 22.15706, + 22.87638, + 22.25481, + 22.13235, + 22.8171, + 22.17582, + 22.16652, + 22.94389, + 22.42742, + 22.29331, + 23.01847, + 22.16805, + 22.13573, + 23.13758, + 22.25339, + 22.34294, + 22.89067, + 22.16572, + 22.16828, + 22.28816, + 22.49986, + 22.23072, + 22.38644, + 22.12899, + 22.11739, + 22.28425, + 22.16946, + 22.1681, + 22.1273, + 22.12382, + 22.10526, + 22.1646, + 22.16154, + 22.11507, + 22.57757, + 22.10374, + 22.12166, + 22.15047, + 22.50162, + 22.14833, + 22.17366, + 22.25464, + 22.26551, + 23.50498, + 22.73041, + 22.40403, + 23.29862, + 22.22557, + 22.13617, + 22.76498, + 22.20274, + 22.56885, + 22.75225, + 22.1825, + 22.15018, + 22.67589, + 22.35103, + 22.22574, + 22.83882, + 22.17659, + 22.17158, + 22.15542, + 22.18397, + 22.93985, + 22.15892, + 22.40788, + 22.4053, + 22.14476, + 22.64534, + 22.28369, + 22.21493, + 22.12785, + 22.11922, + 22.18312, + 22.10741, + 22.1438, + 22.14304, + 22.09958, + 22.19423, + 22.28677, + 22.14581, + 22.16098, + 22.15689, + 22.16352, + 22.23832, + 22.14916, + 22.55257, + 22.13931, + 22.12494, + 22.18276, + 22.14001, + 22.44161, + 22.17003, + 22.10938, + 22.42749, + 22.17772, + 22.21296, + 22.68479, + 22.14385, + 22.11939, + 23.23298, + 22.15392, + 22.15043, + 23.08218, + 22.55487, + 22.17844, + 23.12339, + 22.10373, + 22.15551, + 23.02888, + 22.19445, + 22.14878, + 22.94901, + 22.14322, + 22.1313, + 22.56967, + 22.11371, + 22.34008, + 22.37412, + 22.16953, + 22.23321, + 22.12283, + 22.58849, + 22.18116, + 22.40851, + 22.14007, + 22.40728, + 22.1991, + 22.18819, + 22.19996, + 22.17234, + 22.31612, + 22.17664, + 22.14698, + 22.1763, + 22.1763, + 22.24207, + 22.15693, + 22.16315, + 22.16435, + 22.81799, + 22.29942, + 22.20296, + 22.54365, + 25.52235, + 22.15784, + 22.4192, + 22.26017, + 22.16298, + 22.47279, + 22.36483, + 22.11842, + 22.69941, + 22.11577, + 22.16863, + 22.01176, + 22.22205, + 21.9872, + 22.00834, + 22.02707, + 22.04397, + 22.1899, + 22.01313, + 21.9813, + 21.95711, + 22.12524, + 21.96139, + 22.03709, + 22.11153, + 21.94281, + 22.37319, + 21.99951, + 22.00521, + 22.02443, + 21.97954, + 22.16246, 
+ 21.99, + 22.10315, + 21.95831, + 21.94283, + 22.05901, + 22.18657, + 21.98883, + 21.98006, + 22.00507, + 22.11073, + 22.20488, + 21.94916, + 22.41868, + 22.71345, + 21.96047, + 21.96431, + 23.44101, + 21.92707, + 21.94534, + 23.01024, + 21.97376, + 21.94591, + 22.32252, + 21.95587, + 21.98852, + 22.4774, + 22.04141, + 22.07168, + 22.3629, + 22.02193, + 21.94847, + 22.52133, + 21.99339, + 21.97651, + 22.85852, + 21.94556, + 22.20845, + 22.20076, + 22.00715, + 21.99645, + 22.15719, + 21.96518, + 21.96064, + 22.10975, + 21.95919, + 22.27851, + 22.11466, + 21.95557, + 21.96246, + 22.26892, + 21.94298, + 22.12448, + 22.58432, + 22.13183, + 22.04597, + 21.98188, + 22.27192, + 21.94932, + 21.94599, + 22.71998, + 22.15013, + 21.95332, + 22.53628, + 22.06499, + 22.03487, + 22.92728, + 21.9577, + 21.93391, + 22.37597, + 21.95252, + 22.33879, + 22.43639, + 21.90894, + 21.91037, + 22.35445, + 21.95373, + 21.98795, + 22.50773, + 22.1386, + 21.97501, + 22.23404, + 22.345, + 21.96362, + 22.03652, + 21.96132, + 22.1345, + 22.05909, + 21.9686, + 22.36273, + 22.37979, + 21.9539, + 21.94893, + 22.19798, + 22.11944, + 22.15162, + 22.26939, + 22.14744, + 22.14287, + 22.63964, + 22.17126, + 22.15165, + 23.0408, + 22.13841, + 22.13303, + 23.27403, + 22.12087, + 22.10168, + 23.23486, + 22.15747, + 22.14743, + 23.27978, + 22.16347, + 22.08691, + 23.23901, + 22.16133, + 22.14168, + 23.17455, + 22.06886, + 22.13114, + 23.16213, + 22.30783, + 22.11336, + 23.26329, + 22.06549, + 22.07211, + 22.16437, + 22.08932, + 22.42285, + 22.0994, + 22.09114, + 22.15689, + 22.47469, + 22.0947, + 28.55794, + 69.96193, + 22.13434, + 62.76445, + 22.35301, + 22.20417, + 22.10021, + 22.09851, + 22.09592, + 22.14601, + 22.30364, + 22.07823, + 22.50219, + 22.21628, + 22.06474, + 22.10215, + 22.22407, + 22.29054, + 22.1174, + 26.53686, + 31.20536, + 22.06892, + 23.04956, + 24.16646, + 22.31828, + 22.80315, + 22.10885, + 22.17754, + 23.01577, + 22.13133, + 24.1609, + 30.29538, + 22.11376, + 22.09667, + 23.02923, + 22.09142, + 22.07874, + 22.80915, + 22.24058, + 22.13542, + 22.65468, + 22.38559, + 22.11647, + 22.22066, + 22.29338, + 22.11706, + 22.3686, + 22.09114, + 22.39197, + 22.12928, + 22.37087, + 22.09104, + 22.09063, + 22.11654, + 22.13602, + 22.1319, + 22.24958, + 22.30654, + 22.17007, + 22.54044, + 22.22475, + 22.14091, + 22.39241, + 22.0842, + 22.3842, + 22.18687, + 22.39611, + 22.1278, + 22.3284, + 22.1154, + 22.09646, + 22.81691, + 22.18181, + 23.37869, + 22.1495, + 22.14219, + 22.97886, + 22.17331, + 22.12148, + 22.64005, + 22.27992, + 22.28979, + 22.32475, + 22.12771, + 22.09844, + 22.40401, + 22.1298, + 22.19422, + 22.12317, + 22.20042, + 22.11794, + 22.12467, + 22.17046, + 22.09319, + 22.25505, + 22.10802, + 22.2528, + 22.12938, + 22.14415, + 22.29464, + 22.11598, + 22.12429, + 22.14322, + 22.22054, + 22.15059, + 22.1426, + 22.08842, + 22.13187, + 22.09539, + 22.12463, + 22.99156, + 22.2206, + 22.17205, + 22.54719, + 22.10391, + 22.23367, + 22.76334, + 22.07503, + 22.0559, + 23.17775, + 22.2461, + 22.25501, + 23.40468, + 22.08451, + 22.1167, + 22.96407, + 22.29052, + 22.23662, + 22.80043, + 22.07867, + 22.14055, + 22.80778, + 22.15202, + 22.13095, + 22.30886, + 22.09829, + 22.1017, + 22.30188, + 22.13423, + 22.2188, + 22.11035, + 22.11863, + 22.13763, + 22.26758, + 22.145, + 22.14197, + 22.28991, + 22.09615, + 22.0942, + 22.14376, + 22.08656, + 22.0449, + 22.09098, + 22.16193, + 22.11937, + 22.11731, + 22.09497, + 22.40587, + 22.10351, + 22.24368, + 22.29861, + 22.0891, + 22.45905, + 22.10118, + 22.28831, + 23.44521, + 
22.18075, + 22.15478, + 23.5301, + 22.10188, + 22.07687, + 23.14587, + 22.1344, + 22.10284, + 22.46515, + 22.25157, + 22.07917, + 22.74706, + 22.10004, + 22.15853, + 22.56626, + 22.1016, + 22.30594, + 22.71221, + 22.05101, + 22.1266, + 22.18213, + 22.27545, + 23.55767, + 22.50461, + 22.37307, + 23.35459, + 22.13143, + 22.80335, + 22.11602, + 22.36897, + 22.56225, + 22.17821, + 22.14066, + 22.63053, + 22.25814, + 22.34772, + 22.18425, + 22.15824, + 22.18433, + 22.21728, + 22.3493, + 22.14707, + 22.14056, + 22.13981, + 22.26034, + 22.15999, + 22.11378, + 22.3432, + 22.12814, + 22.2546, + 22.14994, + 22.42207, + 22.17741, + 22.13358, + 22.18267, + 22.33383, + 22.15626, + 22.23825, + 22.95492, + 22.2781, + 22.13766, + 23.11202, + 22.14552, + 22.13851, + 23.22779, + 22.12749, + 22.1852, + 23.11909, + 22.14341, + 22.44931, + 23.18979, + 22.3004, + 22.15336, + 22.93739, + 22.10766, + 22.11832, + 22.32259, + 22.09604, + 22.15343, + 22.14026, + 22.28667, + 22.17037, + 22.10376, + 22.25451, + 22.10846, + 22.14132, + 22.14843, + 22.56039, + 22.09906, + 22.1378, + 22.1043, + 22.25665, + 22.08482, + 22.1022, + 22.1219, + 22.12338, + 22.11497, + 22.09806, + 22.37114, + 22.1223, + 22.11381, + 22.7123, + 22.13471, + 22.11115, + 22.80238, + 22.45191, + 22.28952, + 23.10402, + 22.13401, + 22.12466, + 23.15631, + 22.1558, + 22.11168, + 23.17534, + 22.12859, + 22.11271, + 23.08121, + 22.13197, + 22.1515, + 22.65207, + 22.30597, + 22.10917, + 22.24205, + 22.60878, + 22.09097, + 22.14094, + 22.14458, + 22.17201, + 22.13523, + 22.12548, + 22.16414, + 22.12026, + 22.12175, + 22.19186, + 22.29485, + 22.33278, + 23.3078, + 22.73304, + 22.44956, + 22.97514, + 22.28443, + 22.26082, + 22.75869, + 22.27789, + 22.48981, + 22.90584, + 22.24257, + 22.95042, + 22.29124, + 22.47709, + 22.7493, + 22.24822, + 22.23141, + 22.3471, + 22.34644, + 22.23412, + 22.33865, + 22.24652, + 22.44773, + 22.21963, + 22.29181, + 22.3559, + 22.21869, + 22.38225, + 22.19857, + 22.1889, + 22.18033, + 22.18476, + 22.29452, + 22.17247, + 22.18145, + 22.20088, + 22.61408, + 22.27509, + 22.20253, + 22.44377, + 22.2188, + 22.25543, + 22.65273, + 22.3446, + 22.14042, + 22.85975, + 22.35525, + 22.22577, + 22.76614, + 22.21959, + 22.20517, + 22.91721, + 22.19556, + 22.33519, + 23.31486, + 22.2228, + 22.25852, + 23.22495, + 22.23761, + 22.29332, + 22.99736, + 22.36848, + 22.2271, + 22.52477, + 22.28017, + 22.17957, + 22.41324, + 22.27419, + 22.26945, + 22.53473, + 22.28682, + 22.24526, + 22.68783, + 22.24592, + 22.32056, + 22.3266, + 22.24701, + 22.33195, + 22.34563, + 22.60168, + 22.287, + 22.36203, + 22.2186, + 22.45632, + 22.27663, + 22.41838, + 22.43779, + 22.29759, + 22.60786, + 22.23216, + 22.35389, + 22.54415, + 22.30203, + 22.31045, + 22.56062, + 22.25634, + 22.23882, + 22.89479, + 22.26127, + 22.17792, + 23.28277, + 22.21611, + 22.30095, + 22.99949, + 22.1849, + 22.22575, + 22.60047, + 22.2124, + 22.36786, + 22.2244, + 22.21203, + 98.3119, + 22.25833, + 22.33984, + 22.30907, + 22.23459, + 22.23605, + 22.21159, + 22.50951, + 22.31761, + 22.43768, + 22.16603, + 22.15476, + 22.18377, + 22.18599, + 22.34574, + 22.20304, + 22.18814, + 22.21121, + 22.36342, + 22.26305, + 22.32367, + 23.75264, + 22.46272, + 22.38041, + 23.13616, + 22.27755, + 22.23242, + 22.94668, + 22.16014, + 22.53244, + 22.92565, + 22.20641, + 22.23453, + 22.8928, + 22.27049, + 22.20821, + 22.79067, + 22.16702, + 22.62054, + 22.15549, + 22.18171, + 22.64815, + 22.27023, + 22.2545, + 22.1845, + 22.17325, + 22.55884, + 22.17352, + 22.24216, + 22.13593, + 22.14586, + 22.20862, + 
22.17643, + 22.12239, + 22.16304, + 22.14181, + 22.09371, + 22.41703, + 22.29277, + 22.14284, + 22.10438, + 22.16169, + 22.25554, + 22.29576, + 22.5565, + 22.13078, + 22.41166, + 22.26812, + 22.25377, + 22.76081, + 22.12841, + 22.3889, + 23.38486, + 22.30836, + 22.30256, + 23.05643, + 22.28499, + 22.20536, + 23.07939, + 22.23701, + 22.16145, + 23.01979, + 22.56773, + 22.40174, + 22.60494, + 22.30154, + 22.15902, + 22.51167, + 22.34958, + 22.19127, + 22.28122, + 22.16833, + 22.18465, + 22.15229, + 22.1467, + 22.28804, + 22.15804, + 22.21382, + 22.13951, + 22.16174, + 22.44447, + 22.15885, + 22.30613, + 22.15337, + 22.30589, + 22.1999, + 22.1745, + 22.27547, + 22.33437, + 22.28582, + 22.1519, + 22.3119, + 22.8598, + 22.16582, + 22.23767, + 23.01784, + 22.33382, + 22.15389, + 23.28004, + 22.14173, + 22.15368, + 23.09755, + 22.22303, + 22.15798, + 22.78196, + 22.2945, + 22.1587, + 22.73261, + 22.17113, + 22.30944, + 22.71167, + 22.10199, + 22.14638, + 22.30165, + 22.19011, + 22.32598, + 22.15787, + 22.27633, + 22.18818, + 22.29677, + 22.19943, + 22.15767, + 22.19997, + 22.48665, + 22.14347, + 22.17856, + 22.3226, + 22.18066, + 22.14245, + 22.2881, + 22.31239, + 22.13641, + 22.14189, + 22.1446, + 22.16268, + 22.39175, + 22.14793, + 22.19722, + 23.45894, + 22.13176, + 22.1367, + 23.44023, + 22.1299, + 22.4474, + 24.83104, + 22.16282, + 22.17059, + 23.12659, + 22.54311, + 22.14508, + 22.87791, + 22.29035, + 22.10859, + 22.60427, + 22.32424, + 22.14501, + 22.2353, + 22.11713, + 23.62788, + 76.19838, + 35.15617, + 53.52323, + 22.13418, + 22.11021, + 22.1342, + 22.27757, + 22.11459, + 22.13136, + 22.11779, + 22.38937, + 22.21383, + 22.12602, + 22.31502, + 22.15772, + 22.15176, + 22.12988, + 22.18483, + 22.23671, + 22.12091, + 22.46193, + 22.39495, + 22.09328, + 22.12302, + 22.3467, + 22.52687, + 22.13686, + 22.26756, + 22.67041, + 22.11642, + 22.11507, + 23.23445, + 22.19371, + 22.11082, + 23.07766, + 22.1318, + 22.13628, + 22.75204, + 22.44869, + 22.2348, + 23.24037, + 22.12242, + 22.099, + 23.1955, + 22.08957, + 22.09665, + 22.25121, + 22.12469, + 22.16928, + 22.36078, + 22.11298, + 22.25122, + 22.13628, + 22.17261, + 22.11671, + 22.11718, + 22.58086, + 22.29782, + 22.30813, + 22.10063, + 22.30149, + 22.1296, + 22.11914, + 22.21392, + 22.19986, + 23.48234, + 22.49181, + 22.45885, + 23.25093, + 22.21008, + 22.14938, + 23.1092, + 22.17394, + 22.65149, + 22.96326, + 22.1142, + 22.11965, + 22.84835, + 22.18065, + 22.29337, + 23.03745, + 22.14559, + 22.18902, + 23.22768, + 22.22001, + 22.13229, + 22.6899, + 22.64023, + 22.16417, + 22.70918, + 22.22631, + 22.10449, + 22.76635, + 22.11324, + 22.48252, + 22.20778, + 22.09545, + 22.21494, + 22.37453, + 22.1122, + 23.61911, + 22.24059, + 22.12228, + 22.88989, + 22.29422, + 22.21959, + 22.4712, + 22.12836, + 22.20519, + 22.22461, + 22.33928, + 22.55437, + 22.13461, + 22.11088, + 22.13063, + 22.24762, + 22.14007, + 22.1073, + 22.15536, + 22.15056, + 22.2833, + 22.17607, + 22.45576, + 22.12186, + 22.11487, + 22.28336, + 22.12592, + 22.39547, + 22.42283, + 22.65163, + 22.24287, + 22.62111, + 22.30455, + 22.13848, + 22.693, + 22.17488, + 22.27557, + 23.01438, + 22.11642, + 22.17809, + 22.93026, + 22.23291, + 22.41226, + 22.91538, + 22.13111, + 22.09849, + 23.16933, + 22.40582, + 22.13057, + 23.20319, + 22.09818, + 22.1228, + 26.65474, + 22.51962, + 22.09971, + 22.97486, + 22.13328, + 22.25854, + 22.71712, + 22.11959, + 22.11576, + 22.2498, + 22.48635, + 22.14451, + 22.28473, + 22.5087, + 22.11036, + 22.39715, + 22.14277, + 22.47507, + 22.10215, + 22.29449, + 
22.41286, + 22.12502, + 22.64326, + 22.24268, + 22.69601, + 22.64694, + 22.12512, + 22.06712, + 22.27097, + 22.04664, + 22.02911, + 22.08369, + 22.06847, + 22.2674, + 22.05704, + 22.03395, + 22.02212, + 22.01405, + 22.10292, + 22.04765, + 22.1624, + 22.01057, + 22.42028, + 22.04494, + 22.04976, + 22.1887, + 23.97383, + 28.59691, + 27.46884, + 22.09613, + 22.00944, + 23.47335, + 22.03805, + 22.02014, + 22.19552, + 22.05961, + 22.02592, + 22.0102, + 22.23346, + 22.04236, + 22.02031, + 22.0292, + 22.01072, + 22.01593, + 22.00968, + 22.36829, + 22.02921, + 22.15732, + 22.00256, + 22.1639, + 22.54104, + 22.27217, + 22.02895, + 23.10168, + 22.26862, + 22.01213, + 23.25629, + 22.07204, + 22.27703, + 22.89068, + 22.05503, + 22.04289, + 22.69295, + 22.12263, + 21.98553, + 22.57166, + 22.01637, + 22.021, + 22.22902, + 22.39313, + 22.13025, + 21.99196, + 22.01081, + 22.01796, + 22.03293, + 22.07697, + 22.18752, + 21.99396, + 22.33779, + 22.02495, + 22.05429, + 21.98904, + 22.11115, + 22.04974, + 22.02577, + 22.07866, + 21.98906, + 22.39023, + 21.96216, + 22.2517, + 22.23386, + 22.00722, + 22.06658, + 22.58047, + 22.26459, + 22.00987, + 23.29017, + 22.0715, + 22.02243, + 23.29697, + 21.98552, + 22.00917, + 23.33665, + 22.15608, + 22.03961, + 22.96184, + 22.03391, + 22.16316, + 22.40831, + 22.01907, + 22.13336, + 22.22098, + 22.01658, + 21.99148, + 22.07202, + 22.05245, + 22.06187, + 22.02708, + 22.0033, + 22.03901, + 22.02391, + 22.02047, + 22.23359, + 22.13673, + 22.15379, + 23.38139, + 22.53242, + 22.40147, + 22.08361, + 22.35783, + 22.14361, + 22.08543, + 22.14679, + 22.06928, + 22.13064, + 22.09093, + 22.40817, + 22.0675, + 22.18981, + 22.06542, + 22.02903, + 22.07273, + 22.06194, + 22.22455, + 22.11695, + 22.07998, + 22.09878, + 22.24274, + 22.06553, + 22.18964, + 22.16847, + 22.08908, + 22.07437, + 22.07371, + 22.33582, + 22.13176, + 22.09109, + 22.08477, + 22.58906, + 22.18727, + 22.26394, + 22.89701, + 22.30961, + 22.08732, + 23.13605, + 22.25897, + 22.2024, + 23.02925, + 22.08079, + 22.32117, + 23.33656, + 22.0643, + 22.25512, + 22.97935, + 22.11083, + 22.06071, + 22.99703, + 22.0818, + 22.07658, + 23.13362, + 22.08196, + 22.06038, + 22.32988, + 22.40493, + 22.06483, + 22.08828, + 22.28645, + 22.05807, + 22.05097, + 22.0599, + 22.26943, + 22.05993, + 22.08459, + 22.22258, + 22.05577, + 22.06454, + 22.09444, + 22.07581, + 22.05407, + 22.05447, + 22.06135, + 22.19512, + 22.07505, + 22.08514, + 22.09018, + 22.03577, + 22.13656, + 22.06639, + 22.23185, + 22.22575, + 22.7029, + 22.08141, + 22.06996, + 22.79906, + 22.03634, + 22.08697, + 23.15145, + 22.08298, + 22.08974, + 22.98047, + 22.02896, + 22.0517, + 23.07168, + 22.23171, + 22.05078, + 22.92055, + 22.23906, + 22.04827, + 22.6036, + 22.03553, + 22.01876, + 22.14338, + 22.03045, + 22.04494, + 22.00404, + 22.06206, + 22.05579, + 22.0682, + 22.15569, + 22.25482, + 22.1522, + 22.20773, + 22.66793, + 22.10077, + 22.19864, + 22.92173, + 22.34613, + 22.16071, + 22.8627, + 22.15788, + 22.20913, + 22.80749, + 22.28639, + 22.22906, + 22.91712, + 22.21992, + 22.10009, + 22.63514, + 22.28119, + 22.30845, + 22.30034, + 22.33763, + 22.49121, + 22.22773, + 22.25148, + 23.10453, + 22.22005, + 22.21039, + 23.45073, + 22.23287, + 22.24615, + 23.33691, + 22.18674, + 22.19884, + 23.29456, + 22.30191, + 22.1693, + 22.5558, + 22.17962, + 22.34188, + 22.24404, + 22.2818, + 22.21408, + 22.17356, + 22.29799, + 22.20556, + 22.42003, + 22.20857, + 22.16794, + 22.17568, + 22.17021, + 22.19748, + 22.1858, + 22.3408, + 22.14927, + 22.64574, + 22.20172, + 22.19735, + 
22.34011, + 22.151, + 22.30382, + 22.67393, + 22.16991, + 22.17891, + 22.78298, + 22.2694, + 22.1732, + 23.53723, + 22.1954, + 22.14768, + 23.44664, + 22.15861, + 22.3066, + 23.4678, + 22.28481, + 22.23692, + 22.38347, + 22.30437, + 22.17762, + 85.69357, + 26.05182, + 22.13464, + 22.68467, + 44.12211, + 23.60427, + 22.31894, + 22.41063, + 22.25844, + 22.31148, + 22.1811, + 22.20852, + 22.67125, + 22.15725, + 22.43416, + 22.18386, + 22.13535, + 22.20669, + 22.14434, + 22.20536, + 22.24916, + 22.2579, + 22.16569, + 22.14116, + 22.1251, + 22.21198, + 22.35962, + 22.20946, + 22.44267, + 22.14181, + 22.51004, + 22.35907, + 22.21569, + 22.28595, + 22.57448, + 22.22769, + 22.17286, + 23.22999, + 22.30339, + 22.16747, + 23.06975, + 22.15824, + 22.36233, + 23.52405, + 22.16982, + 22.29248, + 23.31461, + 22.45673, + 22.70834, + 22.21004, + 22.19858, + 23.55759, + 24.40048, + 25.45925, + 24.54799, + 22.18995, + 22.13705, + 22.72186, + 22.18616, + 22.4262, + 22.83306, + 22.17848, + 22.16509, + 22.56974, + 22.13345, + 22.17874, + 22.79739, + 22.12083, + 22.17191, + 22.72615, + 22.13304, + 22.14131, + 22.65316, + 22.60612, + 22.1221, + 22.64332, + 22.24281, + 22.11845, + 22.14797, + 22.11282, + 22.95388, + 22.18239, + 22.12427, + 22.90953, + 22.30593, + 22.1269, + 22.52787, + 22.52999, + 22.12977, + 22.50165, + 22.48586, + 22.14554, + 22.23868, + 22.15025, + 22.39545, + 22.25827, + 22.18327, + 22.16616, + 22.1267, + 22.2322, + 22.14647, + 22.64237, + 22.13994, + 22.13984, + 22.17054, + 22.16124, + 22.33446, + 22.16855, + 22.45479, + 22.15133, + 22.14805, + 22.28934, + 22.30565, + 22.1553, + 22.31481, + 22.1494, + 22.12694, + 22.35941, + 22.13386, + 22.29727, + 22.37743, + 22.15605, + 22.13509, + 22.83535, + 22.1416, + 22.13944, + 23.30813, + 22.2882, + 22.15638, + 23.09331, + 22.27967, + 22.10267, + 22.62005, + 22.22771, + 22.4854, + 22.56649, + 22.16047, + 22.26528, + 22.63041, + 22.21485, + 22.13182, + 22.50123, + 22.14634, + 22.25712, + 22.30221, + 22.27126, + 22.26131, + 22.38047, + 22.35531, + 22.17483, + 22.28327, + 22.15102, + 22.14006, + 22.34709, + 22.11255, + 22.57836, + 22.28582, + 22.3182, + 22.15333, + 22.25862, + 22.41736, + 22.14971, + 22.12798, + 22.05725, + 22.1189, + 22.08777, + 21.9871, + 22.02674, + 21.9652, + 22.3894, + 21.9629, + 21.96916, + 22.07084, + 21.98032, + 22.08787, + 21.95312, + 22.24151, + 21.96968, + 22.26092, + 22.0704, + 21.98896, + 21.97335, + 21.97108, + 22.30925, + 21.93133, + 22.01282, + 21.94382, + 21.94129, + 21.97435, + 21.96218, + 22.30664, + 21.97312, + 21.90781, + 21.9544, + 22.10328, + 22.10118, + 21.92638, + 22.10578, + 22.08087, + 21.95187, + 22.024, + 22.04781, + 21.93244, + 22.45586, + 21.94182, + 22.19126, + 22.44053, + 22.59145, + 21.94529, + 22.7998, + 22.02333, + 21.94346, + 23.28782, + 21.9172, + 21.98843, + 22.69191, + 21.9297, + 22.17068, + 22.45259, + 22.02197, + 21.94125, + 22.01171, + 21.92182, + 21.97643, + 22.22745, + 22.52596, + 21.93607, + 21.93634, + 22.18567, + 21.92693, + 21.87371, + 22.04253, + 22.06289, + 21.97397, + 22.04379, + 21.94728, + 21.96546, + 22.02505, + 22.21399, + 22.03585, + 22.14121, + 21.93058, + 21.91269, + 22.60924, + 21.94764, + 22.08557, + 22.05277, + 21.94981, + 21.92587, + 22.47698, + 22.05984, + 21.95058, + 22.64668, + 21.93809, + 22.23211, + 23.2016, + 21.9254, + 21.99674, + 22.713, + 21.92072, + 21.92595, + 23.10071, + 21.92868, + 21.92577, + 22.31107, + 21.91951, + 21.89878, + 22.04094, + 22.01412, + 21.91925, + 36.99743, + 22.07171, + 22.05684, + 21.99286, + 21.91086, + 21.95043, + 37.7659, + 23.23805, + 
22.11635, + 22.06267, + 22.26073, + 22.04733, + 22.08739, + 22.04904, + 22.29041, + 22.02994, + 22.00787, + 22.07276, + 22.14648, + 22.03278, + 22.0057, + 22.01582, + 22.03705, + 22.03766, + 22.01802, + 22.0059, + 21.99902, + 22.06452, + 22.26234, + 22.14829, + 22.01105, + 21.96761, + 22.20418, + 22.02033, + 22.12236, + 22.11036, + 22.00084, + 22.2584, + 21.9891, + 22.12932, + 23.25622, + 21.985, + 22.0856, + 22.8834, + 22.01259, + 21.99641, + 22.95084, + 22.04333, + 22.01655, + 23.01243, + 22.19859, + 22.08599, + 22.5855, + 21.96317, + 22.0839, + 22.20175, + 22.14398, + 22.15551, + 21.97279, + 22.025, + 21.98846, + 21.93747, + 21.94308, + 21.98601, + 22.00131, + 22.10379, + 21.96197, + 21.99262, + 22.25563, + 21.99555, + 21.97565, + 22.0237, + 22.00526, + 22.09017, + 21.97322, + 22.28951, + 21.98999, + 21.96734, + 22.09062, + 21.99726, + 22.228, + 21.99841, + 22.17922, + 22.83472, + 22.00885, + 22.03252, + 23.54512, + 22.05196, + 21.99299, + 23.18927, + 21.95728, + 21.99422, + 23.08361, + 22.123, + 22.03043, + 22.49834, + 22.01993, + 21.98784, + 22.35422, + 22.01466, + 21.98565, + 22.1711, + 21.96919, + 22.03237, + 22.30408, + 22.00759, + 22.03562, + 22.01947, + 22.20849, + 21.98004, + 21.98386, + 22.14885, + 22.14906, + 22.13118, + 21.9956, + 22.33289, + 21.99279, + 21.99903, + 22.0232, + 22.00992, + 22.16997, + 21.99727, + 21.98512, + 22.0992, + 22.09843, + 23.11728, + 22.45273, + 22.2, + 21.98674, + 22.0368, + 22.16985, + 22.11212, + 22.0407, + 22.07895, + 22.6133, + 22.01129, + 22.07007, + 22.1428, + 21.98159, + 22.00739, + 22.00778, + 22.12806, + 22.00893, + 22.23254, + 22.06447, + 22.03369, + 21.98988, + 22.0062, + 22.26566, + 22.13457, + 21.99102, + 22.55205, + 22.36024, + 22.17485, + 23.00265, + 21.96775, + 21.97485, + 22.9294, + 22.02423, + 22.08535, + 23.08501, + 22.10341, + 22.20068, + 22.94464, + 22.02868, + 22.02156, + 22.65288, + 22.2367, + 21.9922, + 22.25684, + 22.45598, + 22.00954, + 22.11768, + 21.89281, + 22.1111, + 22.39623, + 21.98596, + 22.02725, + 22.1116, + 22.01302, + 22.0117, + 22.02031, + 21.99995, + 21.99934, + 22.10891, + 21.99479, + 22.0294, + 21.98634, + 22.33414, + 21.98768, + 22.17036, + 22.13312, + 22.00869, + 22.15352, + 22.21374, + 22.00058, + 22.06923, + 22.77846, + 22.11276, + 21.98947, + 23.00625, + 22.08583, + 21.94752, + 22.7972, + 22.16673, + 21.99947, + 23.13647, + 22.17495, + 22.00803, + 22.65398, + 22.0268, + 22.03376, + 22.62485, + 22.02085, + 22.07868, + 22.68809, + 21.96732, + 21.98695, + 22.36464, + 21.98573, + 22.14117, + 22.21013, + 21.99391, + 22.00853, + 22.34148, + 21.98298, + 22.24566, + 21.99089, + 22.74926, + 23.35053, + 39.50373, + 22.11181, + 21.98993, + 34.79176, + 33.35522, + 21.98722, + 21.99461, + 22.31978, + 22.02065, + 22.00112, + 22.51674, + 21.90936, + 22.0396, + 22.14533, + 22.04658, + 22.0397, + 22.24594, + 21.98591, + 21.99769, + 23.1272, + 21.98597, + 21.97945, + 23.41716, + 22.01276, + 22.16768, + 22.05336, + 22.01864, + 22.00924, + 22.00254, + 22.01507, + 22.06016, + 22.27916, + 22.04636, + 21.98814, + 22.00941, + 22.0346, + 21.99864, + 22.10695, + 22.23064, + 21.98859, + 22.36341, + 22.0013, + 22.18137, + 22.05605, + 21.98882, + 22.19102, + 22.48586, + 21.97836, + 21.99124, + 23.31346, + 22.07199, + 22.00141, + 23.42964, + 21.96173, + 22.25887, + 23.43985, + 22.01332, + 22.01627, + 22.95893, + 21.99034, + 22.14963, + 22.27016, + 22.01802, + 22.175, + 22.26961, + 21.98826, + 21.98134, + 22.31324, + 21.94652, + 21.92741, + 21.99249, + 22.11845, + 21.96309, + 21.97954, + 21.97694, + 21.98313, + 22.01211, + 22.00381, + 
22.31301, + 21.96675, + 21.95389, + 21.96227, + 21.98151, + 22.07147, + 21.99381, + 22.5566, + 22.06232, + 22.26409, + 21.96544, + 22.39042, + 21.96799, + 21.96196, + 22.71161, + 21.958, + 22.11271, + 24.0816, + 22.2892, + 23.36337, + 23.24124, + 21.96664, + 21.95624, + 22.91121, + 21.96068, + 22.01115, + 22.88241, + 21.95788, + 21.93589, + 23.13276, + 21.95262, + 21.97219, + 22.27244, + 22.12735, + 21.93767, + 22.23338, + 22.10927, + 21.96938, + 22.24808, + 21.95405, + 22.14658, + 22.14783, + 28.50503, + 21.95101, + 28.99765, + 21.93268, + 21.95949, + 22.24857, + 22.04115, + 32.10111, + 23.01695, + 22.16382, + 22.06284, + 21.99858, + 22.32419, + 21.95636, + 21.97852, + 21.9966, + 21.98316, + 21.99546, + 21.99638, + 22.28976, + 21.95052, + 22.34413, + 21.98317, + 21.85908, + 22.03553, + 22.27835, + 22.0571, + 22.01643, + 22.32665, + 22.62609, + 22.0722, + 22.89276, + 22.01153, + 22.01705, + 22.99083, + 21.97377, + 22.19615, + 23.35959, + 22.13275, + 21.97111, + 23.10741, + 22.02579, + 22.06489, + 22.48569, + 22.23588, + 21.96494, + 22.19732, + 22.66303, + 21.91312, + 21.93004, + 22.00775, + 22.07734, + 21.9728, + 22.20443, + 21.97438, + 22.00575, + 22.09644, + 22.08538, + 22.30842, + 21.92897, + 21.9404, + 21.96093, + 21.94, + 22.23155, + 22.00614, + 22.44172, + 21.97061, + 22.13604, + 21.98885, + 22.12053, + 22.23869, + 22.08662, + 21.95649, + 21.97178, + 22.28082, + 21.99879, + 22.10142, + 22.96808, + 22.01427, + 21.95657, + 22.88311, + 21.99775, + 21.96125, + 23.36863, + 22.1433, + 21.99431, + 22.9282, + 22.04818, + 21.99794, + 22.43828, + 21.98034, + 21.94735, + 22.20725, + 21.93566, + 22.07658, + 22.05801, + 22.07393, + 21.94482, + 21.95115, + 21.93797, + 22.12318, + 22.33475, + 22.00191, + 22.17385, + 21.94542, + 22.04834, + 21.96882, + 22.03203, + 21.96371, + 21.99714, + 22.34338, + 21.93479, + 22.24105, + 21.9695, + 22.12514, + 21.97491, + 21.96482, + 22.60359, + 22.03091, + 22.28636, + 87.44035, + 29.37494, + 22.14932, + 22.00649, + 22.14842, + 22.15305, + 22.47064, + 22.12112, + 22.1235, + 22.11014, + 22.08956, + 22.23661, + 22.27827, + 22.31518, + 22.13057, + 22.36065, + 22.11009, + 22.15529, + 22.29036, + 22.09258, + 22.29345, + 22.08084, + 22.2472, + 22.26483, + 22.14362, + 22.35014, + 22.34224, + 22.03782, + 22.4855, + 22.10209, + 22.31665, + 22.57082, + 22.02015, + 22.17261, + 22.76065, + 22.09401, + 22.0559, + 23.06159, + 22.02222, + 22.02379, + 22.79652, + 22.31302, + 22.1096, + 22.72537, + 22.0562, + 22.15724, + 22.43723, + 22.60014, + 22.25093, + 22.30373, + 22.062, + 22.12679, + 22.29995, + 22.07457, + 22.03976, + 22.10053, + 22.06265, + 22.26463, + 22.07873, + 22.44415, + 22.07001, + 22.33738, + 22.08838, + 22.16296, + 22.16339, + 22.16991, + 22.42509, + 22.2312, + 22.15916, + 22.11519, + 22.04263, + 22.3869, + 22.16323, + 22.18507, + 22.48579, + 22.06755, + 22.0962, + 22.95661, + 22.16252, + 22.05745, + 22.79741, + 22.09334, + 22.1858, + 22.93376, + 22.334, + 22.3063, + 22.84675, + 22.16503, + 22.17242, + 22.59222, + 22.06465, + 22.07589, + 22.80193, + 22.07308, + 22.27505, + 22.55282, + 22.12552, + 22.06361, + 22.26227, + 22.41097, + 22.07737, + 22.0641, + 22.22291, + 21.91401, + 22.09448, + 22.07533, + 22.14453, + 22.07874, + 22.29419, + 22.07872, + 22.0924, + 22.05562, + 22.07998, + 22.21663, + 22.02422, + 22.15489, + 22.04533, + 22.02868, + 22.06831, + 22.20454, + 22.05581, + 22.02841, + 22.20265, + 22.02366, + 22.02199, + 22.0139, + 22.1598, + 22.05404, + 22.01743, + 22.0129, + 22.0247, + 22.13256, + 22.01642, + 22.0272, + 22.00517, + 21.99164, + 22.10011, + 
22.03568, + 22.06918, + 23.56804, + 22.16179, + 22.08451, + 22.20877, + 22.2711, + 22.10781, + 22.03911, + 22.70341, + 22.00169, + 22.04696, + 22.67068, + 21.99085, + 22.01035, + 22.9163, + 21.99913, + 22.06136, + 23.07159, + 22.17796, + 22.36062, + 23.19125, + 22.03456, + 21.98697, + 22.58117, + 22.03722, + 22.12609, + 22.31277, + 22.00898, + 22.03641, + 22.027, + 21.99275, + 22.03062, + 22.1308, + 22.0163, + 21.98889, + 22.00985, + 22.02208, + 22.3909, + 22.0133, + 21.99356, + 22.02443, + 22.16854, + 22.01443, + 22.01095, + 22.20835, + 22.0065, + 21.99457, + 22.03279, + 22.06444, + 22.02094, + 22.03274, + 22.07727, + 22.024, + 22.05811, + 22.00449, + 22.16497, + 22.00399, + 22.11103, + 22.20282, + 22.00141, + 22.33244, + 22.01291, + 22.1501, + 22.98475, + 22.00135, + 21.89305, + 23.21657, + 22.01541, + 22.00729, + 23.27537, + 22.02325, + 22.02953, + 22.99426, + 22.37106, + 22.17864, + 22.43954, + 21.99077, + 22.06264, + 22.03073, + 22.00708, + 22.0082, + 22.06792, + 22.00983, + 22.03936, + 22.33591, + 22.17899, + 22.11585, + 22.10419, + 22.08032, + 22.14083, + 22.07963, + 22.17312, + 22.037, + 22.20653, + 22.10069, + 22.04341, + 22.15363, + 22.05156, + 22.39116, + 22.12367, + 22.2752, + 22.14157, + 22.35703, + 22.15858, + 22.01961, + 22.29095, + 22.08881, + 22.04276, + 22.75425, + 22.0342, + 22.11545, + 23.31582, + 22.03647, + 22.05616, + 23.38589, + 22.03024, + 22.11227, + 22.98518, + 22.04708, + 22.04421, + 22.85279, + 22.05935, + 22.12996, + 22.37204, + 22.13334, + 22.06316, + 22.3544, + 22.23473, + 22.02368, + 22.30709, + 22.02756, + 22.1135, + 22.01979, + 22.17032, + 22.04573, + 22.02348, + 22.0829, + 22.03043, + 22.48803, + 22.03458, + 22.03211, + 22.01908, + 22.00251, + 22.14211, + 22.04241, + 22.20086, + 22.00635, + 22.0097, + 22.17863, + 22.00551, + 22.09333, + 22.01044, + 22.04104, + 22.06058, + 22.27026, + 22.02366, + 22.31058, + 22.78117, + 22.01579, + 22.02808, + 22.97729, + 22.01965, + 22.10839, + 23.29251, + 22.12997, + 22.00996, + 23.10594, + 22.02723, + 22.02972, + 23.00036, + 22.09853, + 22.16474, + 22.82317, + 22.00512, + 22.31634, + 22.14177, + 22.06013, + 22.02529, + 22.31011, + 22.00654, + 22.02501, + 22.59174, + 22.01666, + 22.1144, + 22.10909, + 22.03189, + 22.03186, + 22.02997, + 21.99226, + 22.0248, + 22.12153, + 21.9721, + 22.13031, + 22.00527, + 22.01625, + 22.03869, + 21.9971, + 22.32019, + 22.18763, + 22.35166, + 22.17188, + 22.29416, + 22.1213, + 22.13695, + 22.49823, + 22.97301, + 22.10295, + 22.12038, + 22.08706, + 22.13407, + 22.10087, + 22.0762, + 22.14732, + 22.11962, + 22.12895, + 22.15144, + 22.06173, + 22.08087, + 22.29365, + 22.15383, + 22.20576, + 22.13582, + 22.05402, + 22.57075, + 22.32239, + 22.28969, + 22.20852, + 22.07419, + 22.3298, + 22.0726, + 22.14401, + 22.87172, + 22.27554, + 22.08264, + 23.03667, + 22.06085, + 22.08401, + 23.0776, + 22.32991, + 22.05539, + 23.08225, + 22.5749, + 22.11254, + 22.94656, + 22.0916, + 22.24724, + 22.94123, + 22.21239, + 22.05054, + 22.65562, + 22.07319, + 22.29545, + 22.56916, + 22.07369, + 22.10235, + 22.38025, + 22.05502, + 22.1442, + 22.39969, + 22.59194, + 22.06765, + 22.15861, + 22.13692, + 22.04978, + 22.2308, + 22.07787, + 22.04773, + 22.18925, + 22.09132, + 22.05915, + 22.04757, + 22.24268, + 22.11858, + 22.04981, + 22.04236, + 22.07326, + 22.05566, + 22.54976, + 22.33248, + 22.24413, + 22.58618, + 22.08154, + 22.07835, + 23.05144, + 22.05515, + 22.14249, + 22.73477, + 22.076, + 22.07176, + 23.03686, + 22.05126, + 22.05328, + 23.06891, + 22.03351, + 22.06355, + 22.74752, + 22.09005, + 22.12947, + 
22.51651, + 22.24589, + 22.05862, + 22.52743, + 22.01698, + 22.05485, + 22.65973, + 22.04256, + 22.04391, + 22.37144, + 22.09203, + 22.1188, + 22.37972, + 22.20775, + 22.26424, + 22.13799, + 22.32221, + 22.08471, + 22.15401, + 22.20326, + 22.1117, + 22.38476, + 22.08183, + 22.06705, + 22.13908, + 22.10766, + 22.119, + 22.06683, + 22.27187, + 22.10087, + 22.2443, + 22.56028, + 22.35752, + 22.08776, + 22.99192, + 22.08303, + 22.13826, + 22.90352, + 22.41341, + 22.28265, + 23.20811, + 22.09551, + 22.2311, + 22.64804, + 22.08277, + 22.11031, + 22.90923, + 22.25287, + 22.31899, + 22.59954, + 22.11233, + 22.26726, + 22.3943, + 22.23083, + 22.05556, + 22.17205, + 22.24762, + 22.09411, + 22.22834, + 22.07723, + 22.13943, + 22.12574, + 22.16756, + 22.07795, + 22.12778, + 22.30969, + 22.12327, + 22.09924, + 22.09402, + 22.07373, + 22.08579, + 22.0969, + 22.29523, + 22.0814, + 22.33657, + 22.05957, + 22.06162, + 22.23924, + 22.22044, + 22.25518, + 22.76025, + 22.04576, + 22.1095, + 22.89399, + 22.11334, + 22.20662, + 23.22123, + 22.13405, + 22.14319, + 23.13889, + 22.08252, + 22.09186, + 22.88288, + 22.13033, + 22.24811, + 22.84108, + 22.0963, + 22.10466, + 22.56334, + 22.28161, + 22.11432, + 22.51849, + 22.0848, + 22.0716, + 22.29104, + 22.28107, + 22.04936, + 22.34781, + 22.08045, + 22.22841, + 22.38318, + 22.08404, + 22.27922, + 22.06086, + 22.06059, + 22.0609, + 22.10083, + 22.07708, + 22.03609, + 22.18118, + 22.06044, + 22.24976, + 22.07572, + 22.05061, + 22.03577, + 22.05157, + 22.41553, + 22.04533, + 22.58813, + 22.22882, + 22.22933, + 22.18269, + 22.22138, + 22.29704, + 22.1916, + 22.50302, + 22.1511, + 22.20668, + 22.18498, + 22.28163, + 22.18772, + 22.18406, + 22.30853, + 22.15384, + 22.14454, + 22.19723, + 22.42928, + 22.26607, + 23.24038, + 22.16549, + 22.17437, + 23.31809, + 22.16913, + 22.15666, + 23.41506, + 22.20052, + 22.15415, + 23.44726, + 22.30211, + 22.1587, + 22.84592, + 22.22882, + 22.3731, + 22.89438, + 22.15999, + 22.31374, + 22.22651, + 22.15052, + 22.12954, + 22.19818, + 22.14812, + 22.22392, + 22.22943, + 22.19123, + 22.14818, + 22.16315, + 22.35636, + 22.14742, + 22.18533, + 22.16984, + 22.16773, + 22.55359, + 22.21615, + 22.2091, + 22.13037, + 22.15519, + 22.10123, + 22.17487, + 22.17513, + 22.21376, + 22.15904, + 22.2451, + 22.16102, + 22.27373, + 22.42959, + 22.35776, + 22.263, + 22.73783, + 22.27069, + 22.57598, + 22.9897, + 22.18811, + 22.14974, + 22.94098, + 22.19084, + 22.26805, + 23.17091, + 22.27699, + 22.11621, + 23.52157, + 22.32281, + 22.20457, + 22.84343, + 22.34451, + 22.14532, + 22.54568, + 22.15921, + 22.38103, + 22.35533, + 22.12631, + 22.14453, + 22.13071, + 22.19417, + 22.12171, + 22.27355, + 22.25996, + 22.13962, + 22.17909, + 22.31349, + 22.18588, + 22.14944, + 22.15603, + 22.14809, + 22.27744, + 22.13968, + 22.43714, + 22.17337, + 22.11314, + 22.20855, + 22.16081, + 22.22404, + 22.15729, + 22.41279, + 22.14239, + 22.13028, + 22.21568, + 22.10188, + 22.34468, + 22.07896, + 22.1231, + 22.09002, + 22.09242, + 22.11111, + 22.17983, + 22.24994, + 22.10215, + 22.46662, + 22.09419, + 22.15175, + 22.14559, + 22.08943, + 22.12113, + 22.08889, + 22.28845, + 22.57452, + 22.14223, + 22.45406, + 22.21435, + 22.05357, + 22.66234, + 22.05918, + 22.14693, + 23.03717, + 22.12768, + 22.32128, + 23.20236, + 22.09008, + 22.05365, + 23.21157, + 22.10796, + 22.06815, + 22.87714, + 22.57965, + 22.05288, + 22.48416, + 22.10489, + 22.15942, + 22.0792, + 22.29933, + 22.06366, + 22.10414, + 22.23846, + 61.27965, + 61.17303, + 60.93715, + 61.13133, + 61.12721, + 60.81685, + 
60.98225, + 61.30132, + 60.93549, + 60.69967, + 60.91489, + 60.81747, + 61.46471, + 61.69749, + 60.77694, + 60.76163, + 60.97084, + 61.28849, + 60.91529, + 60.80709, + 60.8915, + 61.05598, + 22.11434, + 22.36842, + 22.15676, + 22.10011, + 22.11174, + 22.13811, + 22.41267, + 22.06169, + 22.10501, + 22.24403, + 22.07369, + 22.10714, + 22.13241, + 22.30543, + 22.09326, + 22.4798, + 22.12286, + 22.12307, + 22.17564, + 22.09602, + 22.08707, + 22.06782, + 22.79265, + 22.42881, + 22.18655, + 23.35501, + 22.20008, + 22.06771, + 22.66239, + 22.04897, + 22.40341, + 23.11431, + 22.07558, + 22.24625, + 22.47141, + 22.36805, + 22.04884, + 22.17862, + 22.12284, + 22.10071, + 22.40183, + 22.49404, + 22.05267, + 22.06313, + 22.06909, + 22.18636, + 22.12141, + 22.25289, + 22.06973, + 22.08393, + 22.24575, + 22.06041, + 22.18843, + 22.04192, + 22.06083, + 22.07726, + 22.04325, + 22.14804, + 22.15436, + 22.92499, + 22.07397, + 22.07851, + 22.31569, + 22.04001, + 22.17268, + 22.59199, + 22.26674, + 22.40413, + 22.73767, + 22.03631, + 22.06472, + 23.22907, + 22.37175, + 22.06171, + 23.18735, + 22.06551, + 22.04094, + 23.01561, + 22.1797, + 22.0393, + 22.36705, + 22.23749, + 22.05647, + 22.27163, + 22.03717, + 22.23222, + 22.03541, + 22.09642, + 22.07479, + 22.04652, + 22.0752, + 22.0611, + 22.155, + 22.04841, + 22.04367, + 22.57311, + 22.07823, + 22.13918, + 22.07624, + 22.58741, + 22.05358, + 22.09416, + 22.06915, + 22.06697, + 22.17179, + 22.04659, + 22.0679, + 22.05597, + 22.20582, + 22.1163, + 22.05879, + 22.53564, + 22.05523, + 22.37207, + 22.15885, + 22.14002, + 22.14307, + 22.12354, + 22.27465, + 22.12406, + 22.37709, + 22.15483, + 22.08713, + 22.11552, + 22.08857, + 22.066, + 22.08113, + 22.30342, + 22.08316, + 22.09483, + 22.08368, + 22.31247, + 22.07708, + 22.09326, + 22.02953, + 22.04734, + 22.21646, + 22.18826, + 22.1858, + 22.06094, + 22.2184, + 22.05256, + 22.58915, + 22.16498, + 22.40896, + 22.76875, + 22.0528, + 22.13154, + 23.05687, + 22.05648, + 22.18597, + 23.14894, + 22.23368, + 22.11616, + 22.59598, + 22.35966, + 22.07336, + 22.17872, + 22.06577, + 22.32277, + 22.08732, + 22.08067, + 22.36932, + 22.07089, + 22.07751, + 22.0811, + 22.31345, + 22.06705, + 22.05811, + 22.06743, + 22.06308, + 22.1459, + 22.06573, + 22.44047, + 22.06664, + 22.08419, + 22.1892, + 22.04749, + 22.09074, + 22.64728, + 22.51719, + 22.09339, + 22.60724, + 22.05313, + 22.05373, + 22.73244, + 29.9374, + 23.23771, + 26.12982, + 22.0714, + 22.04965, + 23.02428, + 22.26129, + 22.26949, + 23.02104, + 22.06185, + 22.05681, + 23.15292, + 22.45871, + 22.16934, + 22.56592, + 22.04116, + 22.05877, + 22.45156, + 22.18365, + 22.03071, + 22.37645, + 22.06848, + 22.15173, + 22.51891, + 22.19234, + 22.02494, + 22.16566, + 22.22915, + 22.07767, + 22.15082, + 22.22704, + 22.06001, + 22.20203, + 22.04289, + 22.08313, + 22.32529, + 22.04353, + 22.07976, + 22.06153, + 22.14602, + 22.23695, + 97.32394, + 22.15297, + 22.25851, + 22.20962, + 22.15517, + 22.09394, + 22.31625, + 22.21339, + 22.13564, + 22.28151, + 22.08694, + 22.05186, + 22.08302, + 22.06486, + 22.24339, + 22.04107, + 22.05055, + 22.05284, + 22.19875, + 22.08528, + 22.04858, + 22.1898, + 22.04259, + 22.08821, + 22.04079, + 22.26902, + 22.09483, + 22.0653, + 22.3063, + 22.04724, + 22.03538, + 22.11389, + 22.17977, + 22.19797, + 22.09501, + 22.05264, + 22.23768, + 22.06425, + 22.19367, + 22.15496, + 22.04645, + 22.01735, + 22.05546, + 22.22108, + 22.52894, + 22.17078, + 22.04657, + 22.66171, + 22.08216, + 22.14434, + 22.91265, + 22.04189, + 22.30463, + 22.8161, + 22.10876, + 
22.15244, + 23.07323, + 22.07645, + 22.07515, + 22.45072, + 22.06701, + 22.05001, + 22.81856, + 22.2083, + 22.07677, + 22.49164, + 22.06707, + 22.04991, + 22.50302, + 22.19432, + 22.05407, + 22.17785, + 22.17777, + 22.0591, + 22.42836, + 22.04898, + 22.25012, + 22.02919, + 22.03809, + 22.02566, + 22.04623, + 22.19503, + 22.03965, + 22.13501, + 22.03498, + 22.24937, + 22.12539, + 22.04288, + 22.01837, + 22.0592, + 22.14505, + 22.05825, + 22.33469, + 22.28682, + 22.0202, + 22.06255, + 22.3121, + 22.04525, + 22.05081, + 22.87176, + 22.02192, + 22.02659, + 23.14619, + 22.01422, + 22.0033, + 22.77386, + 22.04744, + 22.02232, + 22.71235, + 22.23808, + 22.33464, + 22.51963, + 22.04383, + 22.09721, + 22.492, + 22.16247, + 22.15125, + 23.31783, + 22.50191, + 22.25313, + 23.16342, + 22.08969, + 22.08897, + 23.02494, + 22.07001, + 22.431, + 22.91199, + 22.07168, + 22.05827, + 22.73213, + 22.0699, + 22.06272, + 22.91321, + 22.04565, + 22.02981, + 23.11438, + 22.06312, + 22.07263, + 22.60522, + 22.48687, + 22.06531, + 22.81767, + 22.1324, + 22.05353, + 22.72526, + 22.04709, + 22.33975, + 22.49839, + 22.06596, + 22.0488, + 22.49857, + 22.21481, + 22.04979, + 22.67688, + 22.05085, + 22.604, + 22.01359, + 22.01026, + 22.576, + 22.04568, + 22.05149, + 22.26098, + 22.20339, + 22.25645, + 22.15332, + 22.0521, + 22.04389, + 22.01911, + 22.04118, + 22.18372, + 22.36079, + 22.03144, + 22.2546, + 22.0347, + 22.11309, + 22.02022, + 22.06121, + 22.0363, + 22.07602, + 22.02511, + 22.03806, + 22.49011, + 22.08332, + 22.04208, + 22.0424, + 22.02196, + 22.12873, + 22.07355, + 22.39268, + 22.90289, + 22.21884, + 22.05382, + 23.32278, + 22.01646, + 22.04866, + 23.09335, + 22.03294, + 22.05951, + 23.07175, + 22.33506, + 22.13579, + 22.96479, + 22.17044, + 22.06808, + 22.71606, + 22.06192, + 22.2198, + 22.76581, + 22.04501, + 22.07784, + 22.45968, + 22.02073, + 22.06513, + 22.02161, + 22.05107, + 22.01897, + 22.12474, + 22.30654, + 22.05217, + 22.06245, + 22.03632, + 22.05141, + 22.04536, + 22.04668, + 22.07617, + 22.21171, + 22.04614, + 22.03868, + 22.27957, + 22.15533, + 22.10648, + 22.02181, + 22.08012, + 22.11044, + 23.19676, + 22.11926, + 22.36305, + 22.08336, + 22.18096, + 22.12117, + 22.12299, + 22.08193, + 22.06577, + 22.11211, + 22.08488, + 22.50658, + 22.08343, + 22.08416, + 22.10853, + 22.06203, + 22.05712, + 22.13873, + 22.35144, + 22.18615, + 22.0991, + 22.05517, + 22.16001, + 22.04568, + 22.10196, + 22.27976, + 22.04611, + 22.51055, + 22.06527, + 22.25575, + 22.26271, + 22.07975, + 22.08833, + 22.50771, + 22.08065, + 22.03076, + 22.93063, + 22.05803, + 22.04597, + 23.21894, + 22.18984, + 22.37802, + 22.98876, + 22.06177, + 22.30177, + 22.92668, + 22.23802, + 22.0502, + 22.87797 ] } } \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json new file mode 100644 index 000000000..3c34692c4 --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json @@ -0,0 +1,21878 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 27308, + "step_interval": 5, + "values": [ + 12.66411, + 12.57512, + 11.54347, + 10.60309, + 10.16435, + 9.88037, + 9.63458, + 9.42019, + 9.20416, + 9.03345, + 8.87633, + 8.68266, + 8.55282, + 8.44289, + 8.32071, + 8.18419, + 8.04222, + 7.93414, + 7.76829, + 7.65767, + 7.58631, + 7.42708, + 7.35614, + 7.20111, + 7.12867, + 7.00843, + 
6.93027, + 6.84437, + 6.76406, + 6.68399, + 6.61684, + 6.54664, + 6.47692, + 6.37613, + 6.34276, + 6.27588, + 6.20124, + 6.12117, + 6.09124, + 5.98671, + 5.95872, + 5.87765, + 5.82396, + 5.78384, + 5.72361, + 5.66607, + 5.65114, + 5.61262, + 5.52993, + 5.54276, + 5.42221, + 5.41338, + 5.33586, + 5.3198, + 5.31586, + 5.18782, + 5.14439, + 5.14995, + 5.12504, + 5.09826, + 5.06345, + 5.0078, + 4.98392, + 4.94395, + 4.90681, + 4.90251, + 4.87224, + 4.82824, + 4.80728, + 4.77264, + 4.74214, + 4.73947, + 4.67142, + 4.65377, + 4.63964, + 4.56415, + 4.57758, + 4.54651, + 4.49286, + 4.4527, + 4.44914, + 4.38955, + 4.38042, + 4.3699, + 4.32201, + 4.32255, + 4.26145, + 4.22908, + 4.2008, + 4.16944, + 4.14805, + 4.11125, + 4.08557, + 4.03095, + 4.03893, + 4.04441, + 3.98295, + 4.00241, + 3.96752, + 3.88737, + 3.91287, + 3.91207, + 3.83451, + 3.82414, + 3.81407, + 3.79929, + 3.77533, + 3.77, + 3.74376, + 3.72147, + 3.71352, + 3.6834, + 3.65812, + 3.66585, + 3.65781, + 3.63993, + 3.62103, + 3.6417, + 3.58509, + 3.55831, + 3.6012, + 3.53974, + 3.55814, + 3.55746, + 3.51119, + 3.50954, + 3.5255, + 3.53233, + 3.52729, + 3.51299, + 3.51783, + 3.4733, + 3.50497, + 3.47873, + 3.45585, + 3.49018, + 3.44842, + 3.41404, + 3.41565, + 3.38658, + 3.37656, + 3.36638, + 3.37443, + 3.36633, + 3.34174, + 3.33734, + 3.31549, + 3.30359, + 3.32553, + 3.28474, + 3.31545, + 3.28885, + 3.30293, + 3.30619, + 3.31654, + 3.33438, + 3.32533, + 3.30398, + 3.28048, + 3.2985, + 3.31593, + 3.2582, + 3.29186, + 3.27218, + 3.28093, + 3.23044, + 3.21895, + 3.23147, + 3.19311, + 3.17656, + 3.15227, + 3.15724, + 3.19058, + 3.15595, + 3.15154, + 3.19151, + 3.16355, + 3.19715, + 3.21367, + 3.18492, + 3.18232, + 3.1802, + 3.12057, + 3.13289, + 3.12574, + 3.11834, + 3.09283, + 3.10192, + 3.12903, + 3.14907, + 3.11761, + 3.12161, + 3.14585, + 3.10963, + 3.07548, + 3.07332, + 3.0613, + 3.06168, + 3.08481, + 3.03568, + 3.03012, + 3.05793, + 3.00981, + 3.02738, + 3.0574, + 3.04075, + 3.04196, + 3.05152, + 3.01682, + 3.03018, + 3.02359, + 3.03656, + 3.06873, + 3.13228, + 3.69746, + 3.34098, + 3.2697, + 3.2011, + 3.23706, + 3.22535, + 3.20222, + 3.22282, + 3.24482, + 3.2826, + 3.23777, + 3.19313, + 3.10125, + 3.08371, + 3.01564, + 3.01027, + 2.99933, + 2.99072, + 2.99681, + 2.9711, + 3.0003, + 2.97339, + 2.97206, + 2.95987, + 2.96103, + 3.81862, + 3.027, + 3.08442, + 3.02201, + 2.97428, + 2.9512, + 2.94254, + 2.94452, + 2.95629, + 2.95066, + 2.96785, + 2.94775, + 2.94434, + 2.94975, + 2.92395, + 2.91463, + 2.94346, + 2.91442, + 2.96389, + 2.93466, + 2.92769, + 2.92092, + 2.9296, + 2.93897, + 2.90964, + 2.90179, + 2.89109, + 2.88789, + 2.90236, + 2.87818, + 2.89445, + 2.88733, + 2.86963, + 2.88201, + 2.88201, + 2.91574, + 2.85808, + 2.87506, + 2.90114, + 2.85602, + 2.86231, + 2.90121, + 2.92758, + 2.92889, + 2.97651, + 2.94846, + 2.95235, + 2.91583, + 2.90138, + 2.8962, + 2.82255, + 2.87337, + 2.82863, + 2.84668, + 2.88019, + 2.87063, + 2.82263, + 2.84282, + 2.82272, + 2.82577, + 2.83317, + 2.86631, + 2.8377, + 2.80912, + 2.85542, + 2.79838, + 2.80437, + 2.81773, + 2.84532, + 2.79921, + 2.80908, + 2.79932, + 2.805, + 2.79934, + 2.7967, + 2.7993, + 2.81225, + 2.79087, + 2.80686, + 2.7917, + 2.7713, + 2.79413, + 2.7818, + 2.79096, + 2.79608, + 2.81718, + 2.76239, + 2.76664, + 2.78456, + 2.80506, + 2.7998, + 2.80214, + 2.86702, + 2.80958, + 2.85462, + 2.87831, + 2.85835, + 2.86664, + 2.98447, + 3.01179, + 2.86197, + 2.82217, + 2.80549, + 2.77205, + 2.75611, + 2.7306, + 3.02386, + 2.76038, + 2.77132, + 2.76668, + 2.76814, + 2.73318, + 2.74889, + 
2.75312, + 2.74421, + 2.75876, + 2.72944, + 2.75698, + 2.70658, + 2.73879, + 2.7168, + 2.75181, + 2.72915, + 2.73445, + 2.76606, + 2.71916, + 2.73669, + 2.72278, + 2.76389, + 2.76707, + 2.72831, + 2.75726, + 2.7201, + 2.73956, + 2.71, + 2.72431, + 2.7079, + 2.72553, + 2.68492, + 2.70358, + 2.72405, + 2.70679, + 2.70858, + 2.73712, + 2.70487, + 2.72022, + 2.70781, + 2.71437, + 2.73678, + 2.76825, + 2.73086, + 2.73186, + 2.70006, + 2.7383, + 2.68168, + 2.71223, + 2.70812, + 2.71417, + 2.73951, + 2.73634, + 2.71619, + 2.6698, + 2.72761, + 2.67432, + 2.69199, + 2.69912, + 2.69334, + 2.70113, + 2.73844, + 2.70143, + 2.68763, + 2.69931, + 2.69486, + 2.67607, + 2.68582, + 2.63971, + 2.67889, + 2.6846, + 2.68313, + 2.64794, + 2.68019, + 2.68884, + 2.70938, + 2.68497, + 2.70578, + 2.69081, + 2.67461, + 2.7047, + 2.6548, + 2.65724, + 2.65819, + 2.64778, + 2.64452, + 2.67403, + 2.6698, + 2.72684, + 2.67124, + 2.68642, + 2.68748, + 2.68093, + 2.69559, + 2.73456, + 2.6983, + 2.68567, + 2.6938, + 2.69101, + 2.67246, + 2.68474, + 2.63712, + 2.6841, + 2.68197, + 2.68107, + 2.64263, + 2.68132, + 2.68796, + 2.68261, + 2.67503, + 2.67891, + 2.69154, + 2.66332, + 2.70234, + 2.6525, + 2.65316, + 2.65565, + 2.64145, + 2.64406, + 2.67459, + 2.67396, + 2.65601, + 2.64538, + 2.64518, + 2.64029, + 2.62506, + 2.64812, + 2.68023, + 2.65857, + 2.65188, + 2.65118, + 2.67127, + 2.6762, + 2.65533, + 2.63195, + 2.6706, + 2.67011, + 2.63114, + 2.64083, + 2.63528, + 2.64123, + 2.61442, + 2.61288, + 2.65875, + 2.62135, + 2.66254, + 2.62008, + 2.66671, + 2.66685, + 2.66895, + 2.72481, + 2.65198, + 2.63081, + 2.62924, + 2.61116, + 2.60944, + 2.64439, + 2.64299, + 2.63168, + 2.614, + 2.61138, + 2.63383, + 2.61753, + 2.62809, + 2.61149, + 2.60833, + 2.61664, + 2.60659, + 2.62218, + 2.60881, + 2.61107, + 2.61836, + 2.58814, + 2.58691, + 2.60137, + 2.59519, + 2.61287, + 2.59388, + 2.62939, + 2.57181, + 2.58867, + 2.59744, + 2.5881, + 2.60213, + 2.60711, + 2.626, + 2.57491, + 2.61578, + 2.61135, + 2.57712, + 2.59037, + 2.58269, + 2.60228, + 2.61117, + 2.57721, + 2.58988, + 2.6088, + 2.59343, + 2.5886, + 2.59325, + 2.57698, + 2.58705, + 2.60276, + 2.78045, + 2.78575, + 2.71235, + 2.74961, + 2.67202, + 2.62672, + 2.62165, + 2.612, + 2.59372, + 2.57245, + 2.5668, + 2.56261, + 2.59085, + 2.56532, + 2.5658, + 2.56428, + 2.5478, + 2.53411, + 2.5662, + 2.58326, + 2.56237, + 2.54502, + 2.56639, + 2.5723, + 2.65984, + 2.60739, + 2.61156, + 2.60302, + 2.61116, + 2.57458, + 2.55265, + 2.55707, + 2.78539, + 2.71638, + 2.7649, + 2.69004, + 2.6322, + 2.62564, + 2.61967, + 2.59594, + 2.57381, + 2.56544, + 2.56151, + 2.5912, + 2.56681, + 2.56909, + 2.59729, + 2.94733, + 2.75884, + 2.68768, + 2.65241, + 2.59956, + 2.5661, + 2.57886, + 2.58442, + 2.58039, + 2.56677, + 2.57118, + 2.56942, + 2.59178, + 2.56563, + 2.55076, + 2.56077, + 2.56136, + 2.57081, + 2.57043, + 2.57068, + 2.55957, + 2.56693, + 2.5647, + 2.5598, + 2.5351, + 2.56527, + 2.59743, + 2.57771, + 2.67896, + 2.58597, + 2.58197, + 2.56086, + 2.57367, + 2.54699, + 2.56719, + 2.56208, + 2.52928, + 2.57391, + 2.54608, + 2.55876, + 2.58457, + 2.56585, + 2.56691, + 2.5395, + 2.53599, + 2.54027, + 2.54413, + 2.52798, + 2.55987, + 2.55681, + 2.52661, + 2.55512, + 2.53563, + 2.52261, + 2.55698, + 2.56615, + 2.53246, + 2.55192, + 2.5543, + 2.55431, + 2.51778, + 2.53535, + 2.55671, + 2.54136, + 2.51511, + 2.52728, + 2.53625, + 2.54599, + 2.58454, + 2.56324, + 2.58224, + 2.53765, + 2.57012, + 2.53108, + 2.56653, + 2.53744, + 2.51537, + 2.5962, + 4.82565, + 3.12657, + 2.76828, + 2.70589, + 2.67721, + 
2.57146, + 2.57396, + 2.56132, + 2.54688, + 2.53223, + 2.55593, + 2.56043, + 2.53207, + 2.5261, + 2.52873, + 2.53849, + 2.53505, + 2.52328, + 2.5018, + 2.52388, + 2.52509, + 2.53215, + 2.5431, + 2.50073, + 2.76597, + 2.63563, + 2.58268, + 2.56536, + 2.53671, + 2.53596, + 2.4962, + 2.51957, + 2.52972, + 2.50681, + 2.50437, + 2.51215, + 2.48754, + 2.49129, + 2.48452, + 2.51387, + 2.5192, + 2.48679, + 2.51679, + 2.51778, + 2.50136, + 2.51352, + 2.5061, + 2.48554, + 2.50426, + 2.50521, + 2.53404, + 2.5519, + 2.53764, + 2.56074, + 2.5365, + 2.5334, + 2.54575, + 2.48862, + 2.51039, + 2.51649, + 2.49997, + 2.49433, + 2.48134, + 2.51264, + 2.50471, + 2.50695, + 2.48079, + 2.48813, + 2.48351, + 2.46973, + 2.48284, + 2.50415, + 2.47805, + 2.51741, + 2.48992, + 2.50547, + 2.48293, + 2.48447, + 2.49026, + 2.46599, + 2.48778, + 2.49269, + 2.48381, + 2.48727, + 2.50358, + 2.48089, + 2.49332, + 2.51056, + 2.50232, + 2.49096, + 2.48902, + 2.47096, + 2.47017, + 2.46071, + 2.50019, + 2.46935, + 2.50016, + 2.49045, + 2.49533, + 2.47747, + 2.47233, + 2.45548, + 2.47473, + 2.4702, + 2.46163, + 2.46659, + 2.49281, + 2.46124, + 2.49415, + 2.48226, + 2.43948, + 2.46836, + 2.44224, + 2.45511, + 2.42348, + 2.75451, + 2.50208, + 2.45048, + 2.47487, + 2.45522, + 2.45882, + 2.46588, + 2.49273, + 2.45878, + 2.46673, + 2.43995, + 2.83249, + 2.80646, + 2.60667, + 2.52176, + 2.4823, + 2.48339, + 2.46671, + 2.49174, + 2.49155, + 2.49121, + 2.46149, + 2.49995, + 2.4981, + 2.47713, + 2.50676, + 2.49282, + 2.47929, + 2.47077, + 2.48221, + 2.46996, + 2.46778, + 2.46731, + 2.43917, + 2.47942, + 2.47357, + 2.48187, + 2.45511, + 2.49732, + 2.4967, + 2.47343, + 2.46274, + 2.46076, + 2.47058, + 2.46557, + 2.45525, + 2.48398, + 2.45081, + 2.47409, + 2.68078, + 2.56122, + 2.60827, + 2.5425, + 2.50496, + 2.4883, + 2.48589, + 2.47404, + 2.48121, + 2.47507, + 2.45793, + 2.45941, + 2.45624, + 2.46092, + 2.45602, + 2.46255, + 2.45272, + 2.45936, + 2.4459, + 2.42484, + 2.45679, + 2.44605, + 2.46919, + 2.46531, + 2.4194, + 2.48545, + 2.4578, + 2.44743, + 2.45089, + 2.45547, + 2.44483, + 2.46114, + 2.4749, + 2.4645, + 2.46158, + 2.46674, + 2.4581, + 2.4435, + 2.45596, + 2.49623, + 2.46442, + 2.47126, + 2.45498, + 2.44775, + 2.44513, + 2.47022, + 2.43861, + 2.43864, + 2.43908, + 2.44399, + 2.41899, + 2.45898, + 2.44765, + 2.38065, + 2.43301, + 2.41682, + 2.44297, + 2.45459, + 2.45838, + 2.42785, + 2.43634, + 2.46543, + 2.44646, + 2.42453, + 2.41897, + 2.44462, + 2.44677, + 2.42722, + 2.45637, + 2.40108, + 2.42734, + 2.44864, + 2.4148, + 2.4428, + 2.42374, + 2.42748, + 2.42454, + 2.43675, + 2.39771, + 2.41691, + 2.42674, + 2.41677, + 2.40544, + 2.41117, + 2.43502, + 2.42062, + 2.43591, + 2.45371, + 2.42327, + 2.41664, + 2.4086, + 2.44727, + 2.4208, + 2.43135, + 2.41342, + 2.42134, + 2.38586, + 2.41833, + 2.39067, + 2.39839, + 2.40338, + 2.37409, + 2.39872, + 2.40511, + 2.40637, + 2.40249, + 2.4125, + 2.38705, + 2.40897, + 2.42774, + 2.40223, + 2.40561, + 2.42666, + 2.41957, + 2.4042, + 2.42502, + 2.38898, + 2.41357, + 2.40634, + 2.41681, + 2.39775, + 2.40796, + 2.4032, + 2.37535, + 2.41899, + 2.38559, + 2.3912, + 2.39589, + 2.38517, + 2.40207, + 2.38928, + 2.4074, + 2.38044, + 2.3739, + 2.44088, + 2.43452, + 2.42374, + 2.42461, + 2.40463, + 2.41599, + 2.38614, + 2.39198, + 2.38546, + 2.39558, + 2.37887, + 2.40355, + 2.37008, + 2.36908, + 2.38129, + 2.38291, + 2.3617, + 2.38131, + 2.34726, + 2.40769, + 2.47172, + 2.39215, + 2.39478, + 2.37947, + 2.38038, + 2.37322, + 2.37966, + 2.38359, + 2.37862, + 2.3733, + 2.35494, + 2.38871, + 2.37306, + 
2.36491, + 2.35944, + 2.3974, + 2.37231, + 2.38846, + 2.39679, + 2.39883, + 2.40719, + 2.38082, + 2.37977, + 2.35828, + 2.36703, + 2.35675, + 2.3746, + 2.36973, + 2.38381, + 2.37212, + 2.38227, + 2.36506, + 2.37879, + 2.38272, + 2.38627, + 2.38176, + 2.34656, + 2.3249, + 2.36355, + 2.3385, + 2.36851, + 2.35391, + 2.37452, + 2.36621, + 2.37412, + 2.367, + 2.36341, + 2.36374, + 2.36245, + 2.34795, + 2.37278, + 2.35673, + 2.36032, + 2.34857, + 2.34147, + 2.3469, + 2.34856, + 2.37439, + 2.34246, + 2.38103, + 2.34807, + 2.3474, + 2.36175, + 2.35238, + 2.35391, + 2.37458, + 2.3662, + 2.33669, + 2.36054, + 2.33713, + 2.35158, + 2.35924, + 2.37368, + 2.32304, + 2.36873, + 2.34849, + 2.3527, + 2.34423, + 2.3653, + 2.36238, + 2.34018, + 2.35903, + 2.36851, + 2.36456, + 2.36398, + 2.35311, + 2.36877, + 2.36581, + 2.3668, + 2.3457, + 2.34705, + 2.33717, + 2.36028, + 2.35904, + 2.32872, + 2.35047, + 2.33366, + 2.34168, + 2.35846, + 2.34037, + 2.34776, + 2.35682, + 2.34883, + 2.36469, + 2.35768, + 2.3761, + 2.35571, + 2.34615, + 2.37258, + 2.35749, + 2.34662, + 2.36566, + 2.35248, + 2.35009, + 2.37637, + 2.35171, + 2.36242, + 2.3416, + 2.35399, + 2.35245, + 2.32678, + 2.36516, + 2.34922, + 2.35739, + 2.34631, + 2.34099, + 2.34122, + 2.33591, + 2.33375, + 2.3502, + 2.35637, + 2.35875, + 2.34344, + 2.35683, + 2.33736, + 2.34862, + 2.33042, + 2.35488, + 2.33463, + 2.34, + 2.32903, + 2.33785, + 2.32755, + 2.34972, + 2.32716, + 2.33863, + 2.33016, + 2.3454, + 2.36866, + 2.34091, + 2.3453, + 2.35851, + 2.33064, + 2.33069, + 2.3473, + 2.3267, + 2.30219, + 2.32526, + 2.33784, + 2.34165, + 2.30773, + 2.35806, + 2.32552, + 2.31563, + 2.34779, + 2.32626, + 2.3413, + 2.33368, + 2.32137, + 2.32749, + 2.35523, + 2.32796, + 2.33235, + 2.35171, + 2.30917, + 2.33306, + 2.35034, + 2.34312, + 2.31802, + 2.33234, + 2.34206, + 2.35341, + 2.34036, + 2.31576, + 2.31165, + 2.33731, + 2.29825, + 2.34914, + 2.32176, + 2.32853, + 2.33133, + 2.32918, + 2.3162, + 2.32797, + 2.33239, + 2.35176, + 2.30929, + 2.33318, + 2.35059, + 2.34281, + 2.31815, + 2.33244, + 2.34054, + 2.35382, + 2.34099, + 2.45863, + 2.32853, + 2.34513, + 2.30006, + 2.33872, + 2.30425, + 2.32087, + 2.32606, + 2.32697, + 2.31494, + 2.31995, + 2.31405, + 2.34618, + 2.30509, + 2.31754, + 2.29277, + 2.30321, + 2.33671, + 2.30639, + 2.32532, + 2.32695, + 2.33429, + 2.33889, + 2.3276, + 2.30499, + 2.3092, + 2.32644, + 2.30815, + 2.27373, + 2.3164, + 2.31897, + 2.27502, + 2.32455, + 2.31004, + 2.29922, + 2.30738, + 2.31113, + 2.30872, + 2.28772, + 2.31526, + 2.31436, + 2.30915, + 2.31281, + 2.29928, + 2.32958, + 2.30162, + 2.29196, + 2.29498, + 2.31804, + 2.34092, + 2.29856, + 2.32396, + 2.29105, + 2.31536, + 2.31527, + 2.2933, + 2.31634, + 2.30357, + 2.28604, + 2.30816, + 2.31288, + 2.27816, + 2.32034, + 2.3218, + 2.31551, + 2.30983, + 2.30641, + 2.31583, + 2.28101, + 2.31661, + 2.31236, + 2.28956, + 2.29766, + 2.31127, + 2.32213, + 2.31153, + 2.28038, + 2.29481, + 2.28165, + 2.29778, + 2.31807, + 2.28079, + 2.3001, + 2.28161, + 2.30097, + 2.31626, + 2.31123, + 2.29114, + 2.27838, + 2.30138, + 2.26487, + 2.27687, + 2.28385, + 2.27387, + 2.30489, + 2.32051, + 2.30122, + 2.31244, + 2.29363, + 2.30703, + 2.27247, + 2.28263, + 2.28871, + 2.29798, + 2.31719, + 2.29299, + 2.30643, + 2.30114, + 2.2748, + 2.26932, + 2.27572, + 2.28465, + 2.27429, + 2.31593, + 2.30536, + 2.2893, + 2.30021, + 2.30559, + 2.28467, + 2.28533, + 2.28006, + 2.28362, + 2.24851, + 3.13736, + 2.34349, + 2.31706, + 2.3095, + 2.27356, + 2.30032, + 2.27103, + 2.26529, + 2.27284, + 2.27818, + 2.27641, + 
2.28615, + 2.28124, + 2.28659, + 2.28398, + 2.25834, + 2.29008, + 2.29331, + 2.25314, + 2.26942, + 2.27118, + 2.26287, + 2.28015, + 2.28573, + 2.25666, + 2.2745, + 2.24479, + 2.29538, + 2.24132, + 2.29013, + 2.29946, + 2.26017, + 2.28032, + 2.25631, + 2.3803, + 2.28427, + 2.25475, + 2.27285, + 2.26157, + 2.26781, + 2.29452, + 2.28554, + 2.22876, + 2.23936, + 2.30079, + 2.2425, + 2.25008, + 2.27445, + 2.253, + 2.26435, + 2.26172, + 2.25706, + 2.28226, + 2.25494, + 2.25982, + 2.28013, + 2.29914, + 2.27967, + 2.27591, + 2.25077, + 2.26793, + 2.27734, + 2.26694, + 2.28532, + 2.26479, + 2.26003, + 2.2675, + 2.27342, + 2.26254, + 2.2557, + 2.25426, + 2.25718, + 2.24937, + 2.26807, + 2.28277, + 2.25364, + 2.24416, + 2.26937, + 2.24983, + 2.26268, + 2.2849, + 2.27594, + 2.25881, + 2.24596, + 2.2671, + 2.26164, + 2.24522, + 2.25231, + 2.25117, + 2.27033, + 2.27379, + 2.26479, + 2.253, + 2.2397, + 2.25166, + 2.24795, + 2.25577, + 2.27708, + 2.24945, + 2.25107, + 2.26486, + 2.26349, + 2.24775, + 2.25349, + 2.23204, + 2.27066, + 2.24562, + 2.27559, + 2.26674, + 2.23482, + 2.26067, + 2.2391, + 2.26454, + 2.25461, + 2.25512, + 2.26109, + 2.23266, + 2.27577, + 2.23838, + 2.25419, + 2.24642, + 2.26419, + 2.26339, + 2.27517, + 2.21192, + 2.25676, + 2.23074, + 2.25479, + 2.25587, + 2.26956, + 2.24416, + 2.2394, + 2.27883, + 2.27656, + 2.26203, + 2.25128, + 2.21602, + 2.25807, + 2.26626, + 2.27417, + 2.25492, + 2.23648, + 2.24943, + 2.25078, + 2.25182, + 2.26201, + 2.25115, + 2.26358, + 2.24804, + 2.25437, + 2.26313, + 2.22383, + 2.26468, + 2.25201, + 2.22707, + 2.2597, + 2.24138, + 2.25423, + 2.2621, + 2.24576, + 2.25048, + 2.24546, + 2.26679, + 2.2574, + 2.25016, + 2.26902, + 2.23078, + 2.23128, + 2.23901, + 2.23162, + 2.21177, + 2.24905, + 2.24624, + 2.24036, + 2.23302, + 2.24519, + 2.24625, + 2.30239, + 2.24714, + 2.25193, + 2.26974, + 2.2357, + 2.26385, + 2.26139, + 2.25835, + 2.2364, + 2.22322, + 2.25002, + 2.24943, + 2.23566, + 2.23905, + 2.23952, + 2.21951, + 2.24697, + 2.23577, + 2.23046, + 2.24607, + 2.25833, + 2.2677, + 2.23739, + 2.22333, + 2.23828, + 2.26917, + 2.2308, + 2.22023, + 2.26161, + 2.24056, + 2.22889, + 2.23077, + 2.2399, + 2.2547, + 2.23963, + 2.22847, + 2.22303, + 2.25143, + 2.24214, + 2.22738, + 2.2492, + 2.25634, + 2.23278, + 2.23352, + 2.22727, + 2.23876, + 2.22395, + 2.23621, + 2.22148, + 2.23977, + 2.23883, + 2.23685, + 2.24441, + 2.23751, + 2.2107, + 2.2459, + 2.24785, + 2.24492, + 2.22868, + 2.22927, + 2.20284, + 2.2295, + 2.23444, + 2.23173, + 2.20784, + 2.22443, + 2.25378, + 2.23748, + 2.22177, + 2.2047, + 2.21618, + 2.23123, + 2.24187, + 2.24805, + 2.23277, + 2.25623, + 2.21824, + 2.21982, + 2.22696, + 2.19515, + 2.25431, + 2.22253, + 2.22053, + 2.24161, + 2.21587, + 2.22632, + 2.24762, + 2.22113, + 2.24292, + 2.21537, + 2.23194, + 2.24111, + 2.21203, + 2.21692, + 2.20881, + 2.21976, + 2.19951, + 2.25468, + 2.20831, + 2.20419, + 2.23648, + 2.20517, + 2.22458, + 2.23751, + 2.19601, + 2.22394, + 2.21334, + 2.22503, + 2.19357, + 2.19617, + 2.2109, + 2.21355, + 2.23827, + 2.22569, + 2.2143, + 2.19897, + 2.19982, + 2.2469, + 2.20684, + 2.21741, + 2.20364, + 2.21216, + 2.21416, + 2.21838, + 2.21879, + 2.21076, + 2.19334, + 2.20261, + 2.19426, + 2.20914, + 2.22493, + 2.22029, + 2.21708, + 2.23053, + 2.22254, + 2.22852, + 2.2025, + 2.2155, + 2.19965, + 2.22, + 2.17151, + 2.19466, + 2.21291, + 2.23672, + 2.20658, + 2.1878, + 2.21051, + 2.19248, + 2.19171, + 2.23969, + 2.18496, + 2.22672, + 2.21179, + 2.21392, + 2.20582, + 2.20557, + 2.18895, + 2.21331, + 2.18822, + 2.21586, + 
2.17662, + 2.23091, + 2.22355, + 2.23878, + 2.19607, + 2.177, + 2.21798, + 2.18291, + 2.2016, + 2.19151, + 2.19461, + 2.19927, + 2.192, + 2.20628, + 2.20727, + 2.22149, + 2.23594, + 2.19696, + 2.20535, + 2.20999, + 2.19752, + 2.2445, + 2.24472, + 2.21003, + 2.21792, + 2.18449, + 2.21178, + 2.23166, + 2.20748, + 2.19934, + 2.20233, + 2.19846, + 2.20003, + 2.23812, + 2.21293, + 2.21961, + 2.20527, + 2.23464, + 2.22353, + 2.24253, + 2.20205, + 2.20585, + 2.20726, + 2.20917, + 2.23005, + 2.23013, + 2.23127, + 2.22704, + 2.18664, + 2.20769, + 2.21269, + 2.20319, + 2.20367, + 2.2201, + 2.22511, + 2.2097, + 2.18994, + 2.19614, + 2.18474, + 2.17118, + 2.21018, + 2.19686, + 2.22627, + 2.21873, + 2.20468, + 2.2358, + 2.22683, + 2.20412, + 2.20633, + 2.20238, + 2.21522, + 2.19515, + 2.2028, + 2.19795, + 2.18096, + 2.20727, + 2.1997, + 2.21317, + 2.22488, + 2.26399, + 2.18111, + 2.21143, + 2.20699, + 2.20514, + 2.19352, + 2.20582, + 2.22068, + 2.19581, + 2.18276, + 2.19513, + 2.20962, + 2.22388, + 2.19544, + 2.19637, + 2.18981, + 2.19623, + 2.21615, + 2.21421, + 2.22024, + 2.19223, + 2.21191, + 2.21632, + 2.18854, + 2.17312, + 2.18947, + 2.22201, + 2.22048, + 2.19933, + 2.19456, + 2.17664, + 2.18431, + 2.19267, + 2.21804, + 2.20361, + 2.18337, + 2.19178, + 2.18778, + 2.17158, + 2.19257, + 2.18221, + 2.19847, + 2.18699, + 2.18876, + 2.16976, + 2.20922, + 2.19614, + 2.18728, + 2.20266, + 2.19289, + 2.17091, + 2.19684, + 2.21724, + 2.16567, + 2.19022, + 2.19836, + 2.18485, + 2.19693, + 2.18865, + 2.20503, + 2.17384, + 2.1712, + 2.18654, + 2.21132, + 2.18745, + 2.20208, + 2.18395, + 2.1848, + 2.20709, + 2.19518, + 2.19361, + 2.17612, + 2.16723, + 2.20663, + 2.2079, + 2.1932, + 2.18473, + 2.17167, + 2.19394, + 2.19302, + 2.17634, + 2.20809, + 2.1691, + 2.16108, + 2.1884, + 2.21153, + 2.20744, + 2.19177, + 2.18037, + 2.19112, + 2.19616, + 2.19094, + 2.19146, + 2.17807, + 2.1947, + 2.1586, + 2.17623, + 2.19792, + 2.19234, + 2.19163, + 2.18969, + 2.21447, + 2.20134, + 2.20198, + 2.19537, + 2.20342, + 2.18, + 2.16158, + 2.18495, + 2.17806, + 2.17374, + 2.18037, + 2.21216, + 2.18542, + 2.19031, + 2.21129, + 2.20942, + 2.17665, + 2.18671, + 2.18516, + 2.16291, + 2.17659, + 2.16202, + 2.18568, + 2.20677, + 2.19447, + 2.20705, + 2.17714, + 2.18493, + 2.16299, + 2.17545, + 2.19509, + 2.17116, + 2.19052, + 2.20077, + 2.16712, + 2.1948, + 2.18042, + 2.18408, + 2.18575, + 2.1789, + 2.18597, + 2.18217, + 2.19605, + 2.19769, + 2.19696, + 2.18047, + 2.19096, + 2.17095, + 2.18572, + 2.15836, + 2.19251, + 2.18092, + 2.19628, + 2.19637, + 2.18255, + 2.18958, + 2.18544, + 2.16992, + 2.19092, + 2.19757, + 2.19692, + 2.18018, + 2.17467, + 2.18018, + 2.18806, + 2.17013, + 2.17568, + 2.17635, + 2.18172, + 2.20073, + 2.18673, + 2.15887, + 2.19047, + 2.14857, + 2.18644, + 2.17722, + 2.18688, + 2.15443, + 2.15883, + 2.15911, + 2.17995, + 2.17298, + 2.17851, + 2.17268, + 2.16566, + 2.15298, + 2.15932, + 2.17773, + 2.19447, + 2.17726, + 2.13966, + 2.17382, + 2.18571, + 2.15872, + 2.17109, + 2.19878, + 2.1465, + 2.18311, + 2.15326, + 2.12654, + 2.16625, + 2.1843, + 2.20163, + 2.15418, + 2.13907, + 2.17831, + 2.16712, + 2.13713, + 2.16055, + 2.19328, + 2.16491, + 2.16781, + 2.17474, + 2.16969, + 2.16316, + 2.16878, + 2.1769, + 2.17746, + 2.16496, + 2.15373, + 2.16553, + 2.1735, + 2.15272, + 2.16627, + 2.17682, + 2.16885, + 2.1828, + 2.15382, + 2.15212, + 2.15102, + 2.14325, + 2.17305, + 2.1356, + 2.16714, + 2.15555, + 2.16119, + 2.1712, + 2.17886, + 2.16028, + 2.15121, + 2.17744, + 2.15147, + 2.13448, + 2.14071, + 2.17768, + 2.17594, + 
2.13869, + 2.15645, + 2.16531, + 2.15147, + 2.16482, + 2.1595, + 2.15062, + 2.17233, + 2.15514, + 2.18615, + 2.20268, + 2.16471, + 2.14453, + 2.15228, + 2.14675, + 2.17867, + 2.15447, + 2.15482, + 2.18024, + 2.17748, + 2.18148, + 2.15387, + 2.17497, + 2.14583, + 2.13506, + 2.15334, + 2.1616, + 2.16861, + 2.16018, + 2.12502, + 2.15452, + 2.14351, + 2.15588, + 2.12787, + 2.16337, + 2.18621, + 2.14146, + 2.15627, + 2.188, + 2.16418, + 2.15986, + 2.15054, + 2.16858, + 2.17756, + 2.16659, + 2.17392, + 2.16967, + 2.17342, + 2.13234, + 2.17792, + 2.15698, + 2.18763, + 2.14509, + 2.13952, + 2.13901, + 2.19797, + 2.15779, + 2.16589, + 2.14065, + 2.13341, + 2.14516, + 2.19117, + 2.15529, + 2.17257, + 2.14044, + 2.15565, + 2.1437, + 2.15304, + 2.14632, + 2.16167, + 2.13667, + 2.14948, + 2.14201, + 2.16874, + 2.16466, + 2.16376, + 2.14861, + 2.174, + 2.16175, + 2.17386, + 2.15577, + 2.17167, + 2.13649, + 2.15809, + 2.15294, + 2.13937, + 2.15582, + 2.17657, + 2.17229, + 2.16359, + 2.17443, + 2.13591, + 2.14767, + 2.15529, + 2.13658, + 2.15147, + 2.13708, + 2.13482, + 2.13859, + 2.14746, + 2.16933, + 2.16783, + 2.13929, + 2.15073, + 2.12074, + 2.15631, + 2.15275, + 2.1551, + 2.15404, + 2.15029, + 2.13513, + 2.13395, + 2.17789, + 2.13861, + 2.14697, + 2.15728, + 2.1493, + 2.12088, + 2.14168, + 2.13093, + 2.16586, + 2.13017, + 2.12433, + 2.1473, + 2.17478, + 2.15107, + 2.14611, + 2.15852, + 2.17619, + 2.14707, + 2.1406, + 2.15638, + 2.15066, + 2.13429, + 2.13279, + 2.13147, + 2.16257, + 2.14616, + 2.14945, + 2.14813, + 2.14687, + 2.1412, + 2.12824, + 2.16432, + 2.15185, + 2.16026, + 2.15946, + 2.14282, + 2.15976, + 2.13651, + 2.14104, + 2.11914, + 2.14231, + 2.13941, + 2.12993, + 2.13585, + 2.14842, + 2.14437, + 2.12906, + 2.15912, + 2.14138, + 2.13916, + 2.1582, + 2.14697, + 2.10675, + 2.14707, + 2.14242, + 2.13025, + 2.1427, + 2.15357, + 2.15331, + 2.1475, + 2.12719, + 2.13866, + 2.12869, + 2.14753, + 2.11454, + 2.14203, + 2.14822, + 2.12628, + 2.14162, + 2.12982, + 2.14264, + 2.17107, + 2.15791, + 2.14374, + 2.13347, + 2.15014, + 2.13416, + 2.13864, + 2.12559, + 2.15583, + 2.13963, + 2.16299, + 2.12861, + 2.16321, + 2.14987, + 2.16199, + 2.13154, + 2.13184, + 2.13165, + 2.13287, + 2.14828, + 2.11313, + 2.11529, + 2.13551, + 2.11214, + 2.14401, + 2.12739, + 2.13151, + 2.1635, + 2.12853, + 2.13294, + 2.13775, + 2.14994, + 2.12092, + 2.1097, + 2.14613, + 2.11616, + 2.11584, + 2.10137, + 2.12805, + 2.1552, + 2.13622, + 2.11434, + 2.14826, + 2.13524, + 2.12116, + 2.156, + 2.14046, + 2.1169, + 2.18787, + 2.14709, + 2.13584, + 2.14864, + 2.13175, + 2.1632, + 2.11351, + 2.13574, + 2.1281, + 2.14272, + 2.1185, + 2.10652, + 2.13242, + 2.13186, + 2.12978, + 2.12412, + 2.13101, + 2.13118, + 2.14791, + 2.12874, + 2.15053, + 2.14159, + 2.13073, + 2.17532, + 2.16262, + 2.12112, + 2.15458, + 2.13775, + 2.11572, + 2.12178, + 2.13028, + 2.11059, + 2.13558, + 2.13028, + 2.13174, + 2.13716, + 2.15449, + 2.14044, + 2.13057, + 2.10441, + 2.12053, + 2.1156, + 2.11077, + 2.11363, + 2.13476, + 2.12949, + 2.13338, + 2.15169, + 2.14129, + 2.11756, + 2.12196, + 2.1343, + 2.13309, + 2.13331, + 2.13618, + 2.12234, + 2.12865, + 2.14467, + 2.11589, + 2.08846, + 2.12745, + 2.12271, + 2.12066, + 2.11856, + 2.13521, + 2.1229, + 2.13846, + 2.11947, + 2.10113, + 2.12818, + 2.14578, + 2.12999, + 2.09591, + 2.15252, + 2.14103, + 2.10953, + 2.10453, + 2.12981, + 2.10568, + 2.14137, + 2.1167, + 2.12884, + 2.09856, + 2.12673, + 2.1428, + 2.11999, + 2.13421, + 2.10442, + 2.10267, + 2.12809, + 2.1251, + 2.14083, + 2.12095, + 2.10503, + 2.13132, 
+ 2.10792, + 2.11294, + 2.13636, + 2.12487, + 2.12406, + 2.14356, + 2.10983, + 2.11546, + 2.1572, + 2.1044, + 2.11461, + 2.13109, + 2.11564, + 2.10409, + 2.11169, + 2.11803, + 2.1154, + 2.11063, + 2.12554, + 2.11805, + 2.13521, + 2.14865, + 2.12121, + 2.13089, + 2.10464, + 2.11936, + 2.12328, + 2.10598, + 2.10864, + 2.13501, + 2.11967, + 2.13568, + 2.09394, + 2.11256, + 2.12363, + 2.09259, + 2.10638, + 2.14164, + 2.10185, + 2.11282, + 2.13083, + 2.12451, + 2.13088, + 2.1092, + 2.12835, + 2.11962, + 2.1021, + 2.12448, + 2.10318, + 2.13581, + 2.12242, + 2.12717, + 2.12315, + 2.08382, + 2.13049, + 2.129, + 2.0975, + 2.09546, + 2.11273, + 2.10469, + 2.13763, + 2.11709, + 2.12221, + 2.11943, + 2.08926, + 2.12843, + 2.12156, + 2.10348, + 2.11548, + 2.13646, + 2.12677, + 2.13118, + 2.1086, + 2.11485, + 2.11909, + 2.115, + 2.1092, + 2.12265, + 2.09117, + 2.11124, + 2.13024, + 2.11834, + 2.09421, + 2.09779, + 2.09732, + 2.12408, + 2.10045, + 2.1264, + 2.1041, + 2.08844, + 2.14092, + 2.10422, + 2.14597, + 2.12946, + 2.12877, + 2.10539, + 2.08287, + 2.09877, + 2.10603, + 2.11889, + 2.11412, + 2.10104, + 2.08954, + 2.12212, + 2.12721, + 2.11811, + 2.12716, + 2.10983, + 2.1043, + 2.10093, + 2.10433, + 2.08868, + 2.0932, + 2.11133, + 2.102, + 2.12057, + 2.12435, + 2.12055, + 2.13042, + 2.10298, + 2.13085, + 2.10518, + 2.13111, + 2.11486, + 2.10522, + 2.12598, + 2.13453, + 2.1222, + 2.11624, + 2.11133, + 2.10147, + 2.10384, + 2.10432, + 2.10393, + 2.10091, + 2.09466, + 2.14762, + 2.11342, + 2.11501, + 2.11138, + 2.12211, + 2.1176, + 2.12071, + 2.08537, + 2.08995, + 2.1087, + 2.11347, + 2.08444, + 2.09329, + 2.11455, + 2.12055, + 2.12006, + 2.14608, + 2.10379, + 2.10506, + 2.11217, + 2.10095, + 2.09882, + 2.11324, + 2.11496, + 2.13605, + 2.08657, + 2.10991, + 2.12226, + 2.09807, + 2.10117, + 2.12436, + 2.1053, + 2.11567, + 2.13096, + 2.10153, + 2.07801, + 2.08331, + 2.11912, + 2.11735, + 2.10141, + 2.11338, + 2.10666, + 2.10381, + 2.09491, + 2.10761, + 2.07867, + 2.08435, + 2.11523, + 2.12342, + 2.09382, + 2.0941, + 2.10372, + 2.0878, + 2.09271, + 2.09765, + 2.11361, + 2.11692, + 2.06285, + 2.10545, + 2.09785, + 2.10162, + 2.08064, + 2.10131, + 2.10451, + 2.11204, + 2.09609, + 2.07794, + 2.11175, + 2.08183, + 2.07816, + 2.10186, + 2.09586, + 2.0795, + 2.10609, + 2.11111, + 2.11781, + 2.08618, + 2.11121, + 2.08754, + 2.10148, + 2.09663, + 2.10378, + 2.1119, + 2.09123, + 2.08248, + 2.10658, + 2.1088, + 2.08833, + 2.08138, + 2.09552, + 2.09427, + 2.09635, + 2.08094, + 2.0823, + 2.09447, + 2.09277, + 2.1113, + 2.12253, + 2.0925, + 2.07634, + 2.1246, + 2.08519, + 2.11255, + 2.0889, + 2.10186, + 2.0908, + 2.07362, + 2.12953, + 2.10626, + 2.09138, + 2.07346, + 2.10082, + 2.07363, + 2.09896, + 2.09724, + 2.12122, + 2.10643, + 2.1136, + 2.08744, + 2.07192, + 2.09029, + 2.09695, + 2.11094, + 2.08152, + 2.10928, + 2.09143, + 2.11409, + 2.08638, + 2.11304, + 2.09931, + 2.09718, + 2.10935, + 2.08924, + 2.11833, + 2.10592, + 2.08718, + 2.10077, + 2.10666, + 2.11755, + 2.07809, + 2.08113, + 2.09786, + 2.10007, + 2.12291, + 2.09514, + 2.11964, + 2.06755, + 2.12986, + 2.08769, + 2.10759, + 2.09586, + 2.11245, + 2.11148, + 2.11318, + 2.09481, + 2.08279, + 2.07567, + 2.10163, + 2.0974, + 2.09861, + 2.0872, + 2.11898, + 2.11822, + 2.11255, + 2.08386, + 2.08003, + 2.06289, + 2.08296, + 2.10865, + 2.11009, + 2.07553, + 2.10028, + 2.07597, + 2.09328, + 2.09893, + 2.07379, + 2.09902, + 2.08147, + 2.0839, + 2.08326, + 2.09449, + 2.09364, + 2.10083, + 2.09278, + 2.08758, + 2.08167, + 2.07538, + 2.08995, + 2.09279, + 2.12736, + 
2.10807, + 2.10184, + 2.08751, + 2.0847, + 2.09265, + 2.08386, + 2.07006, + 2.12153, + 2.08329, + 2.09103, + 2.09337, + 2.09789, + 2.09198, + 2.07388, + 2.09009, + 2.07877, + 2.09975, + 2.08558, + 2.08092, + 2.07796, + 2.11427, + 2.07645, + 2.08587, + 2.07994, + 2.09411, + 2.10426, + 2.09129, + 2.09493, + 2.076, + 2.07897, + 2.0684, + 2.06919, + 2.11733, + 2.05946, + 2.08593, + 2.06686, + 2.08705, + 2.08045, + 2.05353, + 2.07825, + 2.07442, + 2.08214, + 2.10407, + 2.08733, + 2.10553, + 2.09124, + 2.06818, + 2.09218, + 2.07988, + 2.08737, + 2.06578, + 2.07419, + 2.07227, + 2.10073, + 2.09684, + 2.0856, + 2.08269, + 2.07845, + 2.07241, + 2.0759, + 2.07716, + 2.06817, + 2.09202, + 2.06369, + 2.10273, + 2.08456, + 2.10201, + 2.05859, + 2.08902, + 2.07694, + 2.07087, + 2.11405, + 2.08858, + 2.08403, + 2.0973, + 2.09528, + 2.09896, + 2.07364, + 2.09369, + 2.07312, + 2.07375, + 2.07553, + 2.09223, + 2.06588, + 2.08612, + 2.07809, + 2.07918, + 2.10594, + 2.08003, + 2.07374, + 2.05965, + 2.07897, + 2.09012, + 2.08142, + 2.08566, + 2.07965, + 2.07752, + 2.06828, + 2.07113, + 2.08696, + 2.1019, + 2.08484, + 2.08401, + 2.07583, + 2.07677, + 2.05178, + 2.09273, + 2.09568, + 2.09049, + 2.09177, + 2.08109, + 2.09283, + 2.08877, + 2.07474, + 2.09682, + 2.07322, + 2.03588, + 2.08106, + 2.06506, + 2.08969, + 2.0882, + 2.08007, + 2.08811, + 2.08107, + 2.09831, + 2.07798, + 2.0824, + 2.09531, + 2.08053, + 2.08655, + 2.09363, + 2.08094, + 2.06883, + 2.05773, + 2.08156, + 2.07064, + 2.08566, + 2.0614, + 2.05996, + 2.0824, + 2.06653, + 2.06912, + 2.06263, + 2.07677, + 2.071, + 2.08375, + 2.07863, + 2.08268, + 2.07898, + 2.08983, + 2.08015, + 2.06793, + 2.08298, + 2.0856, + 2.07527, + 2.09334, + 2.0847, + 2.08023, + 2.05792, + 2.07577, + 2.08785, + 2.05772, + 2.08125, + 2.07732, + 2.0888, + 2.05139, + 2.08819, + 2.07745, + 2.0909, + 2.09667, + 2.06242, + 2.08731, + 2.05704, + 2.06665, + 2.06706, + 2.09522, + 2.07766, + 2.09186, + 2.08733, + 2.07577, + 2.06137, + 2.05698, + 2.05987, + 2.07703, + 2.08037, + 2.06197, + 2.08552, + 2.0674, + 2.0532, + 2.05848, + 2.04363, + 2.06823, + 2.08524, + 2.09389, + 2.06654, + 2.08576, + 2.08263, + 2.05954, + 2.07301, + 2.07322, + 2.08739, + 2.07438, + 2.08496, + 2.0897, + 2.0721, + 2.09638, + 2.0893, + 2.06878, + 2.08257, + 2.07654, + 2.0914, + 2.09669, + 2.08891, + 2.06168, + 2.10219, + 2.07219, + 2.07644, + 2.06758, + 2.05378, + 2.08748, + 2.06457, + 2.06228, + 2.06972, + 2.04294, + 2.06218, + 2.07311, + 2.07709, + 2.03163, + 2.08281, + 2.06533, + 2.06287, + 2.07793, + 2.08121, + 2.0489, + 2.09047, + 2.05149, + 2.07074, + 2.05586, + 2.07451, + 2.06613, + 2.07563, + 2.06583, + 2.04976, + 2.08328, + 2.0555, + 2.08469, + 2.0746, + 2.06961, + 2.08574, + 2.07199, + 2.08647, + 2.06953, + 2.09863, + 2.0604, + 2.05422, + 2.0866, + 2.09007, + 2.0587, + 2.06765, + 2.05642, + 2.05661, + 2.0532, + 2.05785, + 2.06507, + 2.09304, + 2.05373, + 2.04958, + 2.06994, + 2.06811, + 2.05625, + 2.08298, + 2.07656, + 2.07459, + 2.06211, + 2.07367, + 2.09634, + 2.07091, + 2.08139, + 2.09121, + 2.08477, + 2.05548, + 2.06353, + 2.05887, + 2.05781, + 2.05187, + 2.08027, + 2.06552, + 2.07838, + 2.06431, + 2.05816, + 2.06535, + 2.07466, + 2.02241, + 2.08052, + 2.06561, + 2.06828, + 2.06667, + 2.08978, + 2.05595, + 2.08019, + 2.08449, + 2.04339, + 2.04393, + 2.0677, + 2.06292, + 2.06163, + 2.05378, + 2.08155, + 2.06476, + 2.07416, + 2.06893, + 2.04094, + 2.07745, + 2.04948, + 2.06206, + 2.0877, + 2.05347, + 2.06698, + 2.06114, + 2.0844, + 2.0936, + 2.05004, + 2.08896, + 2.06247, + 2.07165, + 2.07894, + 
2.06254, + 2.0758, + 2.0261, + 2.06208, + 2.06331, + 2.06554, + 2.06187, + 2.07687, + 2.04845, + 2.05538, + 2.08791, + 2.06246, + 2.07582, + 2.07205, + 2.0628, + 2.06098, + 2.05988, + 2.05163, + 2.04249, + 2.0748, + 2.08031, + 2.06845, + 2.05917, + 2.05907, + 2.036, + 2.05774, + 2.05842, + 2.05498, + 2.05977, + 2.06068, + 2.04566, + 2.05765, + 2.07981, + 2.04186, + 2.07228, + 2.0539, + 2.06648, + 2.04815, + 2.0785, + 2.04572, + 2.04963, + 2.05432, + 2.06814, + 2.07715, + 2.06665, + 2.04256, + 2.06452, + 2.04815, + 2.08958, + 2.06202, + 2.06886, + 2.08891, + 2.04816, + 2.06448, + 2.0574, + 2.05137, + 2.05945, + 2.05611, + 2.09314, + 2.08976, + 2.04836, + 2.07046, + 2.08485, + 2.05261, + 2.08214, + 2.04824, + 2.06593, + 2.07158, + 2.04431, + 2.06139, + 2.10085, + 2.05848, + 2.05744, + 2.06079, + 2.07822, + 2.0495, + 2.06758, + 2.04932, + 2.09124, + 2.0749, + 2.07058, + 2.06367, + 2.07331, + 2.04826, + 2.07363, + 2.0815, + 2.05574, + 2.05042, + 2.06515, + 2.07594, + 2.06561, + 2.06576, + 2.07672, + 2.03732, + 2.05907, + 2.04405, + 2.06044, + 2.05181, + 2.0648, + 2.06622, + 2.04453, + 2.05617, + 2.08418, + 2.06629, + 2.04479, + 2.06395, + 2.05835, + 2.03672, + 2.05091, + 2.06807, + 2.05965, + 2.05244, + 2.04799, + 2.04888, + 2.057, + 2.08043, + 2.06741, + 2.0405, + 2.04681, + 2.02577, + 2.04165, + 2.05684, + 2.0439, + 2.08849, + 2.05031, + 2.05494, + 2.05735, + 2.08037, + 2.0477, + 2.04138, + 2.04735, + 2.06975, + 2.07014, + 2.04386, + 2.07404, + 2.04255, + 2.08597, + 2.06324, + 2.06999, + 2.09555, + 2.0326, + 2.05872, + 2.0551, + 2.03545, + 2.05595, + 2.07117, + 2.05541, + 2.04732, + 2.06458, + 2.07959, + 2.08091, + 2.04403, + 2.02611, + 2.03873, + 2.044, + 2.079, + 2.06113, + 2.04412, + 2.05382, + 2.04889, + 2.05078, + 2.06199, + 2.08954, + 2.04934, + 2.03859, + 2.03884, + 2.09246, + 2.03765, + 2.03391, + 2.05129, + 2.06733, + 2.06966, + 2.05459, + 2.02772, + 2.04357, + 2.05342, + 2.04329, + 2.04843, + 2.03818, + 2.06872, + 2.04616, + 2.04948, + 2.06677, + 2.05371, + 2.06039, + 2.04519, + 2.04977, + 2.07279, + 2.05874, + 2.08292, + 2.03485, + 2.06968, + 2.05161, + 2.04221, + 2.03732, + 2.05368, + 2.03358, + 2.07244, + 2.0632, + 2.05497, + 2.0562, + 2.05756, + 2.0577, + 2.04868, + 2.06997, + 2.05162, + 2.03733, + 2.04518, + 2.06017, + 2.05151, + 2.07674, + 2.04583, + 2.05183, + 2.05818, + 2.06713, + 2.05392, + 2.02621, + 2.06379, + 2.06328, + 2.03294, + 2.04615, + 2.0459, + 2.05443, + 2.0525, + 2.05937, + 2.04022, + 2.05148, + 2.0474, + 2.05293, + 2.0327, + 2.04478, + 2.06375, + 2.04269, + 2.05838, + 2.06087, + 2.04193, + 2.04159, + 2.05141, + 2.01906, + 2.07603, + 2.0459, + 2.02989, + 2.05661, + 2.05426, + 2.06415, + 2.06897, + 2.0431, + 2.04359, + 2.06131, + 2.04656, + 2.04744, + 2.04301, + 2.04993, + 2.03863, + 2.06721, + 2.05433, + 2.05453, + 2.04678, + 2.0337, + 2.05245, + 2.0544, + 2.06631, + 2.0562, + 2.07694, + 2.07045, + 2.03206, + 2.03025, + 2.03966, + 2.04263, + 2.05788, + 2.03113, + 2.02026, + 2.05902, + 2.04813, + 2.03334, + 2.03314, + 2.03019, + 2.04366, + 2.04676, + 2.03124, + 2.06234, + 2.04272, + 2.0443, + 2.06435, + 2.03257, + 2.06472, + 2.03341, + 2.05938, + 2.04276, + 2.02397, + 2.04648, + 2.04746, + 2.03116, + 2.0212, + 2.05963, + 2.04057, + 2.05554, + 2.04235, + 2.03245, + 2.07551, + 2.05013, + 2.02111, + 2.06155, + 2.01687, + 2.04069, + 2.02718, + 2.05838, + 2.05003, + 2.04928, + 2.07062, + 2.04298, + 2.04932, + 2.03092, + 2.03631, + 2.03075, + 2.03513, + 2.05442, + 2.04891, + 2.04352, + 2.04856, + 2.03406, + 2.04979, + 2.02269, + 2.05948, + 2.03842, + 2.06328, + 
2.05855, + 2.02, + 2.05978, + 2.02421, + 2.03968, + 2.06176, + 2.0099, + 2.032, + 2.0439, + 2.03357, + 2.01352, + 2.03896, + 2.04647, + 2.06164, + 2.02649, + 2.02286, + 2.02599, + 2.0478, + 2.02721, + 2.02933, + 2.034, + 2.03197, + 2.04919, + 2.05943, + 2.03878, + 2.0138, + 2.04394, + 2.03362, + 2.01361, + 2.03898, + 2.04646, + 2.0616, + 2.02648, + 2.02293, + 2.02588, + 2.04777, + 2.02733, + 2.02927, + 2.03505, + 2.04149, + 2.02404, + 2.06881, + 2.05541, + 2.03, + 2.06325, + 2.05576, + 2.03434, + 2.04154, + 2.05645, + 2.0754, + 2.03702, + 2.05585, + 2.05022, + 2.06735, + 2.02693, + 2.03098, + 2.03773, + 2.0409, + 2.02471, + 2.05199, + 2.04826, + 2.05405, + 2.04706, + 2.05467, + 2.04219, + 2.06868, + 2.02924, + 2.05956, + 2.0422, + 2.04101, + 2.02943, + 2.05235, + 2.01587, + 2.0456, + 2.06034, + 2.00481, + 2.02813, + 2.02533, + 2.02134, + 2.0237, + 2.03117, + 2.06598, + 2.05188, + 2.04349, + 2.02788, + 2.03197, + 2.04952, + 2.03158, + 2.02688, + 2.04042, + 2.06156, + 2.0179, + 2.045, + 2.0316, + 2.02006, + 2.01662, + 2.02275, + 2.05183, + 2.03239, + 2.03996, + 2.02567, + 2.05566, + 2.06439, + 2.04536, + 2.06814, + 2.05608, + 2.06716, + 2.05189, + 2.04294, + 2.06314, + 2.06828, + 2.03597, + 2.04591, + 2.05287, + 2.02678, + 2.01602, + 2.03592, + 2.03815, + 2.04632, + 2.01799, + 2.01732, + 2.05624, + 2.03592, + 2.02787, + 2.04043, + 2.02578, + 2.04396, + 2.03359, + 2.01349, + 2.03893, + 2.04647, + 2.06176, + 2.02653, + 2.0229, + 2.02598, + 2.04782, + 2.02717, + 2.02933, + 2.03659, + 2.04149, + 2.02393, + 2.0687, + 2.05545, + 2.02981, + 2.0632, + 2.05572, + 2.034, + 2.03291, + 2.03984, + 2.04409, + 2.02957, + 2.05496, + 2.06666, + 2.03022, + 2.04957, + 2.04188, + 2.04904, + 2.02569, + 2.04956, + 2.05682, + 2.04833, + 2.07465, + 2.04357, + 2.06222, + 2.0501, + 2.05913, + 2.05388, + 2.04926, + 2.05875, + 2.04815, + 2.0669, + 2.02762, + 2.06074, + 2.0521, + 2.02609, + 2.04725, + 2.02584, + 2.03384, + 2.02635, + 2.05591, + 2.05263, + 2.0394, + 2.08327, + 2.05314, + 2.02349, + 2.03445, + 2.04493, + 2.0415, + 2.03804, + 2.02113, + 2.03579, + 2.02991, + 2.04472, + 2.02853, + 2.04564, + 2.02667, + 2.05156, + 2.03525, + 2.03939, + 2.0331, + 2.01905, + 2.02494, + 2.03274, + 2.05049, + 2.07437, + 2.05395, + 2.0251, + 2.00919, + 2.0385, + 2.04835, + 2.06086, + 2.02653, + 2.06988, + 2.05402, + 2.04542, + 2.03796, + 2.05745, + 2.04767, + 2.03953, + 2.03321, + 2.03784, + 2.02143, + 2.02282, + 2.0503, + 2.02462, + 2.04714, + 2.04997, + 2.04745, + 2.02703, + 2.04497, + 2.03736, + 2.05468, + 2.02471, + 2.01144, + 2.04567, + 2.02565, + 2.02473, + 2.05988, + 2.05931, + 2.04323, + 2.02688, + 2.03698, + 2.03442, + 2.02243, + 2.03235, + 2.04507, + 2.06176, + 2.06495, + 2.05802, + 2.04039, + 2.04648, + 2.05026, + 2.04683, + 2.03191, + 2.04605, + 2.02344, + 2.02002, + 2.06325, + 2.05966, + 2.03333, + 2.05611, + 2.04358, + 2.04246, + 2.03001, + 2.03445, + 2.04782, + 2.02951, + 2.04397, + 2.03358, + 2.01351, + 2.03895, + 2.04651, + 2.06166, + 2.02649, + 2.02284, + 2.02604, + 2.04769, + 2.02719, + 2.0293, + 2.03509, + 2.04162, + 2.02407, + 2.06889, + 2.05542, + 2.03027, + 2.06325, + 2.05549, + 2.03415, + 2.04177, + 2.0565, + 2.0752, + 2.03714, + 2.05579, + 2.05008, + 2.06743, + 2.02718, + 2.03106, + 2.03823, + 2.04058, + 2.02439, + 2.05191, + 2.04824, + 2.05421, + 2.04726, + 2.05483, + 2.04195, + 2.06883, + 2.02931, + 2.05972, + 2.04222, + 2.04134, + 2.02953, + 2.05244, + 2.01613, + 2.04581, + 2.06051, + 2.00504, + 2.02815, + 2.02522, + 2.02139, + 2.02351, + 2.03101, + 2.06604, + 2.05178, + 2.04318, + 2.02806, + 2.03178, 
+ 2.05, + 2.03177, + 2.02702, + 2.04058, + 2.06143, + 2.01748, + 2.04501, + 2.03202, + 2.0204, + 2.01696, + 2.02264, + 2.05149, + 2.03235, + 2.03981, + 2.02884, + 2.05668, + 2.06515, + 2.0454, + 2.0681, + 2.05568, + 2.0666, + 2.05111, + 2.04279, + 2.06268, + 2.06802, + 2.03526, + 2.04529, + 2.05254, + 2.02608, + 2.01563, + 2.03574, + 2.03796, + 2.04604, + 2.01755, + 2.01751, + 2.05593, + 2.03588, + 2.02807, + 2.0402, + 2.02571, + 2.03594, + 2.06438, + 2.05428, + 2.02712, + 2.03171, + 2.01774, + 2.03147, + 2.05044, + 2.03008, + 2.04768, + 2.03269, + 2.05801, + 2.04298, + 2.03748, + 2.03136, + 2.04519, + 2.04821, + 2.02631, + 2.05053, + 2.0224, + 2.0479, + 2.02607, + 2.03992, + 2.02724, + 2.03698, + 2.01763, + 2.02642, + 2.04083, + 2.0115, + 2.04666, + 2.03939, + 2.06161, + 2.04346, + 2.0432, + 2.04746, + 2.03375, + 2.0242, + 2.0539, + 2.03408, + 2.00949, + 2.04119, + 2.06036, + 2.03598, + 2.03167, + 2.05879, + 2.03298, + 2.04085, + 2.02361, + 2.05218, + 2.04051, + 2.03673, + 2.03554, + 2.06707, + 2.04583, + 2.03151, + 2.04519, + 2.02609, + 2.03599, + 2.04496, + 2.05446, + 2.04293, + 2.04716, + 2.05103, + 2.0279, + 2.03785, + 2.0435, + 2.04388, + 2.05922, + 2.04812, + 2.01589, + 2.06412, + 2.0452, + 2.01446, + 2.0251, + 2.02092, + 2.04435, + 2.00331, + 2.05554, + 2.01352, + 2.04411, + 2.0167, + 2.06144, + 2.0096, + 2.02281, + 2.04379, + 1.99617, + 2.03532, + 2.03883, + 2.03948, + 2.03198, + 2.03645, + 2.00508, + 2.02869, + 2.03915, + 2.04765, + 2.04023, + 2.02952, + 2.02942, + 2.02132, + 2.01645, + 2.03758, + 2.0374, + 2.01416, + 2.02903, + 2.01951, + 2.02498, + 2.01839, + 2.00845, + 2.05646, + 2.05556, + 2.04136, + 2.02348, + 2.0104, + 2.02331, + 2.03587, + 2.02512, + 2.0444, + 2.04504, + 2.02787, + 2.03921, + 2.00719, + 2.03029, + 2.05034, + 2.04776, + 2.01935, + 2.016, + 2.03799, + 2.02506, + 2.02453, + 2.00851, + 2.04414, + 2.02549, + 2.03912, + 2.0233, + 2.04076, + 2.04595, + 2.01984, + 2.01842, + 2.03928, + 2.03865, + 2.00384, + 2.04796, + 2.02404, + 2.04256, + 2.03615, + 2.01126, + 1.99975, + 2.06016, + 2.03503, + 2.04612, + 2.03777, + 2.01213, + 2.03331, + 2.03364, + 2.02796, + 2.03139, + 2.02793, + 2.05595, + 2.0206, + 2.02698, + 2.04021, + 2.05276, + 2.03124, + 2.03408, + 2.05539, + 2.01042, + 2.02646, + 2.04477, + 2.03293, + 2.01808, + 2.05037, + 2.01895, + 2.0142, + 2.01123, + 2.00228, + 2.03452, + 2.03668, + 2.03795, + 2.04075, + 2.0338, + 2.02026, + 2.02876, + 2.05434, + 2.00376, + 2.0258, + 2.0425, + 2.02823, + 2.01461, + 2.02835, + 2.05312, + 2.0226, + 2.01029, + 2.0192, + 2.01975, + 2.02787, + 2.01463, + 2.02743, + 2.04852, + 2.02419, + 2.02586, + 2.04197, + 2.04883, + 2.02141, + 2.02771, + 2.01096, + 2.02227, + 2.036, + 2.03664, + 2.03069, + 2.0215, + 2.03019, + 2.04333, + 2.01624, + 2.02534, + 2.01035, + 2.03591, + 2.03826, + 2.02992, + 2.01607, + 2.04707, + 2.02211, + 2.04492, + 2.01874, + 2.01465, + 2.03188, + 2.03963, + 2.02568, + 2.04292, + 2.0253, + 2.03506, + 2.0252, + 2.0404, + 2.02266, + 2.0265, + 1.99374, + 2.03086, + 2.0363, + 2.00907, + 2.00728, + 2.01826, + 2.04402, + 2.02234, + 2.03909, + 2.01504, + 2.04241, + 2.01518, + 2.0381, + 2.00526, + 2.0232, + 2.02637, + 2.03172, + 2.01971, + 2.02255, + 2.02098, + 2.04131, + 2.00762, + 2.01746, + 2.05109, + 2.02451, + 2.03881, + 2.03773, + 2.03991, + 2.03909, + 2.05305, + 2.04252, + 2.03305, + 2.01598, + 2.01951, + 2.02095, + 2.02267, + 2.00457, + 2.04229, + 2.03862, + 2.01822, + 2.00703, + 2.02232, + 2.00473, + 2.02345, + 2.01431, + 2.03504, + 2.00394, + 2.03596, + 2.04642, + 2.03118, + 2.02664, + 2.0215, + 2.0014, + 
2.00328, + 2.01929, + 2.03842, + 2.02697, + 2.04953, + 2.03403, + 2.05436, + 2.03211, + 2.00312, + 2.01717, + 2.02091, + 2.02073, + 2.03551, + 2.02636, + 2.00197, + 2.0068, + 2.0264, + 2.01595, + 2.04482, + 2.00658, + 2.01882, + 2.01991, + 2.04207, + 2.03125, + 2.01756, + 2.03217, + 2.03539, + 2.0259, + 2.0113, + 2.01748, + 2.04184, + 2.02499, + 2.02478, + 2.02734, + 1.99993, + 2.02587, + 2.03754, + 2.0196, + 2.01352, + 2.01831, + 2.02719, + 1.97957, + 2.02861, + 2.00141, + 2.02072, + 2.03559, + 1.99199, + 2.03251, + 2.0117, + 2.00998, + 2.03799, + 2.04407, + 2.02457, + 2.03279, + 2.04851, + 2.03535, + 2.03706, + 2.0222, + 2.04565, + 2.02396, + 2.03269, + 2.02883, + 2.04738, + 2.00884, + 2.01463, + 2.06277, + 2.01061, + 2.02274, + 2.02174, + 2.03885, + 2.02175, + 2.00945, + 2.01173, + 1.99839, + 2.03348, + 2.02483, + 2.00947, + 2.03681, + 2.00672, + 2.0102, + 2.02135, + 2.02997, + 2.01814, + 2.03341, + 2.04105, + 2.02039, + 2.01078, + 2.0211, + 2.03391, + 2.04414, + 2.02224, + 2.01061, + 2.00997, + 2.01806, + 2.01049, + 2.04389, + 2.03295, + 2.02285, + 2.02985, + 2.00641, + 2.01114, + 2.00392, + 2.01181, + 1.99204, + 2.0043, + 2.05471, + 2.03352, + 2.03126, + 2.01104, + 2.03363, + 2.04537, + 2.01876, + 2.02748, + 2.00684, + 2.03696, + 2.03597, + 2.02328, + 2.02213, + 2.0123, + 2.05469, + 2.02028, + 2.02705, + 2.0123, + 2.01669, + 2.03614, + 2.02877, + 2.0248, + 2.00562, + 2.02101, + 2.02229, + 2.01241, + 2.01733, + 2.01033, + 2.0062, + 2.01695, + 2.02995, + 2.03489, + 2.03435, + 1.99674, + 2.03637, + 1.97473, + 2.0285, + 2.02166, + 2.00932, + 2.01303, + 2.02845, + 2.0121, + 2.01759, + 2.02185, + 2.02373, + 1.99442, + 2.01499, + 2.0251, + 2.01769, + 2.0369, + 2.03746, + 2.03999, + 2.02927, + 1.99617, + 2.02048, + 2.01224, + 2.03408, + 2.04855, + 2.03776, + 2.02121, + 2.02088, + 2.02342, + 2.02094, + 2.02883, + 2.0093, + 2.00349, + 2.00501, + 2.00206, + 2.02512, + 2.01474, + 2.02379, + 2.03325, + 2.01739, + 2.00359, + 2.01606, + 2.00935, + 2.0042, + 2.0391, + 2.01989, + 2.03264, + 2.04375, + 2.00157, + 2.03584, + 1.98595, + 1.99817, + 2.02562, + 1.99946, + 2.02634, + 2.01851, + 2.02183, + 2.00543, + 2.02697, + 2.02505, + 2.03926, + 2.0112, + 2.0265, + 2.01764, + 1.9907, + 2.01658, + 2.02287, + 2.02692, + 2.02423, + 2.01913, + 2.01748, + 2.03993, + 1.99342, + 1.99109, + 2.0284, + 2.00499, + 2.00884, + 2.02477, + 2.00956, + 2.02611, + 2.01225, + 2.02093, + 2.00794, + 2.01576, + 1.98959, + 1.97934, + 1.98179, + 1.99424, + 2.00574, + 2.01427, + 2.03237, + 1.98732, + 2.01259, + 2.00545, + 2.01827, + 1.98888, + 2.02968, + 2.02146, + 2.01335, + 2.02529, + 2.01897, + 2.0139, + 2.01508, + 2.03485, + 2.01784, + 2.01391, + 2.00587, + 2.02546, + 2.02624, + 2.01145, + 2.01581, + 2.0091, + 2.00749, + 1.99335, + 2.02129, + 2.03013, + 1.99746, + 2.03664, + 2.00065, + 2.02595, + 1.99041, + 2.00494, + 2.01986, + 2.00018, + 2.02406, + 2.01324, + 1.99281, + 2.02451, + 1.9776, + 2.00726, + 1.99596, + 1.99399, + 2.02369, + 2.02053, + 2.01494, + 1.99063, + 1.99063, + 1.99566, + 1.991, + 2.01349, + 2.00353, + 2.00615, + 2.0272, + 2.0215, + 2.00099, + 2.02368, + 2.00792, + 2.00765, + 2.0192, + 2.01224, + 2.01247, + 2.00374, + 2.03229, + 2.00682, + 2.0282, + 2.02579, + 2.02739, + 2.02702, + 2.04966, + 2.01156, + 2.01702, + 1.9772, + 2.02185, + 2.0135, + 1.99074, + 1.99859, + 2.01884, + 1.99996, + 2.01244, + 1.99301, + 2.01261, + 2.00005, + 2.00642, + 2.04607, + 1.98873, + 2.01114, + 2.00259, + 2.01393, + 1.99178, + 2.01583, + 1.98222, + 1.98603, + 2.01218, + 1.98422, + 1.99595, + 2.00548, + 2.02611, + 1.99943, + 
2.02716, + 2.02111, + 1.99357, + 1.99446, + 2.00576, + 1.99796, + 2.00541, + 2.02915, + 2.01934, + 2.00474, + 1.99838, + 2.01315, + 1.98912, + 1.99828, + 1.99746, + 2.0068, + 2.00148, + 2.00274, + 1.98749, + 1.98955, + 2.00288, + 2.00494, + 1.99547, + 1.98932, + 2.0152, + 2.02474, + 2.0319, + 2.02131, + 1.99666, + 2.02336, + 2.01748, + 2.01568, + 2.02383, + 2.01804, + 2.02191, + 1.99647, + 2.04113, + 1.99835, + 2.01757, + 2.00291, + 2.00795, + 1.9965, + 2.03833, + 2.03312, + 2.0159, + 2.00347, + 2.01815, + 1.99738, + 1.99865, + 2.02775, + 2.0118, + 2.01652, + 2.00365, + 1.99708, + 2.01478, + 2.0096, + 2.00053, + 1.99631, + 1.99676, + 2.0218, + 2.0036, + 1.99673, + 1.98744, + 2.0243, + 2.01288, + 2.02169, + 1.99193, + 1.99207, + 1.99385, + 1.98364, + 2.01838, + 2.0119, + 2.02606, + 2.00953, + 2.00799, + 1.998, + 2.0096, + 2.00063, + 2.00497, + 2.02134, + 2.02549, + 2.00817, + 2.00153, + 1.99363, + 2.01924, + 1.99448, + 1.99103, + 2.0123, + 2.00526, + 2.00536, + 1.99344, + 2.00591, + 2.00644, + 2.02668, + 1.9902, + 2.01414, + 2.00261, + 2.00526, + 2.01571, + 1.99488, + 2.01849, + 1.99226, + 2.00224, + 1.9959, + 1.98548, + 2.02315, + 2.0166, + 2.00439, + 2.01403, + 2.03553, + 2.03098, + 2.01426, + 1.99837, + 2.01447, + 2.00354, + 2.00783, + 1.9762, + 2.01315, + 1.99774, + 2.00346, + 1.98258, + 2.00968, + 2.00718, + 2.00375, + 1.98296, + 1.99634, + 1.99745, + 1.9936, + 2.01049, + 1.99214, + 2.02528, + 2.00782, + 2.00797, + 1.98618, + 1.99327, + 2.0102, + 1.98836, + 2.00511, + 1.98047, + 1.9917, + 2.01363, + 2.01026, + 2.01448, + 2.0123, + 2.03357, + 1.99884, + 2.01975, + 1.99185, + 1.99982, + 1.9869, + 2.00961, + 2.01793, + 2.0002, + 2.01777, + 2.01325, + 1.96991, + 2.0236, + 1.99445, + 1.98482, + 1.994, + 2.02403, + 1.99803, + 2.00216, + 2.02583, + 2.00572, + 2.01962, + 2.00463, + 2.00918, + 2.00188, + 1.97518, + 2.01101, + 1.98695, + 1.98816, + 2.02163, + 2.01294, + 1.99473, + 1.99036, + 1.99521, + 1.98195, + 1.99594, + 1.99873, + 2.00363, + 1.98531, + 1.96729, + 1.99796, + 1.99204, + 2.0046, + 2.00107, + 1.99765, + 2.02475, + 2.01531, + 1.99235, + 1.99118, + 2.02512, + 1.98952, + 2.00246, + 2.02206, + 2.00464, + 2.00631, + 2.00843, + 1.99384, + 2.01929, + 2.00276, + 1.99631, + 1.98986, + 2.01423, + 2.00843, + 2.00873, + 2.01348, + 2.00372, + 1.99799, + 2.02631, + 2.00887, + 1.99379, + 2.02305, + 2.01456, + 2.00642, + 2.0145, + 2.00127, + 2.02978, + 2.00249, + 1.99584, + 1.98228, + 2.01136, + 2.00759, + 2.00296, + 1.98735, + 2.01883, + 2.04026, + 2.01551, + 1.99944, + 2.02439, + 2.02915, + 2.01985, + 2.01156, + 1.99161, + 1.98691, + 1.99373, + 1.98676, + 2.01398, + 2.01424, + 1.9962, + 2.00248, + 1.98727, + 1.99739, + 2.00205, + 1.99389, + 1.98172, + 1.98394, + 2.00599, + 2.01084, + 1.998, + 2.01484, + 2.01506, + 2.01734, + 1.95867, + 2.00927, + 2.00067, + 1.9831, + 2.01456, + 2.00151, + 2.01657, + 2.00972, + 1.98019, + 1.99941, + 2.00454, + 1.99487, + 2.00749, + 2.0238, + 1.99856, + 1.98922, + 1.97861, + 1.98356, + 2.00019, + 1.9754, + 2.02016, + 2.01505, + 2.01497, + 2.02162, + 1.99191, + 1.97784, + 2.00152, + 2.00859, + 2.00281, + 1.99582, + 1.99982, + 2.00718, + 1.99105, + 1.99937, + 1.99601, + 2.00682, + 2.00383, + 2.01042, + 1.99529, + 1.98861, + 1.96993, + 2.01151, + 1.99493, + 1.98738, + 2.00192, + 2.00577, + 1.98318, + 1.99018, + 1.97786, + 1.98973, + 1.98514, + 1.99466, + 1.98597, + 2.01991, + 2.00111, + 1.99513, + 1.98609, + 1.99549, + 1.98568, + 1.98854, + 1.99407, + 1.99212, + 2.00774, + 2.0106, + 1.99599, + 2.01794, + 1.99698, + 1.99203, + 1.99825, + 1.97776, + 1.98067, + 
1.97192, + 2.0128, + 1.98777, + 2.00317, + 2.02269, + 1.98981, + 1.99107, + 2.00241, + 2.0089, + 1.99231, + 1.99466, + 2.0073, + 1.98429, + 2.00641, + 1.98484, + 1.97868, + 2.00488, + 1.99342, + 1.97961, + 1.99823, + 1.99831, + 1.99756, + 2.01837, + 1.9964, + 1.98817, + 1.9983, + 2.0072, + 1.95942, + 2.00587, + 2.0055, + 1.98522, + 1.98642, + 2.00471, + 1.96529, + 1.99443, + 1.9868, + 1.99511, + 1.99262, + 1.98121, + 1.99823, + 1.98101, + 1.99395, + 1.97918, + 2.01644, + 2.00973, + 1.98311, + 1.99397, + 1.98703, + 1.99056, + 2.02533, + 1.97577, + 2.00484, + 1.98652, + 2.00247, + 1.99383, + 1.99348, + 1.97358, + 1.99007, + 1.99383, + 2.00612, + 1.99098, + 1.98346, + 1.98504, + 2.02042, + 1.98966, + 1.98993, + 1.9653, + 1.98116, + 1.97851, + 1.98399, + 1.99803, + 1.99854, + 1.95326, + 2.01206, + 1.9883, + 1.97208, + 1.99392, + 1.96778, + 1.99153, + 1.99694, + 2.01723, + 1.99723, + 2.00538, + 1.98856, + 1.9838, + 1.99693, + 2.0042, + 1.99356, + 1.98675, + 2.00106, + 1.96893, + 1.99148, + 1.98955, + 1.99983, + 2.00057, + 1.99182, + 1.99221, + 1.98384, + 2.0264, + 1.95733, + 1.99858, + 2.00652, + 1.9867, + 1.99119, + 2.00533, + 1.98842, + 2.0015, + 2.01842, + 1.99, + 2.01771, + 1.9948, + 1.95961, + 2.01107, + 1.98955, + 1.99167, + 1.99483, + 1.99381, + 1.97862, + 1.98275, + 1.9984, + 1.97274, + 1.97934, + 1.97584, + 1.98197, + 2.01116, + 1.99772, + 2.00267, + 1.97656, + 1.98257, + 2.0175, + 1.98348, + 1.98509, + 2.02044, + 1.98954, + 1.99003, + 1.96536, + 1.98122, + 1.97847, + 1.98394, + 1.99805, + 1.99853, + 1.95332, + 2.01141, + 1.98813, + 1.97192, + 1.99398, + 1.9678, + 1.99162, + 1.99679, + 2.01708, + 1.99715, + 2.00533, + 1.9882, + 1.98388, + 1.99684, + 2.00421, + 1.99355, + 1.98684, + 2.00084, + 1.96871, + 1.99156, + 1.98973, + 2.00008, + 2.00073, + 1.99175, + 1.99211, + 1.98369, + 2.02626, + 1.95714, + 1.99944, + 2.00649, + 1.98683, + 1.99049, + 2.00547, + 1.9884, + 2.0012, + 2.01836, + 1.99022, + 2.01783, + 1.99463, + 1.95968, + 2.01089, + 1.98956, + 1.99176, + 1.99482, + 1.99385, + 1.97882, + 1.98243, + 1.99994, + 1.97235, + 1.97814, + 1.97438, + 1.98044, + 2.01053, + 1.99762, + 2.00222, + 1.97616, + 1.98231, + 2.01696, + 1.97877, + 2.00538, + 1.99873, + 1.97461, + 1.988, + 1.98626, + 1.99149, + 2.0059, + 1.98343, + 1.98994, + 1.97678, + 2.00177, + 2.02618, + 1.99016, + 2.00466, + 1.99777, + 1.97711, + 2.001, + 1.97949, + 2.00864, + 1.9868, + 1.98909, + 2.00929, + 1.97703, + 1.97347, + 1.9786, + 2.00475, + 1.96084, + 1.99219, + 1.99315, + 1.99878, + 1.98498, + 2.01073, + 1.97037, + 1.96679, + 2.00134, + 1.98144, + 2.00838, + 2.01109, + 2.00081, + 1.98762, + 1.99078, + 1.98843, + 2.00061, + 1.99174, + 1.98376, + 1.9658, + 1.98703, + 1.96768, + 1.98668, + 1.96562, + 1.99416, + 1.9771, + 1.98767, + 1.98824, + 1.98331, + 1.98867, + 1.98199, + 2.0128, + 2.00291, + 1.99064, + 1.98182, + 1.97698, + 1.97598, + 1.99764, + 2.01044, + 1.96939, + 2.02565, + 1.99414, + 1.97399, + 1.9811, + 1.98576, + 2.00258, + 1.97614, + 1.98381, + 1.98132, + 2.0054, + 1.99913, + 1.98434, + 1.97586, + 2.01047, + 1.96043, + 1.96485, + 1.96549, + 1.99039, + 1.97356, + 1.98531, + 1.9736, + 1.9881, + 2.00054, + 1.9915, + 1.98831, + 1.97704, + 1.99218, + 1.96905, + 1.96997, + 1.98602, + 2.00213, + 1.98472, + 2.00915, + 1.98712, + 1.97335, + 1.98435, + 1.98019, + 1.99907, + 1.98555, + 1.9794, + 1.9833, + 1.98759, + 1.9739, + 1.97072, + 1.99543, + 2.0046, + 1.98496, + 2.00707, + 1.99034, + 1.99959, + 1.98613, + 1.98244, + 2.01219, + 2.01181, + 1.99683, + 1.98363, + 1.99042, + 2.00333, + 1.98869, + 1.98984, + 1.97126, + 
1.99389, + 1.98415, + 1.97493, + 1.99372, + 1.97052, + 1.99946, + 1.98945, + 1.99372, + 2.00014, + 1.98606, + 1.99123, + 1.98091, + 1.97301, + 1.97437, + 1.98973, + 1.9945, + 1.98571, + 2.00405, + 1.97876, + 1.99408, + 1.98102, + 1.98366, + 1.96198, + 2.00596, + 2.00458, + 1.96415, + 2.0093, + 1.97088, + 1.99221, + 1.97215, + 1.99583, + 2.02515, + 1.97191, + 1.96611, + 1.9876, + 1.99635, + 1.99328, + 1.99522, + 1.97658, + 1.97281, + 1.98563, + 1.97909, + 2.00599, + 2.01052, + 2.0059, + 1.99928, + 2.00409, + 1.9995, + 1.9827, + 1.96514, + 2.00301, + 1.97483, + 1.98658, + 1.99226, + 2.00692, + 2.01763, + 1.97241, + 2.01049, + 1.99232, + 2.00145, + 2.00695, + 1.97336, + 1.9731, + 1.97484, + 1.97478, + 1.95817, + 1.99751, + 1.97089, + 2.00821, + 2.00549, + 1.98289, + 1.98547, + 1.9927, + 1.97683, + 1.98381, + 1.97642, + 1.99029, + 2.00601, + 1.97765, + 1.99498, + 1.99673, + 1.97494, + 1.98723, + 1.9711, + 1.98442, + 1.98201, + 1.96729, + 1.99265, + 1.99556, + 2.00511, + 1.97418, + 1.96359, + 1.97762, + 1.99707, + 1.97991, + 2.01571, + 2.00365, + 1.97552, + 1.96444, + 1.98316, + 1.97419, + 1.97064, + 1.99781, + 1.97707, + 1.95463, + 1.96371, + 1.96548, + 1.99055, + 1.97352, + 1.96774, + 1.97162, + 1.98249, + 1.98541, + 2.00375, + 1.98719, + 2.00367, + 1.987, + 2.00572, + 1.97439, + 1.98879, + 1.96491, + 1.97587, + 1.99069, + 1.9845, + 1.98752, + 1.96083, + 2.00084, + 1.98862, + 1.98287, + 1.96241, + 2.00414, + 1.97379, + 1.97531, + 1.9662, + 1.97974, + 1.97107, + 1.98823, + 2.00284, + 1.97251, + 1.98486, + 1.96668, + 1.98589, + 1.97159, + 1.99563, + 1.99258, + 1.97384, + 1.98965, + 1.98947, + 1.97668, + 2.00633, + 1.96894, + 1.98136, + 1.99015, + 1.95861, + 1.98573, + 1.99342, + 2.00597, + 1.97206, + 1.98381, + 1.99702, + 1.97439, + 1.98843, + 1.95719, + 1.98185, + 1.98241, + 1.97481, + 1.98377, + 1.98445, + 1.98054, + 1.9798, + 1.97749, + 1.98345, + 2.00732, + 1.98269, + 1.98211, + 1.98634, + 1.99513, + 1.99244, + 1.98704, + 1.96953, + 1.97854, + 1.97254, + 1.99002, + 1.98312, + 1.98762, + 1.97659, + 1.99247, + 1.96273, + 1.97902, + 2.01247, + 1.98425, + 1.97728, + 1.97485, + 1.98387, + 1.97321, + 1.99546, + 1.97729, + 1.99722, + 1.96483, + 1.96849, + 1.98311, + 1.97619, + 1.99799, + 1.96903, + 1.99348, + 1.98248, + 1.99898, + 1.98743, + 1.99462, + 1.97632, + 1.97272, + 1.98822, + 1.96384, + 1.96671, + 1.98833, + 1.97111, + 1.97248, + 1.99858, + 1.98472, + 1.93862, + 2.00782, + 1.96082, + 1.95402, + 1.96906, + 1.94578, + 1.98568, + 1.99701, + 1.98832, + 2.01203, + 2.00532, + 2.0272, + 1.97646, + 1.9788, + 1.98217, + 1.9725, + 1.97882, + 1.99233, + 2.00309, + 1.99261, + 1.98452, + 1.98313, + 1.98882, + 1.99501, + 1.99343, + 1.99932, + 2.02093, + 2.00584, + 2.00419, + 1.97697, + 1.99948, + 2.00158, + 1.97836, + 1.98128, + 1.94488, + 1.95429, + 1.98673, + 1.95489, + 1.99305, + 1.98063, + 1.98326, + 1.9997, + 1.97296, + 1.96523, + 1.98869, + 1.9884, + 1.97835, + 2.00525, + 1.97962, + 2.0051, + 1.99767, + 1.98315, + 2.00384, + 1.99682, + 1.99166, + 1.99472, + 1.97568, + 1.97426, + 1.97346, + 1.96715, + 2.00427, + 1.98328, + 1.97681, + 1.97897, + 1.96255, + 1.97755, + 1.99092, + 1.95698, + 1.97455, + 1.97819, + 1.99421, + 1.97128, + 1.99379, + 1.98866, + 2.00399, + 1.98818, + 1.98073, + 1.99928, + 1.97521, + 1.98082, + 1.98037, + 1.98469, + 1.99175, + 1.96804, + 1.97871, + 1.99209, + 1.99361, + 1.99632, + 1.97949, + 2.01014, + 2.00051, + 1.98244, + 1.96974, + 1.96948, + 1.97568, + 1.99661, + 1.96753, + 1.96725, + 1.99069, + 2.00053, + 2.00619, + 1.96723, + 1.97666, + 1.98268, + 2.01349, + 1.98079, + 
1.97488, + 1.97525, + 1.98251, + 1.96623, + 1.95799, + 2.00255, + 1.98963, + 1.94153, + 1.97789, + 1.99023, + 1.97405, + 1.98151, + 1.98136, + 1.99012, + 1.95989, + 1.96852, + 1.97087, + 1.97409, + 1.96884, + 1.96393, + 1.96448, + 1.96227, + 1.95257, + 1.99644, + 1.98548, + 1.96573, + 2.00275, + 1.97828, + 1.97782, + 1.97046, + 2.00472, + 1.98267, + 1.98218, + 1.98185, + 1.99811, + 1.98589, + 1.97235, + 1.97777, + 1.98526, + 2.00289, + 1.98397, + 1.97263, + 1.97974, + 1.97371, + 1.97122, + 1.94389, + 1.97888, + 1.9773, + 1.96434, + 1.99638, + 1.97667, + 1.98786, + 1.98576, + 1.96784, + 1.96557, + 1.98683, + 1.99695, + 1.98353, + 2.01931, + 1.98226, + 1.98531, + 1.98354, + 1.96481, + 1.95257, + 1.97466, + 1.95285, + 1.95801, + 1.99969, + 1.96933, + 1.97723, + 1.97527, + 1.97731, + 1.99963, + 1.99053, + 1.95466, + 1.97239, + 1.98604, + 1.9762, + 1.97383, + 1.9565, + 1.96983, + 1.96954, + 1.97003, + 1.99973, + 1.98099, + 1.98955, + 1.97763, + 2.01913, + 1.99743, + 1.9675, + 1.9957, + 1.9872, + 1.97773, + 1.95599, + 1.97118, + 1.97233, + 1.96631, + 1.96624, + 1.98136, + 1.97427, + 1.98497, + 1.97698, + 2.00865, + 1.96001, + 1.96002, + 1.97367, + 1.96463, + 2.00026, + 1.96533, + 1.98626, + 1.97479, + 1.98232, + 1.95663, + 1.98854, + 1.97536, + 1.96903, + 1.98223, + 1.96472, + 1.98033, + 1.97389, + 1.98336, + 1.98833, + 1.9987, + 1.95439, + 1.96558, + 1.97607, + 1.97454, + 1.95262, + 1.95987, + 1.954, + 1.99685, + 1.96699, + 1.97974, + 1.97317, + 1.98569, + 1.96072, + 1.97474, + 1.9908, + 1.96712, + 1.96168, + 1.98603, + 1.9706, + 1.96296, + 1.98109, + 1.99294, + 1.96026, + 1.97933, + 1.9638, + 1.98623, + 1.96743, + 1.97765, + 1.99254, + 1.98295, + 1.98242, + 1.97053, + 1.96738, + 1.99195, + 2.00885, + 1.97939, + 1.9566, + 1.97577, + 1.95175, + 1.9848, + 1.97406, + 1.95411, + 1.97756, + 1.95243, + 1.98551, + 2.0068, + 1.97829, + 2.00332, + 1.97448, + 1.97006, + 1.94414, + 2.0026, + 1.96999, + 1.97596, + 1.97469, + 1.99319, + 1.98729, + 1.98055, + 1.97456, + 1.98908, + 1.97522, + 1.99778, + 1.97824, + 1.98406, + 1.96976, + 1.98279, + 1.9757, + 1.96873, + 1.9817, + 1.98834, + 1.96731, + 1.99605, + 1.96234, + 2.00172, + 1.98201, + 1.98182, + 1.95661, + 1.95341, + 2.0007, + 1.98151, + 1.95422, + 1.98961, + 1.96653, + 1.97436, + 1.96444, + 1.97534, + 1.99044, + 1.96851, + 1.96761, + 1.9858, + 2.00054, + 1.9803, + 1.93778, + 1.97352 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 27308, + "step_interval": 5, + "values": [ + 406787200.0, + 413329568.0, + 407052480.0, + 416989632.0, + 410323520.0, + 410021248.0, + 411995328.0, + 407357856.0, + 414101504.0, + 411800608.0, + 413498080.0, + 414276960.0, + 417866400.0, + 411733120.0, + 407478656.0, + 414440672.0, + 409784800.0, + 418058752.0, + 411855488.0, + 411574688.0, + 406808480.0, + 412067776.0, + 403725760.0, + 411853472.0, + 411982112.0, + 418783136.0, + 413144736.0, + 418206656.0, + 417634336.0, + 414908320.0, + 411922880.0, + 411807968.0, + 420079360.0, + 416080672.0, + 411864256.0, + 406640672.0, + 409751616.0, + 419660832.0, + 411690336.0, + 423075008.0, + 418769376.0, + 412412512.0, + 415866784.0, + 408799616.0, + 413916160.0, + 412246336.0, + 410237088.0, + 405956160.0, + 414555232.0, + 401587936.0, + 418539328.0, + 400680832.0, + 415455552.0, + 415338304.0, + 407145152.0, + 425813632.0, + 414486336.0, + 413734432.0, + 414920608.0, + 409832832.0, + 415692448.0, + 417435904.0, + 406095040.0, + 420108832.0, + 423033856.0, + 413181056.0, + 405683648.0, + 405063040.0, + 413816288.0, + 412653504.0, + 414408320.0, + 411328576.0, + 
412473376.0, + 419713664.0, + 408136928.0, + 415401120.0, + 422008128.0, + 417607904.0, + 411945568.0, + 415441920.0, + 408914016.0, + 412276000.0, + 420512128.0, + 411698688.0, + 410132576.0, + 404293952.0, + 409915840.0, + 412033376.0, + 418736480.0, + 415841632.0, + 416787616.0, + 417623008.0, + 415008416.0, + 415184320.0, + 413671232.0, + 408672480.0, + 421492544.0, + 409910848.0, + 406736032.0, + 414192800.0, + 413315424.0, + 413576064.0, + 408547648.0, + 408758208.0, + 410485152.0, + 419429056.0, + 409613728.0, + 420058144.0, + 406988256.0, + 416838432.0, + 410861728.0, + 407744768.0, + 415494368.0, + 412770400.0, + 414825536.0, + 409707296.0, + 417417600.0, + 401726240.0, + 411154880.0, + 417653472.0, + 409985696.0, + 414131424.0, + 417554592.0, + 408021280.0, + 409726880.0, + 420839456.0, + 406524288.0, + 406664448.0, + 403959776.0, + 413346016.0, + 410637920.0, + 406835872.0, + 411553728.0, + 413174752.0, + 418957472.0, + 406976160.0, + 408011104.0, + 409916896.0, + 404499520.0, + 406043456.0, + 411387360.0, + 416618912.0, + 417623232.0, + 417757952.0, + 400602624.0, + 420249632.0, + 406106016.0, + 409226176.0, + 418259168.0, + 408199552.0, + 414846176.0, + 419465664.0, + 415344256.0, + 411813472.0, + 407994176.0, + 407125856.0, + 406659520.0, + 411253536.0, + 413794944.0, + 402926144.0, + 406463872.0, + 409343200.0, + 415471328.0, + 411349920.0, + 410214592.0, + 412656192.0, + 416121856.0, + 402495488.0, + 415543456.0, + 412362944.0, + 417293728.0, + 414206720.0, + 403667680.0, + 420230432.0, + 411909248.0, + 414727552.0, + 407619008.0, + 411388416.0, + 410712896.0, + 413299808.0, + 418516704.0, + 412281760.0, + 412607168.0, + 412804096.0, + 413614240.0, + 411514752.0, + 411307904.0, + 411640832.0, + 414032320.0, + 413002496.0, + 417101088.0, + 413952064.0, + 401503680.0, + 415830624.0, + 412305536.0, + 417205664.0, + 418911456.0, + 410804160.0, + 414292192.0, + 421360960.0, + 409510368.0, + 407718336.0, + 418434784.0, + 415501024.0, + 416456448.0, + 407883520.0, + 409808256.0, + 406268768.0, + 412507840.0, + 414443840.0, + 406872384.0, + 410414624.0, + 412307360.0, + 412224448.0, + 423211488.0, + 410218304.0, + 409435264.0, + 422575328.0, + 409614784.0, + 409876000.0, + 412678848.0, + 414339040.0, + 413259168.0, + 418441376.0, + 415439552.0, + 410649312.0, + 413625376.0, + 412105632.0, + 406747776.0, + 412796352.0, + 422808672.0, + 412335680.0, + 409918880.0, + 418168192.0, + 407248768.0, + 421091680.0, + 412351008.0, + 405050624.0, + 413690368.0, + 406975264.0, + 410766016.0, + 406797536.0, + 416946592.0, + 410418368.0, + 417159840.0, + 415488544.0, + 410965056.0, + 415145344.0, + 412029536.0, + 410545856.0, + 414676704.0, + 407003776.0, + 406290464.0, + 413774272.0, + 418395648.0, + 407660864.0, + 410702272.0, + 408532352.0, + 416211008.0, + 414019680.0, + 410964352.0, + 412772064.0, + 406845984.0, + 421453184.0, + 407243136.0, + 418324864.0, + 420898432.0, + 414071136.0, + 419867392.0, + 406654304.0, + 403937152.0, + 409323328.0, + 415401248.0, + 408025344.0, + 412492192.0, + 417086848.0, + 416585664.0, + 410076384.0, + 418486784.0, + 412341792.0, + 419367168.0, + 411339808.0, + 407453568.0, + 414365728.0, + 424172576.0, + 405656032.0, + 417934912.0, + 406252864.0, + 404356960.0, + 410034560.0, + 415793760.0, + 414010432.0, + 410778400.0, + 407958240.0, + 413821312.0, + 414367392.0, + 413903072.0, + 413366400.0, + 414591872.0, + 421833216.0, + 398499584.0, + 414836000.0, + 411075744.0, + 406082048.0, + 423628352.0, + 411251072.0, + 408523904.0, + 409533376.0, + 418847968.0, + 
412557376.0, + 409682464.0, + 408153344.0, + 409853312.0, + 415246272.0, + 407611456.0, + 409596320.0, + 414811424.0, + 416653984.0, + 414182176.0, + 411456896.0, + 415729824.0, + 414284576.0, + 414552960.0, + 423904608.0, + 410941792.0, + 414327808.0, + 419368352.0, + 411004832.0, + 416402144.0, + 409224032.0, + 413425696.0, + 405841152.0, + 406990304.0, + 410957248.0, + 408911808.0, + 416568352.0, + 407686880.0, + 412850912.0, + 406259584.0, + 420194784.0, + 411532000.0, + 417609120.0, + 416324000.0, + 415915328.0, + 423913472.0, + 416845696.0, + 409687168.0, + 408028128.0, + 411651712.0, + 409627808.0, + 412446400.0, + 410097792.0, + 419470976.0, + 412213632.0, + 405062560.0, + 413286816.0, + 416026720.0, + 411178336.0, + 416384992.0, + 408819424.0, + 411716640.0, + 413256512.0, + 406920448.0, + 410459776.0, + 404630752.0, + 407452640.0, + 412446816.0, + 404843776.0, + 412171488.0, + 416333632.0, + 410598720.0, + 412641088.0, + 405499872.0, + 414033120.0, + 411059424.0, + 415228192.0, + 410451200.0, + 420925920.0, + 410109248.0, + 414626208.0, + 405184256.0, + 412837728.0, + 407421856.0, + 411829184.0, + 416949952.0, + 405071200.0, + 412798720.0, + 414545024.0, + 404589184.0, + 416566880.0, + 409887776.0, + 407853536.0, + 419503104.0, + 408241408.0, + 414366208.0, + 410865760.0, + 409671552.0, + 407412128.0, + 405344416.0, + 406116320.0, + 414143744.0, + 403607424.0, + 414142912.0, + 415673600.0, + 406569568.0, + 420790400.0, + 421954880.0, + 413295776.0, + 411373568.0, + 405562784.0, + 406776288.0, + 407774912.0, + 413368736.0, + 409940160.0, + 417265920.0, + 412326912.0, + 412850176.0, + 416114272.0, + 410305056.0, + 413233312.0, + 415643840.0, + 410721024.0, + 407892800.0, + 413281344.0, + 417676352.0, + 414757216.0, + 407144704.0, + 412571648.0, + 410562784.0, + 412431008.0, + 418018176.0, + 411571200.0, + 411001152.0, + 414144160.0, + 403607552.0, + 414145344.0, + 415665824.0, + 406544032.0, + 420767488.0, + 421935424.0, + 413279392.0, + 411361120.0, + 405553664.0, + 406771264.0, + 407769120.0, + 413361824.0, + 409936768.0, + 417264416.0, + 412322560.0, + 412841664.0, + 416104448.0, + 410295520.0, + 413224832.0, + 415650720.0, + 410728832.0, + 407901152.0, + 413285216.0, + 417686272.0, + 414756288.0, + 407149056.0, + 412574752.0, + 410562816.0, + 412428864.0, + 418014848.0, + 411564064.0, + 410994624.0, + 407481760.0, + 410382976.0, + 408615200.0, + 408963136.0, + 412064448.0, + 415628032.0, + 415482368.0, + 412489280.0, + 413669696.0, + 408792640.0, + 414654784.0, + 409911424.0, + 401795520.0, + 414730592.0, + 414187392.0, + 406833792.0, + 408289280.0, + 415823360.0, + 414213664.0, + 405439840.0, + 418203392.0, + 411081824.0, + 410598208.0, + 408771808.0, + 414753760.0, + 410664384.0, + 417661760.0, + 403180512.0, + 423176192.0, + 411655232.0, + 410551776.0, + 417440992.0, + 414267488.0, + 417515072.0, + 406846144.0, + 414729920.0, + 413723552.0, + 405860128.0, + 416585056.0, + 406517728.0, + 412943392.0, + 415103904.0, + 413974336.0, + 407210496.0, + 414474176.0, + 404680608.0, + 412680768.0, + 405762144.0, + 403747680.0, + 419327552.0, + 418386048.0, + 416171072.0, + 416360736.0, + 417899840.0, + 406583168.0, + 411792640.0, + 411024672.0, + 406752736.0, + 406842432.0, + 411752832.0, + 412666592.0, + 410520608.0, + 419612192.0, + 409827488.0, + 416138880.0, + 413036352.0, + 410743104.0, + 407264992.0, + 408345632.0, + 410203552.0, + 415865856.0, + 408225216.0, + 420168608.0, + 408398144.0, + 417352128.0, + 405625280.0, + 410145248.0, + 414633632.0, + 405963744.0, + 412626048.0, + 
410865024.0, + 412027616.0, + 407961568.0, + 421254464.0, + 407638144.0, + 407696768.0, + 412132800.0, + 417663840.0, + 404961600.0, + 416850112.0, + 416556512.0, + 404697312.0, + 415590848.0, + 407828704.0, + 408035040.0, + 419311200.0, + 410567520.0, + 409822688.0, + 416804544.0, + 408840928.0, + 418794560.0, + 414157664.0, + 407072800.0, + 409210368.0, + 404472704.0, + 420725024.0, + 406982784.0, + 416654656.0, + 411591360.0, + 406167200.0, + 420043872.0, + 406453856.0, + 408489088.0, + 418341600.0, + 406755488.0, + 407638400.0, + 407697376.0, + 412132992.0, + 417660160.0, + 404960832.0, + 416851680.0, + 416560576.0, + 404707392.0, + 415598432.0, + 407836800.0, + 408040960.0, + 419315776.0, + 410574176.0, + 409830880.0, + 416810848.0, + 408781632.0, + 418782976.0, + 414165856.0, + 407091072.0, + 409238592.0, + 404495328.0, + 420747168.0, + 407005024.0, + 416681920.0, + 411595360.0, + 406162944.0, + 420033984.0, + 406441760.0, + 408478720.0, + 418332544.0, + 406750976.0, + 414735808.0, + 414474976.0, + 409515840.0, + 417684640.0, + 416059008.0, + 411617792.0, + 416979200.0, + 408480352.0, + 415941056.0, + 407626464.0, + 412022944.0, + 416289216.0, + 413785408.0, + 418021248.0, + 408511328.0, + 410923904.0, + 408390944.0, + 418289216.0, + 406867808.0, + 416811072.0, + 410955648.0, + 408530368.0, + 412900544.0, + 409033664.0, + 416651296.0, + 411760160.0, + 414473184.0, + 411769728.0, + 418971136.0, + 416610368.0, + 408131296.0, + 416810080.0, + 402708128.0, + 412841536.0, + 411517216.0, + 414437952.0, + 412923616.0, + 403544256.0, + 406644064.0, + 406387584.0, + 414336192.0, + 411493984.0, + 411756992.0, + 420298208.0, + 409809184.0, + 408256608.0, + 414552832.0, + 413182784.0, + 410785728.0, + 419386048.0, + 406448000.0, + 423340416.0, + 415421536.0, + 414696512.0, + 404446592.0, + 413190560.0, + 413374784.0, + 414593568.0, + 409145280.0, + 411784864.0, + 406730848.0, + 413557408.0, + 411929152.0, + 405978784.0, + 409845248.0, + 416652864.0, + 416609792.0, + 412913088.0, + 406085856.0, + 414405856.0, + 410309088.0, + 410516704.0, + 411279456.0, + 399318688.0, + 416109952.0, + 409008320.0, + 412100448.0, + 408904960.0, + 416812192.0, + 409706400.0, + 417021856.0, + 413425280.0, + 410688928.0, + 406638208.0, + 407053760.0, + 415109440.0, + 415483488.0, + 412891968.0, + 410448640.0, + 415244704.0, + 413658784.0, + 409372928.0, + 408230048.0, + 415841952.0, + 415542912.0, + 405444480.0, + 411262592.0, + 408095936.0, + 414814080.0, + 418206560.0, + 413436160.0, + 412992928.0, + 410922720.0, + 413137312.0, + 406111872.0, + 413145760.0, + 417047808.0, + 410370464.0, + 407832128.0, + 412872704.0, + 413201568.0, + 412345408.0, + 413109024.0, + 405144640.0, + 405829760.0, + 411015968.0, + 411314048.0, + 417690304.0, + 406290688.0, + 408407168.0, + 418117920.0, + 416025440.0, + 403458560.0, + 412439296.0, + 417282496.0, + 408072928.0, + 410581440.0, + 415703072.0, + 415324032.0, + 416606048.0, + 406160256.0, + 410540224.0, + 401445248.0, + 413973856.0, + 409098976.0, + 412462976.0, + 403681664.0, + 411389632.0, + 409947808.0, + 418828896.0, + 408873920.0, + 409302880.0, + 418188192.0, + 412517600.0, + 410344544.0, + 411640000.0, + 407261024.0, + 404093888.0, + 410984736.0, + 400889568.0, + 411950880.0, + 412493408.0, + 407747776.0, + 413701120.0, + 409582336.0, + 408507488.0, + 406885664.0, + 417050432.0, + 412286720.0, + 415426944.0, + 414444864.0, + 404300032.0, + 415707168.0, + 414249856.0, + 415407264.0, + 410956608.0, + 413761056.0, + 410058848.0, + 410680704.0, + 403680992.0, + 409937152.0, + 
414854208.0, + 412045664.0, + 417461632.0, + 412588608.0, + 420142624.0, + 417332864.0, + 408357440.0, + 416706560.0, + 411769664.0, + 416028960.0, + 414781568.0, + 416319424.0, + 414125824.0, + 412868256.0, + 409322368.0, + 410261120.0, + 408841600.0, + 415018496.0, + 413197632.0, + 417073952.0, + 414226464.0, + 414086816.0, + 411827136.0, + 415918272.0, + 409309440.0, + 410951392.0, + 412000992.0, + 421333152.0, + 404112864.0, + 421464160.0, + 418736352.0, + 411955424.0, + 413171328.0, + 418679552.0, + 409491008.0, + 406307744.0, + 409476480.0, + 407457920.0, + 413756576.0, + 414218144.0, + 416857088.0, + 414353152.0, + 409134240.0, + 414500832.0, + 406113120.0, + 414014720.0, + 411596224.0, + 413613152.0, + 412591808.0, + 411899968.0, + 416905184.0, + 413171584.0, + 411109920.0, + 424177440.0, + 413255808.0, + 415786016.0, + 410507488.0, + 411603296.0, + 412848320.0, + 417891872.0, + 407918624.0, + 403705888.0, + 409799488.0, + 418483936.0, + 407261408.0, + 409961280.0, + 413813472.0, + 402364032.0, + 413965152.0, + 398619360.0, + 414599104.0, + 415418496.0, + 413128736.0, + 414610560.0, + 416327296.0, + 409055008.0, + 414406688.0, + 413943904.0, + 412198944.0, + 411482784.0, + 413936064.0, + 411311168.0, + 403627776.0, + 415113440.0, + 409896640.0, + 413178912.0, + 410947520.0, + 409122304.0, + 414565056.0, + 415758080.0, + 410009184.0, + 418842176.0, + 418043712.0, + 408647072.0, + 407298464.0, + 412500704.0, + 422720288.0, + 417781952.0, + 416399552.0, + 417658496.0, + 408441664.0, + 421993632.0, + 417242592.0, + 406882208.0, + 408385536.0, + 410465728.0, + 411182848.0, + 409240768.0, + 420936320.0, + 421754944.0, + 407375616.0, + 407539360.0, + 411239040.0, + 408215488.0, + 409821152.0, + 412036768.0, + 407748608.0, + 410371040.0, + 409701664.0, + 422094752.0, + 407115584.0, + 417167424.0, + 413288672.0, + 409692480.0, + 420254624.0, + 420238848.0, + 402528320.0, + 410110240.0, + 407377792.0, + 413355616.0, + 410748160.0, + 411811360.0, + 394848320.0, + 422398752.0, + 410414560.0, + 414341536.0, + 403565216.0, + 411259168.0, + 411366752.0, + 409918784.0, + 409797568.0, + 407940064.0, + 418257472.0, + 415937344.0, + 408053568.0, + 410109984.0, + 408823296.0, + 409609568.0, + 416034112.0, + 409625344.0, + 412102464.0, + 417440128.0, + 411499392.0, + 417293600.0, + 414915360.0, + 414638240.0, + 411904576.0, + 416484576.0, + 416336224.0, + 412024736.0, + 420829440.0, + 414841280.0, + 405728576.0, + 422429472.0, + 405695968.0, + 414646272.0, + 412796736.0, + 409195520.0, + 408443616.0, + 411745856.0, + 409837184.0, + 410584384.0, + 414691648.0, + 412066336.0, + 407948032.0, + 414240704.0, + 411940864.0, + 406331488.0, + 416399616.0, + 409247872.0, + 412430592.0, + 412137312.0, + 410661632.0, + 406256448.0, + 410502208.0, + 415798528.0, + 411738272.0, + 413735456.0, + 410926400.0, + 407244448.0, + 413563104.0, + 413446752.0, + 414356448.0, + 411820768.0, + 419979008.0, + 407168800.0, + 415378848.0, + 413764064.0, + 407911008.0, + 417100224.0, + 400664832.0, + 412822944.0, + 411881056.0, + 413938400.0, + 417650976.0, + 416622656.0, + 409991328.0, + 415532096.0, + 407115104.0, + 405693472.0, + 403989152.0, + 405524896.0, + 417688224.0, + 410342592.0, + 412831008.0, + 415239424.0, + 407164416.0, + 414277888.0, + 418553344.0, + 413891552.0, + 413112896.0, + 413442432.0, + 406271936.0, + 417946688.0, + 412232000.0, + 404715040.0, + 415177632.0, + 406917696.0, + 401542208.0, + 413586144.0, + 416087104.0, + 412009856.0, + 418889856.0, + 406139392.0, + 415863872.0, + 411935744.0, + 415969536.0, + 
415512672.0, + 410451104.0, + 415264224.0, + 419201984.0, + 415957472.0, + 411062432.0, + 411268832.0, + 410520480.0, + 409327520.0, + 411109600.0, + 408886272.0, + 418082080.0, + 413936256.0, + 412638176.0, + 406230368.0, + 414091328.0, + 415699072.0, + 419364576.0, + 406069984.0, + 406295776.0, + 420449568.0, + 416379104.0, + 409316544.0, + 420823776.0, + 404547168.0, + 411281792.0, + 406051104.0, + 414846816.0, + 409199328.0, + 405090528.0, + 410601408.0, + 411000544.0, + 407046688.0, + 413628832.0, + 409460192.0, + 412354656.0, + 412639360.0, + 406230272.0, + 414090848.0, + 413135328.0, + 408592576.0, + 415381472.0, + 411061952.0, + 406021152.0, + 407417312.0, + 412042304.0, + 401732800.0, + 412034944.0, + 413013280.0, + 411671808.0, + 414052096.0, + 406646912.0, + 412723296.0, + 418110592.0, + 414825504.0, + 400923232.0, + 406290176.0, + 411916864.0, + 405706240.0, + 409212448.0, + 405911488.0, + 412483328.0, + 411705632.0, + 414675104.0, + 407481984.0, + 414027200.0, + 416551872.0, + 415750272.0, + 403483648.0, + 410502528.0, + 411331360.0, + 417783776.0, + 414624576.0, + 415714496.0, + 410190656.0, + 412778784.0, + 411114656.0, + 403733344.0, + 425629760.0, + 414116352.0, + 407972352.0, + 413478144.0, + 413768928.0, + 412927136.0, + 409713152.0, + 405392640.0, + 414133536.0, + 417484640.0, + 406474880.0, + 416604544.0, + 404454656.0, + 417528640.0, + 410242592.0, + 412910784.0, + 411525568.0, + 410256832.0, + 413854976.0, + 414780512.0, + 410807712.0, + 418133376.0, + 407462656.0, + 406418464.0, + 419102432.0, + 414808256.0, + 416596320.0, + 415926880.0, + 407450176.0, + 413364896.0, + 406537920.0, + 410979008.0, + 415708320.0, + 414475840.0, + 408255968.0, + 410307200.0, + 407299424.0, + 407976128.0, + 407831392.0, + 426551776.0, + 418021056.0, + 419212992.0, + 415467008.0, + 413498464.0, + 418373504.0, + 410553568.0, + 405214080.0, + 415341728.0, + 412864064.0, + 415497920.0, + 414048416.0, + 412196320.0, + 406169536.0, + 409683744.0, + 413723328.0, + 412323648.0, + 409598656.0, + 411558624.0, + 406827328.0, + 411510752.0, + 411926464.0, + 406827968.0, + 415451712.0, + 405978784.0, + 403861088.0, + 420599872.0, + 407671904.0, + 402235296.0, + 414055296.0, + 410003712.0, + 406041344.0, + 403981632.0, + 418595136.0, + 413900832.0, + 411205024.0, + 409972800.0, + 408655296.0, + 411394720.0, + 414434624.0, + 412015520.0, + 416597632.0, + 405979136.0, + 421419104.0, + 417429024.0, + 408709760.0, + 411811232.0, + 416481216.0, + 420598912.0, + 407672512.0, + 402235456.0, + 414054784.0, + 410005056.0, + 406040800.0, + 403983392.0, + 418596032.0, + 413902016.0, + 411203296.0, + 409972992.0, + 408654752.0, + 411316256.0, + 414445632.0, + 412035680.0, + 416609088.0, + 405993024.0, + 421428096.0, + 417433024.0, + 408711968.0, + 411811168.0, + 416480288.0, + 407109216.0, + 406314304.0, + 417575488.0, + 412714624.0, + 414520960.0, + 422196128.0, + 415706784.0, + 411734176.0, + 410722656.0, + 409332128.0, + 403014624.0, + 410644448.0, + 408423872.0, + 404717856.0, + 417809440.0, + 413385952.0, + 410551360.0, + 416090176.0, + 418011264.0, + 414745088.0, + 406070944.0, + 412089248.0, + 415224288.0, + 413866112.0, + 415380096.0, + 413101792.0, + 413683648.0, + 412534016.0, + 412169088.0, + 408649376.0, + 410575616.0, + 413011552.0, + 409895840.0, + 412050112.0, + 405428000.0, + 416176576.0, + 414112320.0, + 411594080.0, + 415684992.0, + 406517952.0, + 411042464.0, + 410219008.0, + 411653952.0, + 414974336.0, + 419418080.0, + 406841056.0, + 415087232.0, + 419770368.0, + 415165856.0, + 414039264.0, + 
414520288.0, + 415471328.0, + 415148704.0, + 411513920.0, + 410708896.0, + 414162944.0, + 418914016.0, + 413238400.0, + 407973120.0, + 412226080.0, + 402654976.0, + 408145152.0, + 418581344.0, + 407750880.0, + 414617152.0, + 408159168.0, + 416370624.0, + 415928512.0, + 415441632.0, + 413011552.0, + 416887808.0, + 414649600.0, + 406928640.0, + 417463328.0, + 411969664.0, + 405575616.0, + 411237184.0, + 418786976.0, + 414282784.0, + 414012512.0, + 421826656.0, + 405228832.0, + 405841248.0, + 416138816.0, + 407559200.0, + 415596544.0, + 411477088.0, + 408120576.0, + 411998688.0, + 421387712.0, + 401538368.0, + 415624576.0, + 411668448.0, + 403466880.0, + 416273344.0, + 407900064.0, + 415062880.0, + 410174304.0, + 417021056.0, + 428308928.0, + 410876288.0, + 409520864.0, + 411546944.0, + 406365856.0, + 410481792.0, + 417363296.0, + 408862304.0, + 414896832.0, + 413008480.0, + 410001632.0, + 415189664.0, + 414575840.0, + 420688512.0, + 413844448.0, + 412753120.0, + 412982816.0, + 410559968.0, + 416677376.0, + 407556448.0, + 408970912.0, + 406257696.0, + 408577088.0, + 413755360.0, + 416010624.0, + 414017472.0, + 414866080.0, + 407566560.0, + 410864864.0, + 419209024.0, + 418458016.0, + 410257600.0, + 415472096.0, + 407857056.0, + 412651168.0, + 417658432.0, + 412973600.0, + 410834976.0, + 412531584.0, + 414706496.0, + 413310912.0, + 410388960.0, + 417169376.0, + 407421728.0, + 414063616.0, + 408397536.0, + 408519296.0, + 414151584.0, + 403736192.0, + 411350944.0, + 419264608.0, + 406796064.0, + 409791360.0, + 407589024.0, + 410226400.0, + 411496608.0, + 414742656.0, + 413582624.0, + 408933248.0, + 416197728.0, + 419163584.0, + 414516320.0, + 421198496.0, + 410648000.0, + 413048576.0, + 413772576.0, + 401896032.0, + 415950848.0, + 416890112.0, + 409845728.0, + 402167520.0, + 406009440.0, + 413937728.0, + 408716800.0, + 410700928.0, + 413359520.0, + 417827456.0, + 407050464.0, + 414642272.0, + 416742176.0, + 415734208.0, + 403233888.0, + 408140352.0, + 411291008.0, + 407275296.0, + 417494208.0, + 412821152.0, + 410127744.0, + 412566144.0, + 407011712.0, + 416768544.0, + 411127168.0, + 419286464.0, + 415237952.0, + 403092224.0, + 411566272.0, + 410920064.0, + 408421888.0, + 416843200.0, + 406914048.0, + 414898656.0, + 412997024.0, + 413349856.0, + 414633856.0, + 412580928.0, + 408039328.0, + 417959680.0, + 415261664.0, + 416177760.0, + 405368864.0, + 410751744.0, + 412790784.0, + 413006112.0, + 416136192.0, + 405308480.0, + 410043520.0, + 414319424.0, + 405945952.0, + 406758528.0, + 411313472.0, + 406728768.0, + 415162272.0, + 415656672.0, + 417167424.0, + 411780992.0, + 415948512.0, + 414952608.0, + 408808224.0, + 411716640.0, + 404715520.0, + 417157472.0, + 412566400.0, + 410789152.0, + 412864064.0, + 410606528.0, + 409157952.0, + 407948192.0, + 410900128.0, + 419708032.0, + 404843840.0, + 412640352.0, + 419903200.0, + 424133056.0, + 404346752.0, + 411173472.0, + 416984192.0, + 412138496.0, + 408965856.0, + 410460576.0, + 418112608.0, + 415509856.0, + 405721152.0, + 407817632.0, + 411394240.0, + 408118976.0, + 409042144.0, + 402485056.0, + 417881568.0, + 413495808.0, + 415056768.0, + 418288448.0, + 414467264.0, + 412031456.0, + 408842496.0, + 406866752.0, + 418174144.0, + 413456992.0, + 411006048.0, + 415911232.0, + 402049952.0, + 416931200.0, + 413970720.0, + 415466976.0, + 411631488.0, + 413886304.0, + 416071040.0, + 407335488.0, + 410249760.0, + 420416832.0, + 406301504.0, + 410387584.0, + 409385632.0, + 409196832.0, + 415780800.0, + 422217024.0, + 418600704.0, + 416300672.0, + 407333856.0, + 
409533408.0, + 418033280.0, + 415407360.0, + 419612864.0, + 408260800.0, + 416454464.0, + 408735392.0, + 412928928.0, + 413711648.0, + 412617280.0, + 409546400.0, + 409979680.0, + 408545952.0, + 411313472.0, + 405336832.0, + 406970528.0, + 415920288.0, + 405727360.0, + 413457184.0, + 403532448.0, + 411317408.0, + 411360416.0, + 412315744.0, + 409030400.0, + 410558816.0, + 406092416.0, + 412566880.0, + 408197120.0, + 411911584.0, + 411155200.0, + 418523520.0, + 407061600.0, + 405064160.0, + 416187744.0, + 416192032.0, + 410655200.0, + 411246144.0, + 413204000.0, + 417195456.0, + 420749888.0, + 405779968.0, + 416103328.0, + 407018624.0, + 414524640.0, + 405293248.0, + 406541600.0, + 406945600.0, + 413623136.0, + 414572608.0, + 412146240.0, + 410737568.0, + 417239328.0, + 419405664.0, + 412509088.0, + 413554304.0, + 407086816.0, + 408855488.0, + 417070592.0, + 408946464.0, + 414534720.0, + 401662976.0, + 409642656.0, + 411211552.0, + 416893856.0, + 408541664.0, + 413814368.0, + 418817504.0, + 420705984.0, + 410736032.0, + 413955968.0, + 413418208.0, + 415320032.0, + 409672576.0, + 407198816.0, + 410964352.0, + 410353760.0, + 406880096.0, + 412727872.0, + 401732256.0, + 418271328.0, + 409351296.0, + 408754976.0, + 415226176.0, + 407825888.0, + 408653792.0, + 415771296.0, + 402553952.0, + 413453216.0, + 416467072.0, + 407665504.0, + 411260160.0, + 414475904.0, + 407920608.0, + 415790688.0, + 407459840.0, + 414817952.0, + 410033120.0, + 408214080.0, + 412158720.0, + 421948064.0, + 419996672.0, + 408512672.0, + 413122240.0, + 419484000.0, + 410063008.0, + 403108832.0, + 413669472.0, + 418633856.0, + 410876192.0, + 413980768.0, + 408199936.0, + 420128032.0, + 422401760.0, + 413406944.0, + 416335680.0, + 418586816.0, + 404216928.0, + 407996128.0, + 411172608.0, + 414184736.0, + 411180352.0, + 413033664.0, + 410072736.0, + 410428256.0, + 411608224.0, + 411179552.0, + 410125408.0, + 408956000.0, + 416491296.0, + 418332800.0, + 408952128.0, + 410032480.0, + 415864256.0, + 414027552.0, + 404950112.0, + 403128160.0, + 412242592.0, + 410491872.0, + 418445696.0, + 418528896.0, + 415546400.0, + 405308512.0, + 413236032.0, + 413057792.0, + 414054752.0, + 411334080.0, + 411977440.0, + 419346944.0, + 422696512.0, + 418111200.0, + 413165408.0, + 408591232.0, + 411180768.0, + 411891776.0, + 412547648.0, + 412614144.0, + 407733376.0, + 413129792.0, + 414097888.0, + 420883648.0, + 407706016.0, + 417759872.0, + 407569984.0, + 414966624.0, + 409372000.0, + 411054976.0, + 406504160.0, + 416825888.0, + 412147872.0, + 410194688.0, + 416626496.0, + 406960896.0, + 413014176.0, + 420288032.0, + 413616928.0, + 417692288.0, + 413332224.0, + 415002016.0, + 417877248.0, + 415546432.0, + 415646272.0, + 420121280.0, + 417948000.0, + 413164640.0, + 418486624.0, + 406207936.0, + 415000544.0, + 407112640.0, + 415200608.0, + 417214272.0, + 415140992.0, + 411136352.0, + 422206784.0, + 410856896.0, + 406010784.0, + 418315296.0, + 414234752.0, + 411561056.0, + 416129056.0, + 411089408.0, + 404215552.0, + 411018368.0, + 408019648.0, + 412223456.0, + 415269056.0, + 411960704.0, + 408578400.0, + 401909856.0, + 414824672.0, + 403048384.0, + 409670720.0, + 409082144.0, + 401939904.0, + 407654528.0, + 412529312.0, + 423408288.0, + 413573600.0, + 420621856.0, + 406756896.0, + 415775904.0, + 411422112.0, + 412043904.0, + 413662016.0, + 412162304.0, + 425109024.0, + 409776256.0, + 406453568.0, + 407947584.0, + 412233152.0, + 412104768.0, + 403309728.0, + 417805472.0, + 414457728.0, + 406951968.0, + 414498624.0, + 422965984.0, + 407377952.0, + 
408374784.0, + 406376832.0, + 408520640.0, + 411607296.0, + 412678560.0, + 415551616.0, + 413230912.0, + 411958816.0, + 408714144.0, + 411806944.0, + 417081920.0, + 407238880.0, + 409748864.0, + 407716864.0, + 417937952.0, + 416423872.0, + 416592000.0, + 407355328.0, + 412408672.0, + 411665728.0, + 416709440.0, + 414633280.0, + 408626752.0, + 413042464.0, + 407127712.0, + 410180160.0, + 409107808.0, + 405647744.0, + 416609760.0, + 407224640.0, + 416332352.0, + 413701728.0, + 419689728.0, + 407962080.0, + 411231424.0, + 408937216.0, + 415902912.0, + 412646912.0, + 411165312.0, + 416003232.0, + 409245920.0, + 413049664.0, + 412192000.0, + 417156128.0, + 412322656.0, + 413019840.0, + 408328512.0, + 418740960.0, + 414037600.0, + 413227680.0, + 408863968.0, + 413429696.0, + 412272768.0, + 408354592.0, + 410018048.0, + 414275552.0, + 410053056.0, + 409671776.0, + 408628608.0, + 418114144.0, + 412176288.0, + 407783040.0, + 412221984.0, + 410460864.0, + 415365664.0, + 408752800.0, + 415049024.0, + 417620640.0, + 405218944.0, + 411778304.0, + 402078112.0, + 411237216.0, + 421871328.0, + 408958336.0, + 410339264.0, + 410191808.0, + 419335104.0, + 410230176.0, + 418002912.0, + 412247904.0, + 414668960.0, + 418759776.0, + 402500160.0, + 407161920.0, + 420004896.0, + 413730048.0, + 416853152.0, + 411215232.0, + 411973056.0, + 422411040.0, + 410644736.0, + 401468352.0, + 417161664.0, + 410576384.0, + 415596064.0, + 408981152.0, + 403784960.0, + 412242304.0, + 413934336.0, + 410848416.0, + 412823872.0, + 410805664.0, + 410719040.0, + 406750272.0, + 413446848.0, + 410757216.0, + 401959040.0, + 412531776.0, + 409531520.0, + 408071392.0, + 409007520.0, + 411040512.0, + 415904064.0, + 408043488.0, + 420725408.0, + 410648608.0, + 411845792.0, + 410573120.0, + 414150720.0, + 408975072.0, + 406062848.0, + 410830048.0, + 410452000.0, + 408349440.0, + 416822592.0, + 415581440.0, + 416723520.0, + 420185856.0, + 411942432.0, + 408999552.0, + 419375008.0, + 404652000.0, + 415069312.0, + 417294784.0, + 408961600.0, + 416891712.0, + 416416800.0, + 408785120.0, + 418825024.0, + 409200416.0, + 426124416.0, + 415360320.0, + 413513824.0, + 417340544.0, + 419229056.0, + 412179872.0, + 411151488.0, + 414296608.0, + 413235520.0, + 409998496.0, + 410361856.0, + 418995488.0, + 404643008.0, + 413266112.0, + 412490144.0, + 422580800.0, + 413359104.0, + 412878048.0, + 423259744.0, + 416096096.0, + 411227488.0, + 414875680.0, + 410961344.0, + 414185760.0, + 417355872.0, + 408661760.0, + 412761920.0, + 411469120.0, + 410972928.0, + 415782368.0, + 413452608.0, + 423340480.0, + 410733088.0, + 419495200.0, + 411307072.0, + 409314848.0, + 415942080.0, + 410806464.0, + 407406368.0, + 421401568.0, + 414137152.0, + 411310432.0, + 412850048.0, + 410706016.0, + 418044320.0, + 412023328.0, + 405552832.0, + 415811616.0, + 417596192.0, + 416760992.0, + 413630112.0, + 409692320.0, + 414986080.0, + 409880800.0, + 409965856.0, + 411709056.0, + 417301600.0, + 414699648.0, + 405652544.0, + 412530624.0, + 408071712.0, + 413591616.0, + 422813408.0, + 406044064.0, + 416552800.0, + 412311808.0, + 417666720.0, + 412147584.0, + 404668960.0, + 419801984.0, + 413544416.0, + 401322976.0, + 410224224.0, + 421619808.0, + 412179104.0, + 413390944.0, + 416861888.0, + 408555584.0, + 413307296.0, + 415378368.0, + 418108448.0, + 406972864.0, + 415326432.0, + 410880160.0, + 413732544.0, + 430673664.0, + 406762016.0, + 401276704.0, + 407826816.0, + 410279680.0, + 412088832.0, + 403155456.0, + 413544192.0, + 410535872.0, + 417206624.0, + 413280448.0, + 409459008.0, + 
+ ... (remaining per-step values of the preceding metric series, each roughly 4.1e8, elided) ...
+       ]
+     },
+     "mem-allocated-bytes": {
+       "start_step": 0,
+       "end_step": 27308,
+       "step_interval": 5,
+       "values": [
+ ... (per-step values, each roughly 1.74e10 bytes, elided; series continues) ...
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447854080.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447829504.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447989248.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448280064.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447100416.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447215104.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447051264.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447411712.0, + 17446985728.0, + 17446985728.0, + 17447903232.0, + 17448509440.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17448402944.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17448513536.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17448435712.0, + 17447903232.0, + 17448075264.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17448034304.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17448058880.0, + 17448013824.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17448611840.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447903232.0, + 17447911424.0, + 17447903232.0, + 17447903232.0, + 
17447903232.0, + 17447903232.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447788544.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447837696.0, + 17447059456.0, + 17447124992.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447149568.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447346176.0, + 17447059456.0, + 17447059456.0, + 17447428096.0, + 17447354368.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447325696.0, + 17447059456.0, + 17447059456.0, + 17447903232.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447948288.0, + 17447059456.0, + 17447059456.0, + 17447256064.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447059456.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447976960.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447702528.0, + 17447485440.0, + 17447485440.0, + 17447485440.0, + 17447911424.0, + 17447485440.0, + 17447485440.0, + 17448067072.0, + 17447485440.0, + 17447485440.0, + 17447124992.0, + 17446985728.0, + 17447043072.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447690240.0, + 17447927808.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447178240.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447018496.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447653376.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447813120.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448026112.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17447112704.0, + 17446985728.0, + 17446985728.0, + 17447960576.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447477248.0, + 17446985728.0, + 17446985728.0, + 17447727104.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447907328.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447985152.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447944192.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447677952.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447346176.0, + 17446985728.0, + 17447370752.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447272448.0, + 17447227392.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447346176.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447354368.0, + 17446985728.0, + 17446985728.0, + 17447403520.0, + 17446985728.0, + 17446985728.0, + 17446983680.0, + 17447649280.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447137280.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447305216.0, + 17447092224.0, + 17446985728.0, + 17446985728.0, + 17447239680.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17447034880.0, + 17447575552.0, + 17447206912.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447436288.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447362560.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447698432.0, + 17447534592.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447755776.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447612416.0, + 17447342080.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447813120.0, + 17446985728.0, + 17447567360.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447206912.0, + 17447526400.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447297024.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448128512.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447387136.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447559168.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447108608.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17448128512.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447264256.0, + 17447084032.0, + 17447084032.0, + 17448116224.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447825408.0, + 17447084032.0, + 17447428096.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447354368.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447403520.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447809024.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17448075264.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17447084032.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448984576.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447260160.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447673856.0, + 17446985728.0, + 17446985728.0, + 17447395328.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447084032.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447858176.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447632896.0, + 17446985728.0, + 17446985728.0, + 17447624704.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447120896.0, + 17447051264.0, + 17447452672.0, + 17446985728.0, + 17447714816.0, + 17447403520.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447813120.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447510016.0, + 17447268352.0, + 17447841792.0, + 17448194048.0, + 17447268352.0, + 17447579648.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447555072.0, + 17447268352.0, + 17447628800.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447899136.0, + 17447268352.0, + 17447849984.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447620608.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447600128.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447387136.0, + 17447268352.0, + 17447268352.0, + 17447383040.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17448112128.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17448112128.0, + 17447268352.0, + 17447268352.0, + 17447284736.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447641088.0, + 17447268352.0, + 17447268352.0, + 17448280064.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447268352.0, + 17447276544.0, + 17447835648.0, + 17447411712.0, + 17447411712.0, + 
17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447903232.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17448747008.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17448247296.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447444480.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447817216.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447153664.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447862272.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447198720.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447067648.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447190528.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447436288.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447026688.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447411712.0, + 17447747584.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447907328.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17448366080.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447686144.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447972864.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447460864.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17448054784.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17448411136.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17448312832.0, + 17448157184.0, + 17448394752.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447411712.0, + 17447469056.0, + 17447411712.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447182336.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447215104.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447141376.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447178240.0, + 17447673856.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447559168.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447616512.0, + 17446985728.0, + 17446985728.0, + 17447862272.0, + 17446985728.0, + 17446985728.0, + 17447583744.0, + 17446985728.0, + 17447534592.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447051264.0, + 17447542784.0, + 17447419904.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447108608.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447575552.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447264256.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447174144.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447284736.0, + 17446985728.0, + 17447387136.0, + 17446985728.0, + 17447915520.0, + 17447325696.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447231488.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447026688.0, + 17447706624.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447165952.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447174144.0, + 17446985728.0, + 17446985728.0, + 17447444480.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447579648.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447022592.0, + 17446985728.0, + 17447698432.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447706624.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447256064.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448026112.0, + 17446985728.0, + 17446985728.0, + 17446983680.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447362560.0, + 17446985728.0, + 17446985728.0, + 17447370752.0, + 17446985728.0, + 17446985728.0, + 17447862272.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447297024.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447120896.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447190528.0, + 17447976960.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447501824.0, + 17447501824.0, + 17446985728.0, + 17447268352.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447620608.0, + 17446985728.0, + 17447604224.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447297024.0, + 17446985728.0, + 17447526400.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447239680.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447116800.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447731200.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447387136.0, + 17446985728.0, + 17446985728.0, + 17447665664.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447534592.0, + 17446985728.0, + 17447714816.0, + 17446985728.0, + 17446985728.0, + 17447632896.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447886848.0, + 17447124992.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447256064.0, + 17446985728.0, + 17446985728.0, + 17447157760.0, + 17447337984.0, + 17447702528.0, + 17446985728.0, + 17447833600.0, + 17447690240.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447395328.0, + 17447362560.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447428096.0, + 17446985728.0, + 17446985728.0, + 17447309312.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447636992.0, + 17446985728.0, + 17447616512.0, + 17446985728.0, + 17447288832.0, + 17446985728.0, + 17447456768.0, + 17446985728.0, + 17447579648.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447286784.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447301120.0, + 17446985728.0, + 17447084032.0, + 17446985728.0, + 17446985728.0, + 17447927808.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448034304.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447075840.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447755776.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447288832.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447477248.0, + 17447211008.0, + 17446985728.0, + 17446985728.0, + 17447690240.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447387136.0, + 17447997440.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447976960.0, + 17446985728.0, + 17447985152.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447870464.0, + 17446985728.0, + 17446985728.0, + 17447026688.0, + 17446985728.0, + 17446985728.0, + 17447231488.0, + 17446985728.0, + 17446985728.0, + 17447927808.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447231488.0, + 17446985728.0, + 17447075840.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446983680.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447395328.0, + 17446985728.0, + 17446985728.0, + 17447690240.0, + 17446985728.0, + 17447178240.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447153664.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447919616.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447755776.0, + 17446985728.0, + 17447641088.0, + 17446985728.0, + 17446985728.0, + 17447002112.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447845888.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447165952.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447305216.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447788544.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447768064.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448185856.0, + 17447157760.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447436288.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17447825408.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447165952.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447477248.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447133184.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447313408.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447878656.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447518208.0, + 17446985728.0, + 17446985728.0, + 17447182336.0, + 17446985728.0, + 17446985728.0, + 17447542784.0, + 17447944192.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447985152.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448132608.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447149568.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447886848.0, + 17446985728.0, + 17446985728.0, + 17447256064.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447108608.0, + 17446985728.0, + 17447624704.0, + 17447624704.0, + 17447624704.0, + 17447624704.0, + 17447624704.0, + 17448075264.0, + 17447624704.0, + 17447624704.0, + 17447624704.0, + 17448140800.0, + 17447624704.0, + 17447624704.0, + 17447624704.0, + 17446985728.0, + 17447337984.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447436288.0, + 17447985152.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447878656.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447346176.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447124992.0, + 17446985728.0, + 17447641088.0, + 17446985728.0, + 17447174144.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447133184.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447084032.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447264256.0, + 17447133184.0, + 17446985728.0, + 17447251968.0, + 17446985728.0, + 17447370752.0, + 17446985728.0, + 17446985728.0, + 17447849984.0, + 17447116800.0, + 17446985728.0, + 17446985728.0, + 17447108608.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448034304.0, + 17447051264.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447858176.0, + 
17446985728.0, + 17447542784.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448341504.0, + 17447600128.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447804928.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447165952.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447706624.0, + 17448673280.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17448185856.0, + 17447706624.0, + 17447706624.0, + 17447706624.0, + 17447680000.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448038400.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447731200.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447936000.0, + 17447682048.0, + 17448099840.0, + 17448263680.0, + 17447682048.0, + 17448017920.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448165376.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448673280.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448030208.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448566784.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447845888.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448464384.0, + 17447682048.0, + 17448460288.0, + 17448697856.0, + 17448349696.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448660992.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17447682048.0, + 17448689664.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447419904.0, + 17446985728.0, + 17446985728.0, + 17447813120.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447280640.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447718912.0, + 17447854080.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17447510016.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447862272.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447501824.0, + 17447305216.0, + 17446985728.0, + 17446993920.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447878656.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447714816.0, + 17446985728.0, + 17447432192.0, + 17446985728.0, + 17447976960.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448017920.0, + 17446985728.0, + 17446985728.0, + 17447661568.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447313408.0, + 17446985728.0, + 17447600128.0, + 17446985728.0, + 17447895040.0, + 17446985728.0, + 17447485440.0, + 17446985728.0, + 17447919616.0, + 17446985728.0, + 17447337984.0, + 17446985728.0, + 17446989824.0, + 17447358464.0, + 17447034880.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447018496.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447559168.0, + 17447493632.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447591936.0, + 17447485440.0, + 17446985728.0, + 17446985728.0, + 17447190528.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447510016.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447854080.0, + 17446985728.0, + 17446985728.0, + 17447370752.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447665664.0, + 17446985728.0, + 17447886848.0, + 17446985728.0, + 17446985728.0, + 17448038400.0, + 17446985728.0, + 17447559168.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447452672.0, + 17446985728.0, + 17446985728.0, + 17447198720.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447559168.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448034304.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447084032.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447649280.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447403520.0, + 17446985728.0, + 17448235008.0, + 17446985728.0, + 17447124992.0, + 17447862272.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447841792.0, + 17447907328.0, + 17446985728.0, + 17447837696.0, + 17447821312.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447100416.0, + 17446985728.0, + 17447059456.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447600128.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447100416.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447567360.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447231488.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447342080.0, + 17447084032.0, + 17446983680.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447002112.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447084032.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447444480.0, + 17448157184.0, + 17446985728.0, + 17447149568.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447403520.0, + 17446985728.0, + 17447972864.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447673856.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447100416.0, + 17446985728.0, + 17447772160.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447477248.0, + 17447464960.0, + 17447464960.0, + 17448144896.0, + 17448194048.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17448071168.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447624704.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447833600.0, + 17447464960.0, + 17447702528.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17448112128.0, + 17447464960.0, + 17448349696.0, + 17447464960.0, + 17447636992.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447686144.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447882752.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447907328.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447960576.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17448341504.0, + 17447464960.0, + 17447464960.0, + 17447464960.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447890944.0, + 17448525824.0, + 17447481344.0, + 17447481344.0, + 17448022016.0, + 17448292352.0, + 17448169472.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447579648.0, + 17448054784.0, + 17448103936.0, + 17447481344.0, + 17447989248.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17448398848.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447956480.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447514112.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447596032.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17448361984.0, + 17448443904.0, + 17447481344.0, + 
17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17448374272.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447481344.0, + 17447297024.0, + 17448173568.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17448312832.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17448009728.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447493632.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17448239104.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447813120.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17447854080.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17448247296.0, + 17447297024.0, + 17447297024.0, + 17447297024.0, + 17448026112.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447718912.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447026688.0, + 17446985728.0, + 17447067648.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447141376.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447755776.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447534592.0, + 17446985728.0, + 17446985728.0, + 17447968768.0, + 17446985728.0, + 17447653376.0, + 17447383040.0, + 17446985728.0, + 17447018496.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447944192.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447821312.0, + 17446985728.0, + 17446983680.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447165952.0, + 17446985728.0, + 17447542784.0, + 17446985728.0, + 17446985728.0, + 17447776256.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447780352.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447567360.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447661568.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447075840.0, + 17447485440.0, + 17447239680.0, + 17447919616.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447837696.0, + 17447763968.0, + 17446985728.0, + 17446985728.0, + 17447493632.0, + 17446985728.0, + 17447051264.0, + 17447256064.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447510016.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448005632.0, + 17446985728.0, + 17446985728.0, + 17447227392.0, + 17446985728.0, + 17446985728.0, + 17447919616.0, + 17447821312.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447051264.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447071744.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447387136.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447153664.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447743488.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447596032.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447374848.0, + 17446985728.0, + 17447088128.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17447862272.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447141376.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447305216.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447034880.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448017920.0, + 17447739392.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447337984.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447387136.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447768064.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447268352.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447038976.0, + 17446985728.0, + 17447034880.0, + 17447493632.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447100416.0, + 17447403520.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447845888.0, + 17446985728.0, + 17447727104.0, + 17446985728.0, + 17446985728.0, + 17447923712.0, + 17447596032.0, + 17447141376.0, + 17446985728.0, + 17447997440.0, + 17446985728.0, + 17446985728.0, + 17447854080.0, + 17446985728.0, + 17447469056.0, + 17447018496.0, + 17446985728.0, + 17447321600.0, + 17446985728.0, + 17446985728.0, + 17447362560.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447288832.0, + 17446985728.0, + 17447436288.0, + 17446985728.0, + 17447342080.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447059456.0, + 17446985728.0, + 17446985728.0, + 17447702528.0, + 17446985728.0, + 17447727104.0, + 17446985728.0, + 17447387136.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447944192.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448009728.0, + 17446985728.0, + 17447141376.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17447354368.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447108608.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447075840.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447034880.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447272448.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447108608.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447018496.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447071744.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447706624.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447088128.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447428096.0, + 17446985728.0, + 17447305216.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447780352.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447395328.0, + 17446985728.0, + 17447329792.0, + 17446985728.0, + 17447673856.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447182336.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447157760.0, + 17447321600.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 
17446985728.0, + 17446985728.0, + 17447510016.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447845888.0, + 17447542784.0, + 17448312832.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447849984.0, + 17447542784.0, + 17447870464.0, + 17447542784.0, + 17448419328.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17448034304.0, + 17447542784.0, + 17447542784.0, + 17448140800.0, + 17447542784.0, + 17448054784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447813120.0, + 17448288256.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447854080.0, + 17448423424.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447604224.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447542784.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17448022016.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447641088.0, + 17447849984.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447907328.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447677952.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447972864.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447800832.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 
17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447514112.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17448136704.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447632896.0, + 17448226816.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17448071168.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447923712.0, + 17447325696.0, + 17447325696.0, + 17447686144.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17447469056.0, + 17447325696.0, + 17447624704.0, + 17447325696.0, + 17447604224.0, + 17447325696.0, + 17447481344.0, + 17447325696.0, + 17447325696.0, + 17447325696.0, + 17446983680.0, + 17446985728.0, + 17447116800.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447817216.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447895040.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448026112.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447182336.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447677952.0, + 17446985728.0, + 17446985728.0, + 17447231488.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448947712.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447477248.0, + 17446985728.0, + 17446985728.0, + 17447112704.0, + 17447321600.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447702528.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447706624.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447452672.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448030208.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17448337408.0, + 17446985728.0, + 17447514112.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17447075840.0, + 17446985728.0, + 17447350272.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0, + 17446985728.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 27308, + "step_interval": 5, + "values": [ + 144.08891, + 27.18296, + 28.7759, + 28.37953, + 27.76792, + 28.07504, + 27.39385, + 26.78229, + 27.39429, + 25.28079, + 25.5494, + 26.54548, + 25.41502, + 25.6055, + 25.57833, + 24.08431, + 24.73616, + 25.01832, + 
24.08821, + 23.51395, + 24.73726, + 24.21609, + 24.04016, + 25.09547, + 23.76432, + 23.56989, + 23.52949, + 23.4645, + 23.33365, + 23.6517, + 23.47858, + 23.61385, + 23.7676, + 23.47229, + 22.97194, + 23.39169, + 23.41737, + 23.56892, + 23.08883, + 22.66263, + 23.51845, + 22.96823, + 22.61889, + 23.5187, + 22.80851, + 22.96399, + 22.9144, + 22.42292, + 22.60671, + 23.16861, + 22.82373, + 22.84703, + 22.62976, + 22.53477, + 22.35819, + 22.7189, + 22.38451, + 22.50971, + 22.93356, + 22.64643, + 22.62173, + 22.53904, + 22.3477, + 22.37545, + 22.99236, + 22.56689, + 22.36764, + 22.76719, + 22.32971, + 22.26796, + 22.43175, + 22.86586, + 22.37827, + 22.31797, + 23.05517, + 22.38161, + 22.15415, + 22.85999, + 22.31488, + 22.1238, + 22.68572, + 22.69305, + 22.04383, + 22.71203, + 22.05705, + 22.30961, + 23.00833, + 22.02052, + 22.49272, + 22.69917, + 22.17568, + 22.16281, + 22.7872, + 22.00362, + 22.22705, + 22.92269, + 22.36921, + 22.17753, + 22.68225, + 22.1444, + 23.5386, + 22.505, + 22.01473, + 22.46687, + 22.24677, + 22.39756, + 22.1972, + 22.23715, + 22.16025, + 22.16319, + 22.41521, + 22.39638, + 22.03389, + 22.21401, + 22.08418, + 22.1449, + 22.226, + 22.35003, + 22.20765, + 22.0749, + 23.09716, + 22.09986, + 22.15346, + 22.98874, + 22.35659, + 22.08677, + 22.4387, + 22.22567, + 22.08282, + 22.80666, + 22.07835, + 22.12375, + 22.38661, + 22.07926, + 22.38071, + 22.14634, + 22.19898, + 22.25255, + 22.14789, + 22.03402, + 22.03884, + 22.53378, + 22.39106, + 22.00408, + 22.0108, + 23.3929, + 21.98099, + 22.01587, + 23.15318, + 22.20737, + 22.01783, + 22.22849, + 22.22966, + 22.13073, + 22.55899, + 22.0429, + 22.35985, + 22.44003, + 22.25177, + 22.17871, + 21.96168, + 22.29543, + 22.18, + 22.37824, + 22.18173, + 22.13368, + 22.53572, + 21.99892, + 22.00424, + 22.01292, + 22.26095, + 21.99937, + 22.04101, + 23.2343, + 21.98997, + 22.21035, + 23.17278, + 22.25899, + 22.12446, + 22.54666, + 22.0171, + 22.08991, + 22.45741, + 21.98198, + 22.12532, + 22.37849, + 21.99417, + 21.98968, + 22.10685, + 22.38733, + 22.22672, + 22.40604, + 22.03877, + 22.02761, + 22.02356, + 22.17616, + 22.32819, + 21.98196, + 23.25932, + 21.99223, + 22.82682, + 22.14838, + 22.07154, + 22.70525, + 22.43407, + 22.02542, + 22.63539, + 22.25999, + 22.18628, + 22.28038, + 22.00327, + 22.20951, + 22.35197, + 22.49728, + 23.56005, + 22.76213, + 24.61836, + 23.00086, + 22.83544, + 22.99861, + 22.90281, + 22.4608, + 23.60628, + 22.99803, + 22.32844, + 23.52395, + 22.3822, + 22.47603, + 23.19293, + 22.24039, + 22.05491, + 23.5743, + 22.07715, + 21.99079, + 23.59318, + 21.98454, + 22.11036, + 22.85504, + 22.45315, + 25.81426, + 22.52048, + 22.44753, + 22.47766, + 22.24636, + 22.24311, + 22.02379, + 22.12952, + 22.17585, + 22.23626, + 22.227, + 21.96626, + 22.25846, + 22.66167, + 22.04917, + 22.18736, + 22.93901, + 22.23628, + 22.00751, + 22.85013, + 21.99802, + 22.14584, + 22.64398, + 22.12933, + 22.03666, + 22.12914, + 22.47871, + 21.98998, + 22.08852, + 22.10707, + 22.02827, + 22.04949, + 22.10938, + 22.16002, + 22.0572, + 22.4045, + 21.9906, + 22.36884, + 22.57462, + 22.11775, + 22.29225, + 22.64343, + 22.27508, + 22.08397, + 23.19772, + 22.23017, + 22.19658, + 22.63357, + 22.08414, + 22.28009, + 22.59849, + 22.38033, + 21.96807, + 22.07953, + 22.15342, + 22.0268, + 22.26485, + 21.96872, + 22.56672, + 21.96759, + 22.14143, + 21.43117, + 22.27329, + 22.1273, + 22.67007, + 22.84943, + 22.03139, + 22.21482, + 22.93781, + 22.19395, + 22.04166, + 22.97579, + 22.04506, + 21.98575, + 22.37801, + 22.30579, + 21.9824, + 22.03537, + 
22.09295, + 22.31415, + 21.98727, + 21.77508, + 22.62691, + 22.15103, + 22.14421, + 21.99115, + 22.31846, + 22.06955, + 22.17395, + 22.25436, + 139.46249, + 22.75183, + 22.51547, + 23.37055, + 22.65482, + 22.63677, + 23.55777, + 22.64493, + 23.05364, + 23.51189, + 22.66016, + 22.51283, + 22.90432, + 22.32768, + 22.55442, + 22.80493, + 22.64357, + 22.26495, + 22.93471, + 22.27821, + 22.25688, + 22.86227, + 22.23824, + 22.20756, + 22.43165, + 22.40266, + 22.24195, + 22.29421, + 22.39034, + 22.18892, + 22.24207, + 21.90287, + 22.62409, + 22.39802, + 22.3563, + 22.37461, + 22.84475, + 22.38544, + 22.29, + 23.4498, + 22.54358, + 22.3157, + 22.91372, + 22.51769, + 22.37781, + 22.83857, + 22.7779, + 22.26592, + 22.98142, + 22.4236, + 22.21238, + 22.88876, + 22.28733, + 22.55918, + 22.37388, + 22.25656, + 22.29004, + 22.34599, + 22.43384, + 22.56104, + 22.49615, + 22.44958, + 22.43601, + 22.26295, + 22.86147, + 22.39765, + 22.35822, + 23.10647, + 22.33805, + 22.32324, + 22.97255, + 21.7446, + 22.66551, + 22.67271, + 22.29879, + 22.55611, + 22.81529, + 22.48018, + 22.7111, + 22.26949, + 22.85083, + 22.71677, + 22.35647, + 22.43576, + 22.68977, + 22.40417, + 22.28594, + 22.2769, + 22.80963, + 22.37005, + 22.41868, + 23.11052, + 22.55657, + 22.45834, + 22.93099, + 22.38713, + 22.30621, + 22.57878, + 22.6241, + 22.36017, + 22.55442, + 22.33244, + 22.53711, + 22.37295, + 150.1908, + 22.31466, + 22.09742, + 23.3826, + 22.32718, + 22.11036, + 22.95423, + 22.0759, + 22.15037, + 22.74689, + 22.0872, + 22.12055, + 22.70332, + 22.01518, + 22.20242, + 22.61501, + 22.15112, + 21.99156, + 22.34172, + 21.98494, + 22.07139, + 22.42343, + 22.08413, + 22.01145, + 22.12979, + 22.19043, + 21.98698, + 21.97181, + 22.15881, + 22.01087, + 21.97878, + 22.03357, + 22.19872, + 21.98681, + 21.98032, + 21.95105, + 22.21537, + 22.07794, + 21.9827, + 22.18917, + 21.73407, + 22.00102, + 22.48948, + 21.97008, + 22.10194, + 22.76787, + 22.04689, + 22.02991, + 23.51822, + 22.66788, + 21.96909, + 22.51084, + 21.98716, + 22.22728, + 21.96566, + 21.98205, + 21.96522, + 22.06763, + 21.96275, + 21.98508, + 22.3101, + 21.99387, + 22.0796, + 22.08397, + 22.07532, + 22.00018, + 21.99079, + 22.69585, + 21.98075, + 21.98031, + 22.5497, + 21.95231, + 21.97636, + 23.47594, + 22.48762, + 21.96987, + 22.74353, + 21.98197, + 21.95332, + 22.09058, + 21.59242, + 22.27239, + 22.06962, + 21.96895, + 21.97272, + 22.09908, + 22.39087, + 21.96533, + 22.11435, + 21.95389, + 21.97265, + 22.00925, + 22.22567, + 22.17171, + 21.95621, + 22.08434, + 21.98597, + 21.98224, + 22.64483, + 22.20371, + 23.15428, + 21.9978, + 21.97693, + 22.61262, + 22.28172, + 22.56743, + 22.00901, + 21.99811, + 21.9869, + 21.97021, + 21.97121, + 22.16697, + 22.48932, + 21.97317, + 21.98121, + 22.00708, + 22.56698, + 22.76444, + 22.3888, + 22.47333, + 22.17974, + 22.38066, + 22.19249, + 22.06505, + 22.1555, + 22.02924, + 22.00077, + 21.9668, + 22.35229, + 21.95424, + 22.1547, + 23.02753, + 21.96796, + 22.09918, + 23.15867, + 22.0003, + 22.10756, + 22.80626, + 22.24095, + 22.02607, + 22.72858, + 22.61805, + 22.09383, + 22.05538, + 22.17601, + 22.25792, + 22.03217, + 21.99017, + 22.71981, + 22.26331, + 22.45265, + 22.14421, + 22.19871, + 21.99202, + 22.03261, + 22.12663, + 21.94556, + 21.90994, + 21.90858, + 22.34492, + 21.93792, + 22.06428, + 22.60384, + 22.25879, + 22.28391, + 23.46466, + 22.04683, + 22.21721, + 22.86592, + 22.23653, + 21.91424, + 22.0933, + 22.50215, + 21.94183, + 22.015, + 22.09922, + 22.20373, + 21.90876, + 21.9333, + 21.92505, + 21.95365, + 21.97395, + 
22.13822, + 22.23772, + 22.32163, + 21.93494, + 21.95154, + 22.57417, + 21.99284, + 21.95553, + 23.02139, + 21.67191, + 22.02365, + 23.05264, + 21.96061, + 21.94264, + 22.87476, + 22.49135, + 21.95872, + 22.08128, + 21.91896, + 22.08139, + 21.92737, + 21.94585, + 22.39994, + 22.02547, + 22.0884, + 21.92127, + 22.30053, + 21.9285, + 22.02136, + 21.9092, + 22.22074, + 21.95978, + 21.95417, + 22.63596, + 21.95056, + 21.97393, + 22.54615, + 22.00624, + 22.09699, + 23.11883, + 22.1166, + 21.92557, + 22.81165, + 21.99968, + 21.97545, + 22.13873, + 21.93904, + 22.53462, + 22.05603, + 21.68633, + 22.13439, + 21.95697, + 22.14256, + 22.6049, + 206.4328, + 22.13583, + 22.42085, + 22.70371, + 21.95279, + 23.59682, + 23.43193, + 22.29466, + 22.23401, + 23.69629, + 22.20447, + 22.09062, + 22.74803, + 21.98634, + 21.95441, + 22.22846, + 21.97244, + 22.24925, + 21.94374, + 21.97849, + 22.03202, + 21.94975, + 21.94527, + 146.68144, + 22.27439, + 21.99763, + 22.94339, + 22.17575, + 22.08603, + 23.20221, + 22.06277, + 22.27312, + 22.69968, + 21.97287, + 21.98518, + 21.56896, + 21.97247, + 22.44083, + 22.13808, + 22.04357, + 22.1117, + 21.91148, + 21.8702, + 22.01261, + 22.23046, + 21.89266, + 22.19313, + 22.10151, + 22.10548, + 22.05675, + 22.64429, + 21.91852, + 21.90826, + 22.75417, + 22.09824, + 22.15108, + 22.95928, + 22.01593, + 21.98969, + 22.45724, + 22.07652, + 21.907, + 22.38014, + 21.88281, + 21.86258, + 21.91324, + 21.91422, + 21.87106, + 21.90118, + 22.25658, + 21.90246, + 21.89989, + 22.07162, + 22.0418, + 21.89729, + 21.75701, + 21.89276, + 22.09418, + 22.41572, + 22.00607, + 22.09298, + 22.54087, + 21.91413, + 21.90946, + 23.05955, + 21.93402, + 22.20568, + 22.84967, + 21.90794, + 21.94137, + 22.2126, + 22.07115, + 21.91625, + 22.17132, + 22.39414, + 21.34349, + 21.91209, + 22.21659, + 21.92665, + 22.28304, + 22.65754, + 21.91211, + 22.28527, + 21.93459, + 22.56003, + 22.74206, + 21.93342, + 22.09202, + 23.28637, + 22.09157, + 21.95656, + 22.8947, + 21.96243, + 21.9394, + 22.38718, + 21.59664, + 22.22617, + 22.21916, + 22.07887, + 21.93848, + 21.98941, + 22.01857, + 21.92522, + 22.32653, + 21.91902, + 22.77012, + 21.89258, + 22.05719, + 21.90374, + 21.98219, + 22.64801, + 22.20669, + 22.67932, + 22.67187, + 22.01469, + 22.15446, + 23.15926, + 22.09728, + 22.19881, + 22.07149, + 22.03691, + 21.97724, + 22.12679, + 21.95995, + 22.02123, + 22.18487, + 21.9739, + 21.96864, + 21.97257, + 22.22663, + 21.97249, + 21.97875, + 22.28503, + 21.9815, + 22.07268, + 22.10998, + 22.11118, + 21.98495, + 22.22104, + 21.9711, + 22.21139, + 22.67055, + 21.97117, + 21.97397, + 23.35298, + 22.19033, + 21.98968, + 22.80396, + 22.11866, + 22.25796, + 22.32182, + 22.39318, + 22.04391, + 22.15127, + 22.06453, + 22.05777, + 22.34845, + 21.96765, + 22.1485, + 22.07825, + 21.969, + 22.02032, + 21.95162, + 21.97527, + 21.97671, + 21.97859, + 22.49228, + 21.94657, + 22.04616, + 23.31876, + 22.23427, + 21.93586, + 23.03057, + 22.1601, + 21.97717, + 22.38684, + 21.94359, + 21.9093, + 22.23889, + 21.95759, + 22.07084, + 22.35077, + 21.98614, + 21.98721, + 21.99153, + 22.18873, + 21.95713, + 22.03424, + 22.33623, + 21.94898, + 22.03167, + 21.99354, + 22.0926, + 22.00058, + 22.49012, + 22.2445, + 21.99326, + 23.14098, + 22.00826, + 22.27556, + 22.66539, + 21.96698, + 22.19655, + 22.39693, + 21.95024, + 21.94962, + 22.39099, + 21.99116, + 22.00551, + 21.94971, + 21.97359, + 21.94154, + 21.9862, + 22.46948, + 21.99518, + 21.99948, + 21.95742, + 21.97806, + 22.29998, + 22.25772, + 21.97304, + 23.04687, + 22.02255, + 21.96136, + 
22.63988, + 21.98201, + 22.44684, + 22.69289, + 21.91054, + 22.09969, + 22.15419, + 21.98784, + 22.34465, + 22.14339, + 22.22435, + 22.16608, + 22.04499, + 22.03883, + 22.0194, + 22.28322, + 22.16577, + 22.04861, + 22.01207, + 22.03022, + 22.03551, + 22.10007, + 22.20531, + 22.04516, + 22.01998, + 21.98422, + 22.19016, + 22.05819, + 22.04256, + 22.23628, + 22.04532, + 22.06464, + 21.97782, + 22.25726, + 23.50028, + 22.18097, + 21.98326, + 22.68992, + 22.10064, + 22.1042, + 22.09756, + 21.9846, + 22.2915, + 22.0134, + 21.98359, + 22.00443, + 22.3594, + 22.16943, + 22.10875, + 22.23036, + 22.02488, + 22.03753, + 22.11202, + 21.98034, + 22.04396, + 21.98521, + 22.31947, + 22.12728, + 21.96752, + 23.2102, + 22.00819, + 22.09734, + 23.2734, + 22.10175, + 22.00907, + 22.51192, + 21.99216, + 21.99815, + 22.23182, + 21.99145, + 21.96195, + 22.3484, + 22.15858, + 21.9582, + 21.98637, + 22.22783, + 21.97977, + 21.96251, + 22.15796, + 22.05459, + 22.03964, + 22.01487, + 22.37922, + 21.97776, + 22.02979, + 21.93978, + 22.00505, + 22.91704, + 22.0008, + 22.50814, + 23.5463, + 21.98618, + 21.96548, + 22.61999, + 21.97729, + 22.13021, + 22.01193, + 22.0045, + 22.00856, + 22.01993, + 22.06798, + 22.01047, + 22.60098, + 21.96739, + 22.01616, + 22.20296, + 21.9668, + 22.03036, + 23.0835, + 22.6443, + 22.01308, + 23.01417, + 22.51771, + 22.11776, + 23.18986, + 22.02416, + 22.01537, + 22.79275, + 21.98761, + 22.50517, + 21.96502, + 21.93878, + 21.94931, + 142.13861, + 22.39532, + 22.06472, + 23.17265, + 22.27286, + 22.20975, + 22.84169, + 22.02298, + 22.23592, + 22.55482, + 21.98098, + 22.00536, + 22.48102, + 21.98683, + 22.17384, + 22.35676, + 22.11801, + 21.92808, + 22.63972, + 21.97801, + 21.92817, + 21.95477, + 22.05367, + 22.05264, + 22.24046, + 21.99754, + 21.94995, + 21.88901, + 21.9762, + 22.15816, + 21.89293, + 22.08613, + 22.08702, + 21.90437, + 21.89442, + 21.89632, + 22.1366, + 21.90047, + 22.3612, + 21.93155, + 21.89009, + 22.4678, + 21.87928, + 21.99146, + 22.63725, + 22.12453, + 21.8854, + 23.11332, + 21.87945, + 21.91698, + 23.30958, + 22.06861, + 22.15321, + 23.12633, + 22.27345, + 22.16398, + 22.01246, + 22.1375, + 22.16237, + 22.04243, + 22.11127, + 22.18013, + 21.96813, + 22.01185, + 22.0346, + 22.20312, + 21.9984, + 22.00191, + 22.36888, + 21.99644, + 22.04733, + 22.25778, + 22.07293, + 21.96894, + 22.00403, + 22.37494, + 21.97663, + 21.97781, + 21.99943, + 22.1262, + 22.2965, + 22.12864, + 22.44026, + 21.94666, + 22.01049, + 22.02276, + 21.93438, + 21.93788, + 21.99422, + 22.94236, + 22.10934, + 22.00049, + 23.05529, + 22.19425, + 21.97173, + 22.81132, + 21.98524, + 22.15092, + 22.07076, + 22.19723, + 22.19315, + 21.95596, + 21.9444, + 21.909, + 22.27546, + 22.02288, + 22.21957, + 21.98733, + 21.95521, + 21.95763, + 21.94721, + 22.31026, + 22.0157, + 21.95551, + 22.63773, + 21.95335, + 21.97383, + 23.24275, + 22.10849, + 21.94298, + 22.98865, + 21.97692, + 21.94962, + 22.24428, + 22.14901, + 21.91759, + 21.9905, + 21.894, + 21.93218, + 22.17358, + 22.21614, + 21.92615, + 21.95192, + 21.93167, + 21.93223, + 21.94018, + 21.92842, + 21.98818, + 22.24216, + 21.92605, + 21.92489, + 23.30762, + 22.00282, + 22.23153, + 23.70756, + 21.95362, + 21.96965, + 22.48831, + 22.32396, + 22.59795, + 21.93239, + 21.93013, + 22.36592, + 22.21659, + 21.96341, + 23.07037, + 21.9989, + 21.97882, + 22.8066, + 21.89899, + 22.29705, + 22.50756, + 22.00453, + 21.87503, + 23.03505, + 21.87592, + 21.87096, + 23.11979, + 21.84632, + 21.85352, + 23.15894, + 21.86194, + 21.88866, + 22.85346, + 21.87683, + 21.83621, + 
22.90984, + 21.81313, + 21.88593, + 22.51014, + 21.85441, + 22.00295, + 22.10692, + 22.11597, + 22.13581, + 21.93228, + 21.96083, + 21.97218, + 21.98125, + 21.83079, + 22.00393, + 21.97137, + 21.79148, + 21.79391, + 22.06623, + 21.8021, + 21.87739, + 22.57869, + 21.96111, + 21.8294, + 22.42445, + 21.82539, + 21.78304, + 22.76258, + 21.87705, + 22.39466, + 22.15284, + 21.91144, + 21.80806, + 21.89198, + 21.82063, + 21.78463, + 22.1367, + 21.79902, + 21.83569, + 21.8232, + 22.05093, + 21.80924, + 21.82128, + 21.94955, + 21.79657, + 21.85326, + 22.20561, + 22.08345, + 21.82835, + 22.714, + 21.97994, + 21.79499, + 22.61655, + 21.78305, + 22.19292, + 22.68875, + 21.80842, + 21.86604, + 22.1574, + 21.84699, + 21.7953, + 22.49977, + 21.83422, + 21.83876, + 21.87859, + 21.82252, + 21.79903, + 21.82918, + 21.78679, + 21.85667, + 21.83996, + 21.91973, + 21.99525, + 22.09814, + 21.9431, + 21.79477, + 22.53785, + 21.99228, + 21.99067, + 22.4957, + 21.91737, + 21.87883, + 22.45522, + 21.85888, + 22.20505, + 22.27021, + 21.95338, + 21.80428, + 21.8054, + 21.90604, + 21.80088, + 22.1636, + 22.03097, + 21.93403, + 22.10634, + 22.00156, + 21.94846, + 22.17914, + 21.93972, + 21.91467, + 21.86135, + 22.18961, + 21.86599, + 22.04627, + 22.10803, + 22.74719, + 21.89435, + 21.94254, + 23.82747, + 22.04257, + 21.99456, + 22.74565, + 21.97193, + 21.9267, + 22.38755, + 22.0684, + 21.86686, + 21.91021, + 21.87026, + 22.05928, + 21.87394, + 21.88032, + 22.05465, + 21.90457, + 21.87873, + 21.85079, + 22.11192, + 21.8833, + 21.87938, + 21.94757, + 22.36979, + 21.95247, + 21.95799, + 22.3807, + 21.91687, + 21.95121, + 23.12233, + 22.09942, + 21.88714, + 22.81775, + 22.0308, + 21.9125, + 22.42294, + 21.89738, + 22.14821, + 22.02139, + 21.85941, + 22.1295, + 22.06507, + 21.92367, + 21.89203, + 22.16508, + 21.86522, + 21.91719, + 21.99017, + 21.89352, + 21.93967, + 21.88254, + 22.20813, + 21.83993, + 21.84919, + 22.69724, + 21.88955, + 22.11138, + 23.59945, + 22.09364, + 21.93481, + 22.46647, + 21.92533, + 21.84766, + 22.25242, + 21.89277, + 22.02092, + 21.87456, + 22.23224, + 21.85141, + 21.98347, + 21.85346, + 22.33167, + 22.06509, + 21.84517, + 22.28148, + 22.5786, + 21.87647, + 21.82123, + 23.23129, + 21.86236, + 21.85248, + 23.31643, + 21.95381, + 22.05419, + 22.15946, + 21.83957, + 21.87428, + 21.98707, + 21.82906, + 21.84449, + 22.01626, + 21.87183, + 21.87889, + 22.00811, + 21.85775, + 21.90731, + 22.45462, + 22.02047, + 22.60295, + 21.98065, + 21.97552, + 22.20873, + 22.18311, + 21.99139, + 22.69954, + 22.05116, + 22.40658, + 21.90802, + 21.85639, + 22.015, + 21.88946, + 21.94592, + 22.14753, + 21.89762, + 22.02483, + 22.12046, + 21.84874, + 21.85095, + 21.89431, + 22.13549, + 21.91431, + 22.00004, + 22.08948, + 21.93019, + 21.93463, + 21.72272, + 21.64917, + 21.76523, + 21.78631, + 21.59759, + 21.71417, + 21.71277, + 21.6352, + 21.66456, + 21.79163, + 21.61727, + 21.61391, + 22.01, + 21.81964, + 21.65058, + 21.58351, + 22.39611, + 21.57187, + 21.5484, + 22.77818, + 21.95076, + 21.59944, + 22.48207, + 21.90988, + 21.60123, + 21.91667, + 21.55509, + 21.60043, + 21.71148, + 21.61902, + 21.71052, + 21.56121, + 21.79125, + 21.61895, + 21.82243, + 21.58892, + 21.56771, + 21.97018, + 21.55632, + 21.57243, + 21.54972, + 21.89003, + 21.56867, + 21.5805, + 22.49199, + 21.68268, + 21.63866, + 22.22682, + 21.75737, + 21.58986, + 22.98403, + 21.54404, + 21.66838, + 22.45726, + 21.57826, + 21.79136, + 21.72834, + 21.58094, + 21.55374, + 21.75886, + 21.52991, + 21.59133, + 21.93324, + 21.57468, + 21.58156, + 21.56442, + 
21.70763, + 21.54559, + 22.67019, + 21.61771, + 21.78113, + 22.1951, + 21.51687, + 21.5471, + 22.79739, + 21.55815, + 21.5762, + 22.4953, + 21.60437, + 21.7942, + 21.84409, + 21.60122, + 21.69897, + 21.56287, + 21.80823, + 21.53247, + 21.90339, + 21.5872, + 21.54108, + 21.57595, + 21.58918, + 21.57443, + 21.56687, + 22.08588, + 21.55605, + 21.58208, + 22.29118, + 21.71883, + 21.81912, + 22.20041, + 21.87253, + 21.55853, + 22.76485, + 21.97927, + 21.68519, + 22.384, + 21.65105, + 21.56905, + 22.01037, + 21.57351, + 21.84402, + 21.93865, + 21.57359, + 21.57409, + 21.56773, + 22.17163, + 21.61912, + 21.57112, + 22.0843, + 21.72306, + 21.63203, + 22.80584, + 21.71512, + 21.62255, + 22.9722, + 21.65273, + 21.73816, + 21.56585, + 21.63462, + 21.84105, + 21.54243, + 21.55682, + 21.66568, + 21.6405, + 21.56556, + 21.55546, + 21.86375, + 21.72456, + 21.48658, + 21.65416, + 21.55668, + 21.69844, + 22.20503, + 22.06492, + 21.51941, + 22.84571, + 21.5346, + 21.499, + 22.80324, + 21.49194, + 21.50389, + 21.84848, + 21.92564, + 21.48695, + 21.69768, + 21.66972, + 21.52008, + 21.76282, + 21.52316, + 21.81372, + 21.53064, + 21.81821, + 21.51087, + 21.53629, + 21.64172, + 21.49074, + 21.55824, + 21.68024, + 21.67013, + 22.87816, + 21.53585, + 21.51361, + 22.50569, + 21.5219, + 22.20834, + 21.71869, + 21.48244, + 21.58961, + 21.54911, + 21.7198, + 21.5134, + 21.50591, + 21.94437, + 21.50681, + 21.56549, + 21.66914, + 21.52916, + 21.54661, + 21.806, + 21.78521, + 21.52422, + 22.4037, + 21.87564, + 21.52815, + 22.74947, + 21.51337, + 21.64755, + 22.27027, + 21.51728, + 22.11304, + 21.59328, + 21.71752, + 21.57915, + 21.47227, + 21.51114, + 21.7332, + 21.52916, + 21.46917, + 21.72661, + 21.47586, + 21.51426, + 21.46909, + 21.48341, + 21.78691, + 21.48813, + 21.75961, + 21.93572, + 21.84052, + 21.56804, + 22.46383, + 21.51143, + 21.53648, + 22.91481, + 21.6764, + 22.00167, + 22.16194, + 21.52871, + 21.52373, + 151.55295, + 21.82378, + 21.70948, + 22.69532, + 21.93156, + 21.65228, + 22.58118, + 21.69772, + 21.75235, + 22.32395, + 21.63565, + 21.66178, + 22.32896, + 21.66685, + 21.85512, + 22.45369, + 21.62199, + 21.62737, + 22.25415, + 21.68368, + 21.67747, + 22.18699, + 21.67863, + 21.65771, + 21.76783, + 21.87832, + 21.66377, + 21.64429, + 21.72954, + 21.63582, + 21.65568, + 21.63787, + 21.87094, + 21.64075, + 21.6436, + 21.65755, + 21.902, + 21.72626, + 21.6437, + 21.83108, + 21.55645, + 21.63674, + 22.40652, + 21.79753, + 21.65395, + 22.16056, + 21.65409, + 21.65837, + 22.46509, + 22.0882, + 21.63721, + 22.33517, + 21.62846, + 21.86158, + 22.356, + 21.69208, + 21.68824, + 21.81925, + 21.65616, + 21.63525, + 22.05059, + 21.65081, + 21.67372, + 21.62979, + 21.7075, + 21.71273, + 21.66647, + 22.56767, + 21.64273, + 21.6456, + 22.18868, + 21.68464, + 21.66484, + 22.5155, + 22.24424, + 21.64394, + 22.4389, + 21.6134, + 21.64674, + 22.07142, + 21.25747, + 21.84133, + 22.16199, + 21.63485, + 21.64806, + 22.06151, + 21.87458, + 21.65843, + 21.63718, + 21.66951, + 21.65164, + 21.91384, + 21.97839, + 21.84972, + 21.6567, + 22.12674, + 21.62995, + 21.63606, + 22.13262, + 21.91573, + 22.35869, + 21.63448, + 21.61452, + 22.47741, + 22.03423, + 22.18581, + 21.86574, + 21.64012, + 21.626, + 21.60879, + 21.65413, + 21.696, + 22.22939, + 22.26824, + 21.64161, + 21.62535, + 21.80349, + 21.84484, + 21.69425, + 22.08849, + 21.72068, + 21.55354, + 22.4506, + 21.61622, + 21.83088, + 22.40861, + 21.76977, + 21.5967, + 22.56649, + 21.56587, + 21.58908, + 22.69589, + 21.56429, + 21.58961, + 21.55196, + 21.5759, + 21.62071, + 
21.82003, + 21.85126, + 21.77693, + 21.63889, + 21.65565, + 21.63356, + 21.64813, + 21.58359, + 21.84745, + 21.978, + 21.56287, + 21.89887, + 22.38138, + 21.53535, + 21.58376, + 22.65083, + 21.81246, + 21.5762, + 22.63054, + 21.56682, + 21.61128, + 21.94669, + 21.54736, + 21.61974, + 21.56308, + 21.78693, + 21.5687, + 21.73753, + 21.57136, + 21.54358, + 22.07465, + 21.58793, + 21.5559, + 21.56577, + 21.7909, + 21.61694, + 21.97116, + 21.56218, + 21.54515, + 21.57659, + 22.07294, + 21.88846, + 21.56917, + 22.49082, + 21.58161, + 21.57842, + 22.26622, + 21.78168, + 21.62129, + 22.18429, + 21.7378, + 21.51363, + 21.86942, + 21.64775, + 21.62395, + 21.59253, + 21.5974, + 21.5693, + 21.56175, + 21.64064, + 21.73298, + 21.93732, + 21.61726, + 21.55451, + 21.63414, + 21.85234, + 21.58293, + 22.038, + 22.68022, + 21.563, + 21.5389, + 22.24776, + 21.60902, + 21.53304, + 22.5903, + 21.68411, + 21.86177, + 21.56693, + 21.93658, + 21.73248, + 21.75682, + 22.02825, + 21.5784, + 21.54589, + 21.66703, + 21.74882, + 21.54907, + 21.52602, + 21.86369, + 21.76281, + 21.5797, + 21.64422, + 22.59989, + 21.89925, + 21.67147, + 21.78946, + 21.64474, + 21.63218, + 21.63518, + 21.65495, + 21.90246, + 21.73924, + 21.58303, + 21.61397, + 21.60397, + 21.60814, + 21.65283, + 21.91777, + 21.58087, + 21.59295, + 21.56074, + 21.74092, + 21.54031, + 21.62944, + 21.81124, + 21.63963, + 23.12883, + 21.66011, + 21.57737, + 22.41665, + 21.57356, + 21.5967, + 21.84927, + 21.67605, + 21.96464, + 21.6889, + 21.59797, + 21.70036, + 21.60604, + 21.62181, + 21.67803, + 21.84986, + 21.58628, + 21.56697, + 21.69355, + 21.65197, + 21.59211, + 21.85693, + 22.00741, + 21.58838, + 21.57172, + 22.84316, + 21.61741, + 21.60035, + 22.88768, + 21.57727, + 21.6491, + 22.52644, + 21.74342, + 21.77071, + 21.73386, + 21.69847, + 21.56891, + 21.58716, + 21.57728, + 21.67146, + 21.91794, + 21.58074, + 21.54423, + 21.57078, + 21.61197, + 21.60629, + 21.52761, + 21.84311, + 21.6082, + 21.62408, + 21.60308, + 21.69916, + 21.58556, + 22.33043, + 21.62978, + 21.60476, + 22.63116, + 21.62038, + 21.8278, + 22.82382, + 21.59286, + 21.84373, + 22.17928, + 21.62792, + 21.86093, + 21.58999, + 21.60063, + 21.60445, + 21.63382, + 22.03161, + 21.6142, + 22.22228, + 21.61925, + 21.65817, + 21.77623, + 21.58733, + 21.89899, + 22.35622, + 22.43633, + 21.55873, + 22.30825, + 21.65093, + 21.65475, + 22.55924, + 21.62029, + 21.76512, + 22.59398, + 21.78142, + 21.72865, + 22.06454, + 21.61566, + 21.61604, + 21.83513, + 21.61938, + 21.62506, + 21.62109, + 21.6272, + 21.79976, + 21.65784, + 21.61258, + 21.62815, + 21.56939, + 21.94439, + 21.55283, + 21.81701, + 21.55837, + 21.59135, + 21.55932, + 21.51552, + 21.83362, + 21.51843, + 22.01248, + 21.5495, + 21.53533, + 21.89116, + 21.77289, + 21.65211, + 22.44925, + 21.75326, + 21.55273, + 21.68788, + 21.68147, + 21.68405, + 21.57726, + 21.54934, + 21.56148, + 21.56606, + 21.54317, + 21.67813, + 21.53084, + 21.55274, + 21.64835, + 21.70918, + 21.62197, + 21.54325, + 21.88558, + 21.53776, + 21.55483, + 21.87672, + 21.94302, + 21.55986, + 22.7389, + 21.854, + 21.65241, + 22.70001, + 21.52581, + 21.89472, + 21.9015, + 21.56492, + 21.69495, + 21.65263, + 21.74936, + 21.51637, + 21.81002, + 21.60252, + 21.58355, + 21.53796, + 21.55804, + 21.53173, + 21.48751, + 21.47108, + 21.53239, + 22.0191, + 21.69831, + 21.53537, + 21.88987, + 21.7069, + 21.57018, + 22.55962, + 21.73724, + 21.48857, + 22.56757, + 21.54315, + 21.95433, + 22.01932, + 21.63421, + 21.96459, + 21.53721, + 21.79685, + 21.52909, + 21.7117, + 21.51667, + 21.68202, 
+ 21.84814, + 21.77596, + 21.51305, + 21.516, + 22.22145, + 21.54059, + 21.57382, + 21.72287, + 21.88962, + 21.97017, + 22.36269, + 21.52348, + 21.70501, + 22.4914, + 21.69051, + 22.18999, + 22.16449, + 21.50469, + 21.50348, + 22.1642, + 21.53997, + 21.65783, + 21.82951, + 21.53457, + 21.58385, + 21.5099, + 136.63171, + 21.68244, + 21.58441, + 22.58458, + 21.71981, + 21.54, + 22.45638, + 21.5671, + 21.68709, + 22.28587, + 21.5795, + 21.61889, + 22.17575, + 21.58009, + 21.78561, + 22.27902, + 21.72767, + 21.61892, + 21.97467, + 21.57492, + 21.58488, + 22.02006, + 21.59664, + 21.5647, + 21.57561, + 21.77696, + 21.59375, + 21.55886, + 21.65411, + 21.57724, + 21.59547, + 21.5957, + 21.87417, + 21.53956, + 21.58601, + 21.87336, + 21.96485, + 21.6116, + 21.53532, + 22.70447, + 21.74116, + 21.57381, + 22.69849, + 21.59157, + 21.5731, + 22.58736, + 21.88272, + 21.57577, + 21.91797, + 21.76673, + 21.65596, + 21.49361, + 21.69173, + 21.54253, + 21.53864, + 21.89686, + 21.56388, + 22.06221, + 21.58559, + 21.88306, + 22.69777, + 21.56899, + 21.95677, + 22.52568, + 21.57915, + 21.56637, + 22.83046, + 21.57035, + 21.58179, + 22.38179, + 21.55364, + 21.61491, + 21.72159, + 21.94362, + 21.56172, + 21.54705, + 22.16372, + 21.86827, + 21.55448, + 21.51826, + 21.91613, + 21.54283, + 21.53507, + 21.75992, + 21.80093, + 22.05688, + 21.52552, + 21.56401, + 21.94125, + 21.69252, + 21.73504, + 22.62287, + 21.58912, + 21.58755, + 22.8816, + 21.80635, + 21.57159, + 22.12017, + 21.94203, + 21.58933, + 21.54906, + 21.66765, + 22.04293, + 21.57036, + 21.52805, + 21.99697, + 21.54062, + 21.89365, + 21.64669, + 22.15105, + 21.82581, + 21.55663, + 21.55671, + 21.9723, + 21.87363, + 21.65283, + 21.60476, + 21.72676, + 21.88276, + 21.61409, + 21.5905, + 22.03152, + 21.66849, + 21.89073, + 21.54827, + 21.8036, + 21.5708, + 21.69278, + 21.72254, + 21.59411, + 21.81518, + 21.56745, + 22.01509, + 21.59628, + 21.58522, + 21.6881, + 21.78942, + 22.00739, + 22.26501, + 21.79779, + 21.57775, + 22.53696, + 21.62551, + 21.55471, + 22.5533, + 21.79729, + 21.8075, + 22.76188, + 21.58442, + 21.58103, + 22.64152, + 21.65659, + 21.54801, + 21.72144, + 21.63657, + 21.73783, + 21.53477, + 21.62065, + 22.08425, + 21.75025, + 21.57749, + 22.05431, + 21.55263, + 21.55941, + 22.48433, + 21.95487, + 22.02954, + 22.65564, + 21.52373, + 21.67427, + 22.23854, + 21.93164, + 21.55903, + 22.33708, + 21.74249, + 21.57163, + 21.88797, + 21.71366, + 21.74071, + 21.57818, + 22.165, + 21.56903, + 21.63611, + 22.18623, + 21.58541, + 21.98815, + 21.84912, + 21.82375, + 21.61599, + 22.33696, + 22.11626, + 21.56298, + 22.37547, + 21.57281, + 21.7819, + 22.54384, + 21.57393, + 21.75278, + 21.95339, + 21.90502, + 21.61419, + 22.06952, + 21.6969, + 21.55399, + 21.90219, + 21.69707, + 21.84769, + 21.54528, + 21.92537, + 21.64732, + 21.55662, + 21.87083, + 21.60922, + 22.31197, + 21.85389, + 22.10234, + 21.64679, + 22.03962, + 21.80759, + 21.53678, + 22.49657, + 21.56291, + 21.79541, + 22.56068, + 21.70808, + 21.59511, + 22.13381, + 22.01638, + 21.62987, + 21.68787, + 21.59191, + 22.27096, + 21.65622, + 21.65535, + 21.67944, + 21.87005, + 21.72168, + 22.42433, + 21.78952, + 21.63349, + 22.57195, + 21.72304, + 21.86347, + 23.00344, + 21.80272, + 21.65009, + 22.95311, + 21.62943, + 21.61491, + 22.86763, + 21.59683, + 22.95715, + 21.78183, + 21.60624, + 22.49151, + 21.8046, + 21.65214, + 21.99899, + 22.05943, + 21.67257, + 21.97611, + 21.61917, + 21.79754, + 21.7178, + 21.62565, + 21.97799, + 21.60036, + 21.57731, + 21.60589, + 21.88809, + 21.60464, + 21.59186, + 
21.70947, + 21.55285, + 21.662, + 21.77912, + 21.80357, + 21.68785, + 22.28477, + 21.68438, + 21.602, + 23.21924, + 21.82788, + 21.83267, + 22.21102, + 21.60302, + 21.77652, + 21.68499, + 21.76864, + 21.56026, + 21.63419, + 21.57534, + 21.55424, + 22.00135, + 21.65779, + 21.74632, + 21.56472, + 21.63263, + 21.57969, + 21.68821, + 21.87767, + 21.55614, + 21.97877, + 22.1321, + 21.69579, + 21.58538, + 22.40047, + 21.72507, + 21.58581, + 22.99751, + 21.59258, + 21.6901, + 22.79874, + 21.58407, + 21.57028, + 22.21932, + 21.89652, + 21.76627, + 22.2725, + 21.54544, + 21.6826, + 21.57891, + 21.52155, + 21.8777, + 21.57766, + 21.86917, + 21.5868, + 21.58119, + 21.81018, + 21.66853, + 21.75028, + 21.68756, + 21.73277, + 21.55003, + 21.85552, + 21.84644, + 21.63748, + 23.05416, + 21.5771, + 21.77141, + 22.42295, + 21.5426, + 21.75665, + 22.45468, + 21.70309, + 21.6274, + 21.55694, + 21.73986, + 21.59821, + 21.73266, + 21.78794, + 22.22515, + 21.75243, + 21.81952, + 22.92543, + 21.57938, + 21.51924, + 22.91805, + 21.50564, + 21.54366, + 21.84475, + 21.65069, + 21.52916, + 21.46206, + 21.53216, + 21.5666, + 21.91406, + 21.49215, + 21.48106, + 21.66519, + 21.62389, + 21.47563, + 21.80309, + 21.83562, + 21.76522, + 21.60353, + 21.69688, + 21.78853, + 21.47928, + 22.33244, + 21.48192, + 21.43361, + 22.47305, + 21.42368, + 21.43701, + 22.74971, + 21.81264, + 21.47023, + 21.741, + 21.55812, + 21.43555, + 22.22581, + 21.49308, + 21.57832, + 21.44682, + 21.50003, + 21.45481, + 21.44407, + 22.08694, + 21.44163, + 21.48675, + 21.58044, + 21.71608, + 21.43777, + 21.73142, + 21.71082, + 21.49479, + 21.93566, + 21.49392, + 21.61805, + 22.02037, + 21.49327, + 21.92543, + 22.39295, + 21.47744, + 21.48991, + 22.62925, + 21.7422, + 21.46264, + 21.89569, + 21.5788, + 21.45998, + 21.89958, + 21.93826, + 21.49643, + 21.45507, + 21.67425, + 21.6661, + 21.47589, + 21.60135, + 21.51766, + 21.47556, + 21.614, + 21.52802, + 21.92357, + 21.78433, + 21.44884, + 21.44659, + 22.11996, + 21.44306, + 21.45327, + 22.47322, + 21.52168, + 21.47706, + 22.28428, + 21.66654, + 21.48472, + 21.99957, + 22.05144, + 21.60125, + 21.66895, + 21.41358, + 21.49856, + 21.60013, + 21.80061, + 21.4953, + 21.93688, + 21.52449, + 21.64882, + 21.77471, + 22.47314, + 21.53808, + 21.52955, + 23.02877, + 22.01145, + 21.55342, + 23.06575, + 21.60921, + 21.47428, + 23.1464, + 21.575, + 21.48075, + 21.45599, + 21.5578, + 21.49987, + 21.47561, + 21.45568, + 21.44474, + 21.45348, + 21.48495, + 21.50041, + 21.60838, + 21.46336, + 21.55327, + 21.88429, + 21.50954, + 21.45561, + 22.54313, + 21.73337, + 21.45681, + 23.0479, + 21.73563, + 21.51128, + 22.0209, + 21.45315, + 21.42352, + 21.45035, + 21.6741, + 21.44737, + 21.43527, + 21.47702, + 21.50804, + 21.51431, + 21.44046, + 21.44285, + 21.72913, + 21.49306, + 21.47534, + 21.46813, + 21.67425, + 21.43789, + 21.47956, + 21.46762, + 21.73071, + 21.49577, + 22.38573, + 21.49366, + 21.4214, + 22.91327, + 21.67188, + 21.73738, + 22.53097, + 21.41509, + 21.48897, + 21.83018, + 21.42701, + 21.49333, + 21.44356, + 21.48265, + 21.43457, + 21.61751, + 21.42646, + 21.41981, + 21.69832, + 21.46145, + 21.41881, + 21.4058, + 21.59873, + 21.64021, + 21.43311, + 21.67352, + 21.56198, + 21.43013, + 22.21617, + 21.54359, + 21.70642, + 23.05833, + 21.46526, + 21.49916, + 21.97741, + 21.46583, + 22.34882, + 21.6075, + 21.68976, + 21.47015, + 21.42514, + 21.41413, + 21.41722, + 21.66907, + 21.41475, + 22.15442, + 21.44021, + 21.46236, + 21.44385, + 21.69637, + 21.44714, + 21.4207, + 22.33336, + 21.40789, + 21.7441, + 23.15104, + 
21.53398, + 21.4527, + 22.07079, + 21.66019, + 21.48616, + 22.1905, + 142.8069, + 21.50322, + 21.5116, + 21.48465, + 21.6282, + 21.71555, + 21.52907, + 21.48035, + 21.51896, + 21.46203, + 21.48374, + 21.484, + 21.55581, + 21.48894, + 21.49048, + 21.48268, + 21.51904, + 21.694, + 21.60124, + 21.5014, + 21.50869, + 22.42254, + 21.61054, + 21.48395, + 22.36069, + 21.46131, + 21.48028, + 22.7717, + 21.61209, + 21.4578, + 22.40532, + 21.69094, + 21.52104, + 21.59249, + 21.58457, + 21.69248, + 21.57888, + 21.48798, + 21.51147, + 21.47921, + 21.47032, + 21.45736, + 21.70132, + 21.45491, + 21.5088, + 21.68301, + 22.14732, + 21.50698, + 21.47129, + 22.29572, + 21.49958, + 21.52491, + 22.55088, + 21.87606, + 21.52709, + 22.49417, + 21.52359, + 21.46711, + 22.61183, + 21.48452, + 21.47112, + 22.34735, + 21.43862, + 21.56923, + 21.59271, + 21.58337, + 21.55402, + 21.48213, + 21.84976, + 21.46791, + 21.47816, + 21.51783, + 21.46198, + 21.50114, + 21.45598, + 21.48008, + 22.12022, + 22.27965, + 21.4699, + 22.3084, + 21.47562, + 21.78045, + 22.52926, + 21.49684, + 21.68107, + 21.88065, + 21.62485, + 21.49029, + 21.58714, + 21.50628, + 21.49503, + 21.58564, + 21.51044, + 21.78372, + 21.62399, + 21.54225, + 21.55332, + 21.5355, + 21.75599, + 21.5098, + 21.56664, + 22.12525, + 22.23986, + 21.50774, + 22.23804, + 21.77882, + 21.47356, + 21.9393, + 21.50085, + 21.84186, + 22.18411, + 21.47083, + 21.8029, + 22.08525, + 21.51064, + 21.5307, + 21.79901, + 22.52934, + 21.65642, + 21.60962, + 23.02408, + 22.08945, + 21.69036, + 22.98063, + 21.68009, + 21.58362, + 23.0487, + 21.64721, + 21.85456, + 22.85459, + 21.68391, + 21.75407, + 22.51016, + 21.57963, + 21.58427, + 21.99586, + 21.57003, + 21.57963, + 21.57464, + 21.59734, + 21.59526, + 21.59161, + 21.96495, + 21.57056, + 21.70828, + 21.62271, + 21.61008, + 22.45152, + 21.59445, + 21.56591, + 22.46818, + 21.69018, + 21.93651, + 22.54885, + 21.62453, + 21.71384, + 21.88177, + 21.8953, + 21.62815, + 21.82053, + 21.71279, + 21.60486, + 21.64095, + 21.59952, + 21.62787, + 21.59293, + 21.57944, + 21.60423, + 21.73125, + 21.72972, + 21.59269, + 21.9238, + 21.95451, + 21.60263, + 22.76068, + 21.58194, + 21.61746, + 22.53708, + 21.60585, + 22.06127, + 22.3608, + 21.58855, + 21.57793, + 22.02168, + 21.98607, + 21.60375, + 21.80802, + 21.61122, + 21.58418, + 21.55624, + 21.80077, + 21.60522, + 21.57758, + 21.8121, + 21.56986, + 21.61115, + 21.68735, + 21.58259, + 21.79775, + 22.64034, + 21.60312, + 21.70466, + 22.56647, + 21.64692, + 21.59262, + 22.16153, + 21.59538, + 21.87165, + 22.35202, + 21.58603, + 21.56376, + 21.69425, + 21.91171, + 21.64526, + 21.58628, + 22.24154, + 21.65495, + 21.6447, + 21.83352, + 21.77844, + 21.62019, + 21.822, + 21.56919, + 21.62323, + 21.9777, + 21.59773, + 21.60118, + 22.0999, + 21.58842, + 21.60266, + 22.71779, + 21.71276, + 21.56083, + 146.56967, + 21.45808, + 21.5024, + 21.43204, + 21.45082, + 21.71256, + 21.42753, + 21.48536, + 21.4443, + 21.46259, + 21.45997, + 21.47048, + 21.52677, + 21.43538, + 21.43817, + 21.42289, + 21.58035, + 21.63596, + 21.42529, + 21.44615, + 21.41415, + 21.78891, + 21.6747, + 21.47311, + 21.87312, + 21.5834, + 21.48461, + 22.49995, + 21.4496, + 21.42049, + 22.73259, + 21.66057, + 21.56656, + 22.4381, + 21.41849, + 21.4069, + 21.82997, + 21.70164, + 21.42354, + 21.47467, + 21.42369, + 21.72058, + 21.41317, + 21.44279, + 21.41156, + 21.72298, + 21.4215, + 21.44296, + 22.17571, + 21.47875, + 21.6263, + 22.38635, + 22.13911, + 21.4686, + 22.29858, + 21.50379, + 21.43652, + 22.47829, + 21.45278, + 21.81296, + 
21.67889, + 21.45739, + 21.57295, + 21.46393, + 21.47328, + 21.45979, + 21.41481, + 21.78815, + 21.4693, + 21.47041, + 21.47015, + 21.40857, + 21.42924, + 21.48908, + 21.91266, + 21.41579, + 22.04802, + 22.12431, + 21.4355, + 22.21189, + 21.4382, + 21.70653, + 22.29959, + 21.47712, + 21.96527, + 22.25433, + 21.495, + 21.4189, + 22.10533, + 21.44888, + 21.46879, + 21.64526, + 21.41628, + 21.4427, + 21.47358, + 21.41162, + 21.4308, + 21.41858, + 21.43157, + 21.64671, + 21.43574, + 21.41598, + 21.66396, + 21.54347, + 22.47212, + 21.50079, + 21.43311, + 22.33112, + 21.5431, + 22.10761, + 21.831, + 21.54832, + 21.45517, + 22.57453, + 21.6902, + 21.52412, + 22.08117, + 145.88203, + 21.71075, + 21.54059, + 21.5354, + 21.5675, + 21.73097, + 21.52441, + 21.56653, + 21.53841, + 21.49171, + 21.50596, + 21.498, + 21.59644, + 21.5032, + 21.512, + 21.52051, + 21.54917, + 21.61099, + 21.52134, + 21.53039, + 21.48055, + 21.62609, + 21.52657, + 21.52421, + 21.46705, + 21.51492, + 21.98726, + 21.83399, + 21.47299, + 22.62086, + 21.78829, + 21.49207, + 22.63745, + 21.55799, + 21.46961, + 21.84812, + 21.46944, + 21.46622, + 21.99589, + 21.47381, + 21.47848, + 21.61846, + 21.48407, + 21.49398, + 21.44872, + 21.67485, + 21.63505, + 21.46163, + 22.34559, + 21.47809, + 21.57469, + 21.77083, + 21.65937, + 21.57619, + 22.14579, + 21.76767, + 21.47012, + 22.61233, + 21.65102, + 21.47724, + 22.13934, + 21.4823, + 21.66911, + 21.97198, + 21.47686, + 21.4771, + 21.47093, + 21.64354, + 21.51281, + 21.62166, + 22.03233, + 21.51055, + 21.74672, + 21.48584, + 21.51262, + 21.46304, + 21.66524, + 21.78504, + 21.48946, + 21.76664, + 21.47263, + 21.64748, + 22.23729, + 21.49324, + 21.71291, + 22.69521, + 21.63739, + 21.68188, + 22.87513, + 21.49304, + 21.55095, + 21.61519, + 21.52643, + 21.59693, + 21.49414, + 22.54746, + 21.63094, + 21.49683, + 21.78281, + 21.47511, + 21.48744, + 21.48674, + 21.7982, + 21.57079, + 21.63743, + 21.58207, + 21.48284, + 21.78721, + 21.46952, + 21.65917, + 22.08725, + 21.4992, + 21.57851, + 21.99751, + 21.48665, + 21.59159, + 22.53135, + 22.42377, + 21.56328, + 21.53964, + 23.34228, + 22.13318, + 21.60877, + 23.10386, + 21.51107, + 22.24254, + 21.52256, + 22.25747, + 22.32143, + 21.53292, + 21.78864, + 21.6714, + 21.5156, + 21.53193, + 22.17002, + 21.6656, + 21.5585, + 21.53614, + 21.52829, + 21.50721, + 21.5401, + 22.1409, + 21.63641, + 21.50148, + 21.52724, + 21.51714, + 21.92943, + 21.4961, + 21.51644, + 21.63135, + 21.50551, + 21.55763, + 22.64879, + 21.91667, + 21.53831, + 23.03509, + 21.5096, + 21.54729, + 22.80404, + 21.51834, + 21.79143, + 21.51689, + 21.52294, + 21.52774, + 21.52755, + 21.85295, + 21.49936, + 21.5862, + 21.52196, + 21.51654, + 21.63153, + 21.49327, + 21.71434, + 21.49537, + 21.57787, + 21.51932, + 21.52773, + 22.19905, + 21.53399, + 22.03063, + 22.59632, + 21.53548, + 21.59096, + 22.68196, + 21.47887, + 21.46642, + 22.9559, + 21.48049, + 21.4988, + 21.88327, + 22.00504, + 21.59266, + 21.48892, + 21.78309, + 21.57641, + 21.48021, + 21.55056, + 21.49603, + 21.74652, + 21.6697, + 21.80577, + 21.52452, + 21.69905, + 21.47888, + 21.5028, + 21.99421, + 21.55231, + 21.65769, + 22.29546, + 21.51172, + 21.5093, + 22.49931, + 21.55806, + 21.46271, + 22.42236, + 22.03693, + 21.64107, + 21.72011, + 21.5809, + 21.71728, + 21.49746, + 21.68965, + 21.54438, + 21.58307, + 21.42611, + 21.48335, + 21.81653, + 21.52115, + 21.59352, + 21.79087, + 21.79479, + 21.56289, + 21.85769, + 21.56866, + 21.91235, + 21.53029, + 21.61246, + 21.65742, + 21.52113, + 21.50281, + 21.584, + 21.84119, + 
21.75816, + 21.62656, + 21.50146, + 21.73751, + 21.52849, + 21.61599, + 21.71839, + 21.73666, + 21.65175, + 21.61274, + 22.08802, + 21.59661, + 21.79191, + 21.6944, + 21.61806, + 21.58048, + 21.64795, + 21.93579, + 21.822, + 21.57433, + 21.594, + 21.80216, + 21.6429, + 21.61486, + 21.77914, + 21.58244, + 21.60544, + 21.79309, + 21.86992, + 21.67645, + 21.602, + 21.61173, + 21.53684, + 21.57035, + 21.54446, + 21.6553, + 21.52828, + 21.50856, + 21.53533, + 21.51644, + 21.50335, + 21.56032, + 21.52578, + 21.63123, + 21.72904, + 21.56399, + 21.70109, + 21.57628, + 21.55785, + 22.13417, + 21.53338, + 22.57949, + 21.52532, + 21.705, + 21.61543, + 21.53494, + 21.52628, + 21.55159, + 21.7633, + 21.55347, + 21.84504, + 21.70438, + 21.54732, + 21.77428, + 21.5466, + 21.54042, + 21.6364, + 22.14655, + 21.52873, + 21.50331, + 22.14725, + 21.54372, + 21.53496, + 22.62301, + 21.50948, + 21.57116, + 23.08007, + 21.81751, + 21.5291, + 22.00298, + 21.53884, + 21.52971, + 21.54367, + 21.96324, + 21.53007, + 21.61884, + 21.89253, + 21.53172, + 21.52213, + 21.52903, + 21.66428, + 21.53107, + 21.54149, + 21.64372, + 21.49875, + 21.52825, + 21.53878, + 21.62825, + 21.97325, + 21.58806, + 21.80651, + 22.17837, + 21.61354, + 21.52312, + 22.51912, + 21.56807, + 21.52901, + 22.46097, + 21.93251, + 21.55098, + 21.77025, + 21.38795, + 21.45579, + 21.37344, + 21.36857, + 21.34813, + 21.40872, + 21.68973, + 21.48912, + 21.36768, + 21.37062, + 21.64229, + 21.39834, + 21.34632, + 21.52998, + 21.32887, + 21.34177, + 21.4569, + 21.56627, + 21.34089, + 21.43349, + 149.41389, + 21.52654, + 21.59368, + 21.56816, + 21.58154, + 21.67142, + 21.53662, + 21.54059, + 21.53109, + 21.56806, + 21.58924, + 21.55296, + 21.62975, + 21.52098, + 21.55582, + 21.56036, + 21.49619, + 21.85151, + 21.52779, + 21.51699, + 21.53346, + 21.61054, + 21.78313, + 21.49933, + 21.50669, + 21.53462, + 21.51713, + 21.97489, + 21.61486, + 21.5053, + 21.50298, + 21.50681, + 21.75626, + 21.463, + 21.48672, + 21.58988, + 21.72567, + 21.73965, + 21.51908, + 21.51784, + 21.45934, + 21.53754, + 23.01655, + 21.47714, + 21.54127, + 22.18103, + 21.67531, + 21.59345, + 21.47328, + 21.64961, + 21.48258, + 21.52313, + 21.54641, + 21.61563, + 21.4824, + 21.47113, + 21.84853, + 21.57625, + 21.51524, + 21.52997, + 21.50628, + 21.64664, + 21.58102, + 21.48271, + 22.05493, + 21.6616, + 21.4977, + 22.75326, + 21.59856, + 21.61931, + 22.3985, + 21.50767, + 21.65728, + 21.73722, + 21.54152, + 21.55252, + 21.57769, + 21.53825, + 21.50828, + 21.65716, + 21.15989, + 21.88503, + 21.47298, + 21.66755, + 21.52073, + 21.51004, + 21.69035, + 21.50243, + 21.84939, + 21.60291, + 21.52477, + 21.69724, + 22.24655, + 21.56001, + 21.54379, + 22.71299, + 21.50399, + 21.49905, + 22.36485, + 21.50131, + 20.91825, + 21.5623, + 21.59273, + 21.52829, + 21.72897, + 21.48931, + 21.54727, + 21.48473, + 21.58657, + 21.84502, + 21.84157, + 21.50338, + 22.06379, + 22.13465, + 21.54407, + 21.52397, + 22.57475, + 21.48901, + 22.02185, + 22.97197, + 21.83302, + 21.48891, + 21.54666, + 21.55527, + 21.44949, + 21.41495, + 21.51934, + 21.77577, + 21.5863, + 21.44902, + 21.45625, + 21.69513, + 21.55645, + 21.48493, + 21.6175, + 21.44225, + 21.41906, + 21.58026, + 21.66796, + 21.44687, + 21.51904, + 21.47391, + 21.44333, + 21.43228, + 21.43386, + 21.5319, + 21.45399, + 21.41062, + 21.46382, + 21.44175, + 21.44121, + 21.54329, + 21.43163, + 21.48617, + 21.61424, + 21.44527, + 21.48318, + 21.46964, + 21.46581, + 21.46561, + 21.44735, + 23.54856, + 21.42206, + 21.54659, + 21.56809, + 21.46545, + 21.43187, + 
21.43565, + 21.57391, + 21.44946, + 21.67912, + 21.67854, + 21.42925, + 21.60362, + 21.4395, + 21.47978, + 21.43629, + 21.67325, + 21.41691, + 21.40849, + 21.57617, + 21.44286, + 21.44737, + 21.76506, + 21.44048, + 21.43151, + 23.13409, + 21.59008, + 21.43902, + 22.58402, + 21.44042, + 21.42973, + 22.02836, + 21.83129, + 21.49341, + 21.64447, + 21.75716, + 21.46585, + 21.47689, + 21.43305, + 21.52235, + 21.44002, + 21.43282, + 21.51689, + 21.41972, + 21.41654, + 21.44403, + 21.47841, + 21.4566, + 21.453, + 21.64254, + 21.57335, + 21.46264, + 21.45194, + 22.0507, + 21.45999, + 21.43745, + 22.97723, + 21.7691, + 21.44731, + 21.48336, + 21.84122, + 21.55548, + 21.45124, + 22.08764, + 21.43085, + 21.4739, + 21.61909, + 21.44926, + 21.44375, + 21.44155, + 21.54431, + 21.64954, + 21.58894, + 21.46746, + 21.70036, + 21.44327, + 21.60511, + 22.57814, + 21.72853, + 21.51416, + 22.9185, + 21.95488, + 21.64031, + 22.4101, + 21.51362, + 21.45811, + 21.56473, + 21.46649, + 21.45853, + 21.4747, + 21.44679, + 21.55151, + 21.44983, + 21.46462, + 21.54712, + 21.53437, + 21.46994, + 21.48958, + 21.51021, + 21.61304, + 21.46307, + 21.61999, + 21.44696, + 21.50673, + 21.43353, + 21.72038, + 21.78937, + 21.43614, + 23.14673, + 21.4319, + 21.4333, + 22.79548, + 21.47762, + 21.43184, + 21.43131, + 21.60482, + 21.42537, + 21.50112, + 21.42808, + 21.43978, + 21.49424, + 21.43013, + 21.54489, + 21.41546, + 21.50626, + 21.46931, + 21.45762, + 21.50328, + 21.40607, + 21.44674, + 21.47968, + 21.78925, + 21.75178, + 21.40919, + 21.4921, + 21.43849, + 22.33127, + 21.423, + 21.61097, + 23.08025, + 21.41651, + 21.45202, + 22.15586, + 21.46312, + 21.50652, + 21.54555, + 21.58263, + 21.45347, + 21.58255, + 21.42158, + 21.41072, + 21.42724, + 21.47008, + 21.43735, + 21.46616, + 21.56521, + 21.84152, + 21.42992, + 21.59851, + 21.82737, + 21.84893, + 21.42644, + 22.12304, + 23.14375, + 21.60519, + 21.45527, + 23.10497, + 21.4592, + 21.42501, + 21.89466, + 21.47457, + 21.50773, + 21.45204, + 21.5374, + 21.42299, + 21.41122, + 21.5085, + 21.44824, + 21.48767, + 21.41712, + 21.44367, + 21.51082, + 21.45433, + 21.4379, + 21.4432, + 21.93589, + 21.43155, + 22.06327, + 22.92958, + 21.41656, + 21.42872, + 22.94827, + 21.69178, + 21.46226, + 22.24065, + 21.79442, + 21.68378, + 21.63927, + 21.81347, + 21.66978, + 22.56515, + 21.61945, + 21.60239, + 21.91619, + 21.70785, + 21.57907, + 21.59388, + 21.58731, + 21.75914, + 21.59023, + 21.59088, + 21.70108, + 21.75731, + 21.63198, + 21.60036, + 21.59559, + 21.80771, + 21.60708, + 21.71292, + 21.82598, + 21.66252, + 21.57252, + 22.46304, + 21.95076, + 21.58654, + 23.18729, + 21.60266, + 21.57577, + 22.39223, + 21.58335, + 21.78007, + 21.74344, + 21.64603, + 21.57589, + 21.57082, + 21.76869, + 21.56773, + 21.82486, + 21.55803, + 21.61142, + 21.54349, + 21.5602, + 21.70089, + 21.58088, + 21.57338, + 21.55651, + 21.58702, + 21.58944, + 21.7049, + 21.86038, + 21.91736, + 21.73027, + 21.5464, + 22.589, + 21.56515, + 21.77919, + 22.85871, + 21.55888, + 21.71895, + 21.55665, + 21.58562, + 21.70024, + 22.13453, + 21.6026, + 21.5868, + 21.56531, + 21.57685, + 21.60075, + 21.58372, + 21.98746, + 21.5833, + 21.92795, + 21.74113, + 21.56639, + 22.51809, + 21.58413, + 21.75057, + 22.7856, + 21.55994, + 21.93107, + 22.63202, + 21.67662, + 21.60911, + 22.33818, + 21.55804, + 21.74773, + 22.33305, + 21.57394, + 21.70216, + 21.56695, + 21.58503, + 21.59897, + 21.601, + 21.61588, + 21.58364, + 21.93567, + 21.69898, + 21.58536, + 21.5903, + 21.93217, + 21.61726, + 21.62111, + 22.57579, + 21.62673, + 22.05375, 
+ 22.47564, + 21.59261, + 21.60979, + 22.51018, + 21.77757, + 21.77647, + 148.99738, + 21.45087, + 21.45186, + 21.45362, + 21.41534, + 21.69003, + 21.41813, + 21.45619, + 21.60538, + 21.68758, + 21.41283, + 21.43567, + 21.41987, + 21.39449, + 21.58897, + 21.65373, + 21.40816, + 21.42618, + 22.23536, + 21.39327, + 21.49545, + 22.84484, + 21.41599, + 21.40939, + 22.64348, + 21.63325, + 21.46436, + 22.00187, + 21.58326, + 21.4316, + 21.43797, + 21.39769, + 21.92949, + 21.41308, + 21.42226, + 21.71479, + 21.43151, + 21.52, + 21.42525, + 21.59853, + 21.57578, + 21.43446, + 21.61681, + 21.43927, + 21.45015, + 21.44897, + 22.08352, + 21.55701, + 22.44639, + 21.42849, + 21.48295, + 22.51484, + 21.48636, + 21.72884, + 21.89283, + 21.42343, + 21.67812, + 21.64483, + 21.63708, + 21.41266, + 21.65123, + 21.44618, + 21.61533, + 21.86241, + 21.42007, + 21.44216, + 21.43338, + 21.39772, + 21.38327, + 21.50204, + 22.16446, + 21.40958, + 21.67229, + 22.39931, + 21.64397, + 21.39064, + 22.37575, + 21.48587, + 21.56677, + 22.40684, + 21.39897, + 21.66671, + 21.71957, + 21.41849, + 21.51428, + 21.45091, + 21.96433, + 21.42896, + 21.80562, + 21.43006, + 21.43935, + 21.45932, + 21.43191, + 21.60964, + 21.41457, + 22.24236, + 21.45485, + 21.41674, + 21.99351, + 21.41894, + 21.49025, + 22.22929, + 21.40828, + 21.47861, + 22.48122, + 21.52944, + 21.41681, + 22.04969, + 21.38011, + 21.57997, + 22.09864, + 21.43407, + 21.55106, + 22.19244, + 21.4537, + 21.57575, + 21.42574, + 21.75951, + 21.56903, + 21.74613, + 21.69635, + 21.5352, + 21.53788, + 21.55136, + 21.74194, + 21.66495, + 21.74068, + 21.53686, + 23.04973, + 21.71376, + 21.60627, + 22.65402, + 21.49118, + 21.56297, + 22.20888, + 21.47583, + 21.46699, + 21.49504, + 21.49498, + 26.34066, + 21.64714, + 22.01499, + 21.46068, + 21.70976, + 21.48282, + 21.67193, + 21.45333, + 21.48813, + 21.57205, + 21.74557, + 21.4878, + 21.72144, + 22.14816, + 22.06482, + 21.61135, + 22.40082, + 21.72118, + 21.53062, + 23.43495, + 21.49529, + 21.97108, + 22.04965, + 21.45288, + 21.48275, + 21.48481, + 22.44759, + 21.46132, + 21.80707, + 21.46533, + 21.44985, + 21.51299, + 21.6095, + 22.00613, + 21.44863, + 21.67141, + 21.51904, + 21.48117, + 21.54589, + 21.50514, + 21.81355, + 21.75925, + 21.60631, + 21.53182, + 22.58563, + 21.6423, + 21.5126, + 22.70399, + 21.5176, + 21.46538, + 22.3679, + 22.3979, + 21.50148, + 21.69178, + 22.1631, + 21.56535, + 21.47041, + 21.60833, + 21.98674, + 21.50263, + 21.47645, + 21.9439, + 21.49958, + 21.45705, + 21.68547, + 21.44871, + 21.75395, + 21.61946, + 22.05081, + 21.99069, + 21.47692, + 21.49688, + 22.04703, + 21.46369, + 21.48954, + 22.36658, + 22.19523, + 21.67834, + 22.40389, + 21.50949, + 21.62486, + 21.90676, + 21.48558, + 22.00095, + 21.7934, + 21.51948, + 21.46257, + 21.59903, + 21.47098, + 21.46803, + 21.97705, + 22.03763, + 21.45286, + 21.47488, + 144.60007, + 21.56963, + 21.5342, + 21.53681, + 21.56406, + 21.96356, + 21.54307, + 21.51891, + 21.52546, + 21.53364, + 21.50927, + 21.63958, + 21.58509, + 21.50613, + 21.49883, + 21.48584, + 21.5892, + 22.14145, + 21.48442, + 21.50465, + 23.71029, + 21.49158, + 21.48361, + 22.46544, + 21.4845, + 21.49207, + 21.75065, + 21.80818, + 21.59829, + 21.50598, + 21.70931, + 21.51391, + 21.60423, + 21.66108, + 21.62796, + 21.64064, + 21.49036, + 21.51825, + 22.12746, + 21.63203, + 21.60022, + 21.51107, + 22.32683, + 21.62702, + 21.68162, + 22.97898, + 21.54192, + 21.51468, + 22.38544, + 21.48763, + 21.51053, + 22.1996, + 21.59543, + 21.6692, + 21.49052, + 21.49631, + 21.47779, + 21.6864, + 
21.58671, + 21.48205, + 21.62892, + 21.48467, + 21.48016, + 21.50617, + 21.7303, + 21.47185, + 21.50715, + 21.96781, + 21.49542, + 21.59906, + 22.6447, + 21.47831, + 21.66787, + 22.16209, + 21.63028, + 21.49444, + 22.3151, + 21.56746, + 21.50691, + 22.33439, + 21.66591, + 21.68378, + 21.60958, + 21.49365, + 21.56534, + 21.49094, + 21.9099, + 21.67978, + 21.49052, + 21.6604, + 21.5277, + 21.67594, + 21.5013, + 21.84143, + 21.55081, + 22.13372, + 21.55198, + 21.49173, + 22.34639, + 21.48882, + 21.70618, + 22.13215, + 21.66935, + 21.6016, + 22.1598, + 21.54518, + 21.51286, + 22.62902, + 21.50501, + 21.47023, + 22.13453, + 21.69733, + 21.594, + 21.50252, + 21.70252, + 21.54795, + 22.79333, + 21.59837, + 21.67672, + 23.2666, + 22.24294, + 21.75217, + 23.23928, + 21.74556, + 21.66679, + 22.93906, + 21.69355, + 21.98272, + 22.91322, + 21.99241, + 21.83147, + 22.5227, + 21.67384, + 21.62416, + 22.47656, + 21.67822, + 21.63718, + 21.64426, + 21.7326, + 21.76908, + 21.66174, + 21.79028, + 21.92622, + 21.64388, + 21.95417, + 21.67443, + 22.16162, + 21.66173, + 21.78984, + 22.66648, + 21.63336, + 22.12132, + 22.48049, + 21.71417, + 21.75484, + 22.52258, + 21.86187, + 21.68954, + 21.7817, + 21.78681, + 21.84849, + 21.62195, + 21.57876, + 21.88578, + 21.58939, + 21.61294, + 21.5879, + 21.81044, + 21.58273, + 21.81224, + 21.8226, + 21.68392, + 21.66322, + 21.59405, + 22.64067, + 21.68145, + 21.99891, + 22.12934, + 21.65859, + 21.76978, + 22.48611, + 21.64186, + 21.7664, + 22.76148, + 21.70806, + 21.66939, + 22.07162, + 21.72435, + 21.66379, + 21.67439, + 21.70436, + 21.64651, + 21.78717, + 22.14585, + 21.70251, + 21.63326, + 21.63268, + 21.6665, + 21.74414, + 21.7105, + 21.80335, + 21.86198, + 21.6546, + 21.62578, + 21.65526, + 22.23226, + 21.63566, + 22.01678, + 22.88632, + 21.64897, + 21.58507, + 22.62085, + 21.54297, + 21.57696, + 21.9491, + 21.56577, + 21.60951, + 21.62185, + 21.68652, + 21.79164, + 21.8505, + 21.5606, + 21.58963, + 21.66431, + 21.653, + 21.87288, + 22.06897, + 21.58569, + 21.57682, + 22.24193, + 21.64965, + 21.64543, + 22.77604, + 22.06601, + 21.51956, + 21.6099, + 21.52744, + 21.55185, + 21.5442, + 21.57829, + 21.90724, + 21.74616, + 21.53469, + 21.50715, + 21.71646, + 21.5009, + 21.55751, + 21.7219, + 21.48802, + 21.49234, + 21.75059, + 21.70982, + 21.49529, + 21.52759, + 21.54493, + 21.47167, + 22.24105, + 21.50892, + 21.47983, + 23.00498, + 21.82787, + 21.49047, + 22.297, + 21.47058, + 21.61332, + 21.45605, + 21.50505, + 21.67595, + 21.50675, + 21.75465, + 21.53391, + 21.71179, + 21.53099, + 21.50627, + 21.73101, + 21.47213, + 21.55113, + 21.50538, + 21.86218, + 21.47282, + 21.49278, + 22.29646, + 21.5022, + 21.51271, + 22.50128, + 21.75631, + 21.48092, + 22.77996, + 21.45921, + 21.51245, + 21.83765, + 21.49476, + 21.48503, + 21.53251, + 21.48063, + 21.47698, + 21.65149, + 21.47668, + 21.58117, + 21.49317, + 21.47561, + 21.47919, + 21.46605, + 21.66778, + 21.50228, + 21.76958, + 21.49623, + 21.72803, + 21.49773, + 21.73565, + 21.86163, + 21.51171, + 22.28914, + 21.5011, + 21.72346, + 21.50976, + 21.71791, + 21.90563, + 22.04996, + 21.4957, + 21.51403, + 21.47697, + 21.48074, + 21.62856, + 21.51559, + 21.81358, + 21.48551, + 21.69962, + 21.46548, + 21.545, + 21.54307, + 21.50453, + 21.61782, + 22.00138, + 22.11029, + 21.44758, + 22.03919, + 21.50162, + 21.48106, + 22.7933, + 21.50625, + 22.26604, + 22.44251, + 21.48965, + 21.58442, + 21.56795, + 21.50909, + 21.51488, + 21.72057, + 138.06879, + 21.54331, + 21.59938, + 21.5547, + 21.52649, + 21.74892, + 21.51106, + 21.58054, + 
21.49594, + 21.5029, + 21.5216, + 21.48445, + 21.60748, + 21.50073, + 21.50445, + 21.52002, + 21.52854, + 21.75194, + 21.50781, + 21.50653, + 21.53886, + 21.6298, + 21.65182, + 21.53533, + 21.50952, + 21.50864, + 21.50241, + 21.61018, + 21.72447, + 21.50897, + 21.85884, + 21.5182, + 21.52365, + 22.42446, + 21.49897, + 22.17612, + 22.69951, + 21.67683, + 21.50679, + 21.79854, + 21.49739, + 21.51279, + 21.63616, + 21.48862, + 21.68302, + 21.50628, + 21.51613, + 21.57587, + 21.51114, + 21.54333, + 21.48607, + 21.67588, + 21.59783, + 21.48079, + 21.52143, + 21.71416, + 21.57711, + 21.47518, + 21.87652, + 21.65896, + 22.1036, + 22.50854, + 21.52687, + 21.53776, + 22.77522, + 21.48732, + 22.44962, + 22.01114, + 21.49217, + 21.72791, + 21.47052, + 21.51465, + 21.54685, + 21.66823, + 21.74246, + 21.49123, + 21.63798, + 21.51984, + 21.52589, + 21.9115, + 21.49533, + 22.02338, + 21.98291, + 21.50062, + 21.88354, + 22.5627, + 21.70596, + 21.61662, + 22.8774, + 21.49189, + 21.48763, + 22.67434, + 21.50889, + 21.64631, + 21.5299, + 21.64429, + 21.51915, + 21.61587, + 21.91783, + 21.52964, + 21.49414, + 21.67436, + 21.47715, + 21.49685, + 21.8267, + 21.49998, + 21.7164, + 22.01289, + 21.48126, + 21.51341, + 21.95688, + 21.53441, + 21.57615, + 22.40819, + 21.89717, + 21.50893, + 23.16485, + 21.69501, + 21.48232, + 21.41537, + 21.38971, + 21.38518, + 21.52319, + 21.59064, + 21.48896, + 21.38965, + 21.81098, + 21.41893, + 21.40796, + 21.94702, + 21.42209, + 21.45637, + 22.17652, + 21.56698, + 21.39951, + 22.85165, + 21.4428, + 21.41515, + 22.79811, + 21.6378, + 21.76793, + 22.69113, + 21.41487, + 21.4253, + 22.55215, + 21.40327, + 21.38558, + 21.39117, + 21.73987, + 21.39844, + 21.45017, + 21.53394, + 21.58961, + 21.35484, + 21.41395, + 21.43696, + 21.3739, + 21.36349, + 21.56645, + 22.28961, + 21.40661, + 21.36429, + 22.58153, + 21.36807, + 21.3614, + 22.44318, + 21.37492, + 21.50228, + 21.36326, + 21.35049, + 21.35776, + 21.34075, + 21.86766, + 21.40763, + 21.62003, + 21.39304, + 21.36419, + 21.41556, + 21.39511, + 21.73395, + 22.1611, + 21.85372, + 21.35844, + 22.49488, + 21.37574, + 21.34082, + 22.17738, + 21.46568, + 21.65194, + 21.91737, + 21.3546, + 21.35563, + 22.09611, + 21.57015, + 21.36296, + 21.65684, + 21.38988, + 21.89342, + 21.37261, + 21.38784, + 21.45537, + 21.40085, + 21.40078, + 21.36291, + 21.57958, + 21.55214, + 21.4854, + 21.6568, + 22.21302, + 21.43191, + 21.3881, + 22.48263, + 21.40361, + 21.36188, + 22.04883, + 21.36292, + 21.40056, + 22.04438, + 21.4135, + 21.36996, + 21.78072, + 21.70589, + 21.89188, + 21.38765, + 21.37718, + 21.38495, + 21.44516, + 21.38011, + 21.74122, + 21.65781, + 21.57116, + 21.36509, + 21.463, + 21.74009, + 21.34059, + 22.03207, + 21.56668, + 21.67216, + 21.52077, + 21.50537, + 21.50874, + 21.57077, + 21.98333, + 21.76201, + 21.5267, + 21.52984, + 21.87834, + 21.53708, + 21.54364, + 21.86814, + 21.56252, + 21.51746, + 21.74017, + 21.78962, + 21.52029, + 22.44086, + 21.51157, + 21.69183, + 22.34575, + 21.54969, + 21.48917, + 22.506, + 21.48875, + 21.56243, + 22.30615, + 21.77465, + 21.90519, + 21.73146, + 21.52625, + 21.54631, + 21.69025, + 21.5488, + 21.56662, + 21.88325, + 21.52429, + 21.50921, + 21.75135, + 21.56104, + 21.59957, + 21.79159, + 22.10465, + 21.54364, + 21.54337, + 22.85307, + 21.5478, + 21.5128, + 22.62147, + 21.53764, + 21.5388, + 23.90517, + 21.59492, + 21.90876, + 21.97001, + 21.79117, + 21.53523, + 22.19261, + 21.53661, + 21.7136, + 22.36243, + 21.52343, + 21.51417, + 21.55357, + 21.54353, + 21.52721, + 21.5431, + 21.71187, + 21.54911, + 
21.56912, + 21.64602, + 21.57613, + 21.55509, + 22.00905, + 21.74969, + 21.52967, + 22.46437, + 21.52287, + 21.73389, + 22.11148, + 21.51169, + 21.55012, + 21.77282, + 21.51785, + 21.57759, + 22.36341, + 21.69684, + 21.53758, + 21.94524, + 21.53507, + 21.55589, + 21.88176, + 22.28848, + 21.52125, + 21.71257, + 21.57439, + 21.54072, + 21.99073, + 21.70533, + 21.58484, + 22.27408, + 21.54493, + 21.50619, + 21.849, + 21.52803, + 22.09462, + 22.22558, + 21.54106, + 21.81695, + 21.91092, + 21.5503, + 21.5956, + 21.78116, + 21.47605, + 21.65239, + 21.63147, + 21.55044, + 21.48025, + 21.47696, + 21.44423, + 21.46434, + 21.73214, + 21.66346, + 21.4976, + 21.46224, + 21.45179, + 21.51423, + 21.68325, + 21.47243, + 21.55736, + 21.44322, + 21.55522, + 21.50095, + 21.46918, + 21.80503, + 21.48958, + 21.51648, + 21.72704, + 21.42354, + 21.56669, + 21.51237, + 21.55172, + 21.43708, + 21.44087, + 21.65083, + 21.41974, + 21.4329, + 21.40905, + 21.59595, + 21.48127, + 21.4148, + 21.65783, + 21.41608, + 21.4282, + 21.54184, + 21.53227, + 21.44629, + 21.39053, + 22.54517, + 21.45127, + 21.4446, + 23.09391, + 21.57436, + 21.50443, + 21.81119, + 21.4344, + 21.45899, + 21.41381, + 21.61591, + 21.64419, + 21.42327, + 21.4053, + 21.4521, + 21.48417, + 21.43413, + 21.49747, + 21.61283, + 21.42577, + 21.44671, + 21.40714, + 21.46935, + 21.44229, + 21.43852, + 21.7933, + 21.43263, + 21.41851, + 21.97102, + 21.57809, + 21.43128, + 23.03788, + 21.43543, + 21.44999, + 22.51562, + 21.4061, + 21.77855, + 21.55755, + 21.41287, + 21.4319, + 21.88834, + 21.47312, + 22.12378, + 21.43149, + 21.43806, + 21.48273, + 21.44891, + 21.61332, + 21.46153, + 22.06796, + 21.42466, + 21.4657, + 22.29121, + 21.41982, + 21.46533, + 22.59104, + 21.62388, + 21.41068, + 21.92067, + 21.52139, + 21.46856, + 22.54698, + 21.43628, + 21.47125, + 21.76083, + 21.44383, + 21.59312, + 21.72431, + 21.45776, + 21.4234, + 21.45174, + 21.5624, + 22.3904, + 21.41565, + 21.39251, + 22.8605, + 22.05914, + 21.42754, + 23.04352, + 21.50099, + 21.51449, + 22.71483, + 21.41468, + 21.928, + 22.99737, + 21.42427, + 21.54309, + 22.51813, + 21.38641, + 21.51526, + 22.25174, + 21.39354, + 21.40944, + 21.66403, + 21.46622, + 21.39181, + 21.46091, + 21.95235, + 21.32834, + 21.36681, + 21.40896, + 21.37978, + 21.35006, + 21.3709, + 21.45846, + 21.39653, + 21.36419, + 21.54063, + 21.70045, + 21.37952, + 21.55238, + 22.72036, + 21.55484, + 21.35218, + 23.35183, + 21.53639, + 21.36385, + 21.49827, + 21.53132, + 21.35807, + 21.44452, + 21.73125, + 21.37169, + 21.42118, + 21.36254, + 21.54614, + 21.48963, + 21.36327, + 21.34729, + 21.39861, + 21.46427, + 21.33024, + 21.48868, + 21.50216, + 21.40308, + 21.55654, + 21.80919, + 21.49762, + 21.35313, + 21.36458, + 21.403, + 21.61012, + 21.40521, + 21.46027, + 21.36232, + 22.13297, + 21.52458, + 21.35949, + 21.675, + 21.43788, + 21.36499, + 21.37114, + 21.4986, + 21.3778, + 21.40485, + 21.64723, + 21.70011, + 21.48531, + 21.40276, + 21.37167, + 22.57043, + 21.59715, + 21.7825, + 23.36697, + 21.37002, + 21.36447, + 21.90403, + 21.63566, + 21.40192, + 21.47657, + 22.42685, + 21.47748, + 21.36917, + 21.62378, + 21.51085, + 21.42121, + 21.5183, + 21.39837, + 21.44077, + 21.38947, + 21.54976, + 21.73644, + 21.37281, + 21.36561, + 21.34189, + 21.76994, + 21.36634, + 21.40091, + 22.67479, + 21.4168, + 21.84795, + 21.40952, + 21.56366, + 21.51928, + 21.3866, + 21.39426, + 21.42005, + 21.79225, + 21.54788, + 21.39025, + 21.39838, + 21.66749, + 21.41071, + 21.36489, + 21.72653, + 21.37733, + 21.37247, + 21.46795, + 21.58604, + 21.49767, + 
21.37405, + 21.52769, + 21.49965, + 21.40553, + 21.34805, + 21.32949, + 21.34316, + 21.32771, + 21.58136, + 21.61554, + 21.34298, + 21.29521, + 21.33676, + 21.40774, + 21.50525, + 21.42292, + 21.45998, + 21.35281, + 21.39203, + 21.50322, + 21.34026, + 21.78005, + 21.34328, + 21.3879, + 21.88154, + 21.46838, + 21.32902, + 22.55373, + 21.89904, + 21.30783, + 23.00034, + 21.45179, + 21.50976, + 22.82893, + 21.31915, + 21.82285, + 22.46257, + 21.39383, + 21.42254, + 21.79387, + 21.32108, + 21.44551, + 21.29847, + 21.47652, + 21.48548, + 21.29082, + 21.39804, + 21.34507, + 21.32278, + 21.3314, + 21.35476, + 21.73363, + 21.33135, + 21.39398, + 22.22256, + 21.44464, + 21.33411, + 22.65172, + 21.5205, + 21.8818, + 21.72054, + 21.36415, + 21.51948, + 21.31411, + 21.30877, + 21.33811, + 21.47744, + 21.32705, + 21.33504, + 21.54803, + 21.42194, + 21.45602, + 21.31921, + 21.29194, + 21.33044, + 21.38243, + 21.43781, + 21.29897, + 21.31547, + 22.03249, + 21.32423, + 21.29168, + 22.25559, + 21.45617, + 21.84155, + 22.94252, + 21.34163, + 21.34062, + 21.70744, + 42.37025, + 21.23082, + 21.8854, + 21.32675, + 21.3041, + 21.56448, + 21.49498, + 21.31515, + 21.31956, + 21.3252, + 21.59975, + 21.32988, + 21.33545, + 21.41687, + 21.64913, + 21.31671, + 21.31149, + 22.77766, + 21.29084, + 21.44871, + 22.93316, + 21.36997, + 21.31667, + 21.64206, + 21.57804, + 21.41466, + 21.82442, + 21.2932, + 21.30838, + 21.53247, + 21.67147, + 21.69564, + 21.71125, + 21.85515, + 22.49339, + 21.58926, + 21.51499, + 22.92025, + 21.49793, + 22.12625, + 22.39743, + 21.73316, + 21.48606, + 21.48727, + 21.49479, + 21.53268, + 21.50948, + 21.80451, + 21.52356, + 21.4528, + 21.47147, + 21.5196, + 21.66782, + 21.45963, + 21.45878, + 21.74641, + 21.50149, + 21.54905, + 21.47198, + 21.5413, + 21.464, + 21.46073, + 21.60428, + 21.45293, + 22.0467, + 22.6225, + 21.64651, + 21.47144, + 22.79697, + 21.60685, + 21.48925, + 23.28353, + 21.46856, + 21.52191, + 21.72009, + 21.50695, + 21.52918, + 21.57529, + 21.47933, + 21.50925, + 21.6805, + 21.52058, + 21.45812, + 21.61922, + 21.46568, + 21.4796, + 21.52748, + 21.68843, + 21.59617, + 21.68122, + 21.5904, + 21.50377, + 21.48779, + 21.70515, + 21.63938, + 21.47998, + 23.19242, + 21.49981, + 21.45223, + 23.12997, + 21.54318, + 21.49499, + 21.82821, + 21.48072, + 21.50372, + 21.49967, + 21.501, + 21.47864, + 21.47223, + 21.50483, + 21.49144, + 21.45406, + 21.57046, + 21.69256, + 21.47656, + 21.58561, + 21.49092, + 21.99757, + 21.51684, + 21.4778, + 22.20366, + 21.52083, + 21.4842, + 23.05357, + 21.73083, + 21.49291, + 22.78123, + 22.09088, + 21.49528, + 21.77238, + 21.4985, + 21.71434, + 21.50878, + 21.65577, + 21.69337, + 21.49433, + 21.59404, + 21.49991, + 21.52433, + 21.46667, + 21.49769, + 21.58025, + 21.77447, + 21.53856, + 21.69528, + 140.88046, + 21.50567, + 21.52767, + 21.54513, + 21.73718, + 21.70434, + 21.68278, + 21.75726, + 21.50469, + 21.75843, + 21.50908, + 21.67016, + 21.50596, + 21.50605, + 21.86186, + 22.0345, + 21.63119, + 21.50867, + 22.57252, + 21.51117, + 21.51261, + 22.71534, + 21.63224, + 21.49328, + 21.97537, + 21.77538, + 21.48623, + 21.56988, + 21.51687, + 21.4577, + 21.49192, + 21.66786, + 21.67671, + 21.49823, + 21.49661, + 21.86857, + 21.48432, + 21.50297, + 21.49771, + 21.81696, + 21.85007, + 21.54679, + 21.66561, + 21.96876, + 21.5589, + 21.65483, + 22.24072, + 21.57089, + 21.49358, + 22.62678, + 21.53527, + 21.49976, + 22.53759, + 21.48928, + 21.55455, + 21.87598, + 21.61069, + 21.74809, + 21.55618, + 21.6859, + 21.52414, + 21.59845, + 21.50869, + 21.48695, + 
21.88519, + 21.59971, + 21.50933, + 21.60103, + 21.46312, + 21.52861, + 22.36, + 21.45089, + 21.72037, + 23.26463, + 21.47603, + 21.47435, + 22.70337, + 21.90371, + 21.48702, + 21.86955, + 21.52135, + 21.879, + 21.51374, + 21.49992, + 21.61309, + 21.49249, + 21.89408, + 21.49203, + 21.77342, + 21.49828, + 21.51173, + 21.57722, + 21.54473, + 21.67017, + 21.51232, + 22.31113, + 21.58524, + 21.49967, + 21.9219, + 21.49739, + 21.53436, + 22.39809, + 22.00699, + 21.53994, + 22.57789, + 21.73743, + 21.4719, + 21.9773, + 21.58742, + 22.00943, + 21.82804, + 21.50696, + 21.92103, + 21.65572, + 21.48257, + 21.5109, + 21.55255, + 21.94602, + 21.57032, + 21.8089, + 21.55935, + 21.57463, + 21.66593, + 21.63316, + 21.91181, + 21.64982, + 21.56321, + 21.51924, + 21.56886, + 21.5423, + 21.71634, + 22.45646, + 21.58003, + 21.64402 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml index 585d9bb2c..1aab9ae73 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml @@ -6,9 +6,7 @@ ENV_VARS: NVTE_BWD_LAYERNORM_SM_MARGIN: 16 NCCL_P2P_NET_CHUNKSIZE: 2097152 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - TEST_TYPE: "release" - MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -18,7 +16,6 @@ MODEL_ARGS: --overlap-grad-reduce: true --overlap-param-gather: true --no-ckpt-fully-parallel-save: true - # Training args --use-mcore-models: true --sequence-parallel: true @@ -28,23 +25,20 @@ MODEL_ARGS: --global-batch-size: 1024 --train-samples: 24414063 --exit-duration-in-mins: 230 - # Transformer Engine args --transformer-impl: transformer_engine - # Data args --data-cache-path: ${DATA_CACHE_PATH} --tokenizer-type: GPTSentencePieceTokenizer - --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model --data-path: $DATA_BLEND --split: 99,1,0 --no-mmap-bin-files: true --num-workers: 6 - # Add network size args --untie-embeddings-and-output-weights: true - --no-position-embedding: true --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 0.5 --normalization: RMSNorm --swiglu: true @@ -57,13 +51,11 @@ MODEL_ARGS: --seq-length: 4096 --max-position-embeddings: 4096 --make-vocab-size-divisible-by: 128 - # Add regularization args --attention-dropout: 0.0 --hidden-dropout: 0.0 --clip-grad: 1.0 --weight-decay: 0.1 - # Add learning rate args --lr-decay-samples: 1949218748 --lr-warmup-samples: 3906252 @@ -72,7 +64,6 @@ MODEL_ARGS: --lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 - # Add MoE args --expert-model-parallel-size: 4 --num-experts: 8 @@ -81,19 +72,15 @@ MODEL_ARGS: --moe-grouped-gemm: true --moe-aux-loss-coeff: 1e-2 --moe-token-dispatcher-type: alltoall - # Add validation args --eval-iters: 32 --eval-interval: 200 - # Add checkpointing args --load: ${OUTPUT_PATH}/checkpoints --save: ${OUTPUT_PATH}/checkpoints - --save-interval: 500 - + --save-interval: 5000 # Add initialization args --init-method-std: 0.010 - # Add logging args --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true @@ -105,6 +92,5 @@ MODEL_ARGS: --tensorboard-dir: ${OUTPUT_PATH}/tensorboard --wandb-project: 
megatron-core-release-runs --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args --bf16: true diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml index 22607416a..c7ca1b0eb 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml @@ -6,9 +6,7 @@ ENV_VARS: NVTE_BWD_LAYERNORM_SM_MARGIN: 16 NCCL_P2P_NET_CHUNKSIZE: 2097152 NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 - TEST_TYPE: "release" - MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -18,7 +16,6 @@ MODEL_ARGS: --overlap-grad-reduce: true --overlap-param-gather: true --no-ckpt-fully-parallel-save: true - # Training args --use-mcore-models: true --sequence-parallel: true @@ -28,23 +25,20 @@ MODEL_ARGS: --global-batch-size: 1024 --train-samples: 6103515 --exit-duration-in-mins: 230 - # Transformer Engine args --transformer-impl: transformer_engine - # Data args --data-cache-path: ${DATA_CACHE_PATH} --tokenizer-type: GPTSentencePieceTokenizer - --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model --data-path: $DATA_BLEND --split: 99,1,0 --no-mmap-bin-files: true --num-workers: 6 - # Add network size args --untie-embeddings-and-output-weights: true - --no-position-embedding: true --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 0.5 --normalization: RMSNorm --swiglu: true @@ -57,13 +51,11 @@ MODEL_ARGS: --seq-length: 4096 --max-position-embeddings: 4096 --make-vocab-size-divisible-by: 128 - # Add regularization args --attention-dropout: 0.0 --hidden-dropout: 0.0 --clip-grad: 1.0 --weight-decay: 0.1 - # Add learning rate args --lr-decay-samples: 1949218748 --lr-warmup-samples: 3906252 @@ -72,7 +64,6 @@ MODEL_ARGS: --lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 - # Add MoE args --expert-model-parallel-size: 4 --num-experts: 8 @@ -81,19 +72,15 @@ MODEL_ARGS: --moe-grouped-gemm: true --moe-aux-loss-coeff: 1e-2 --moe-token-dispatcher-type: alltoall - # Add validation args --eval-iters: 32 --eval-interval: 200 - # Add checkpointing args --load: ${OUTPUT_PATH}/checkpoints --save: ${OUTPUT_PATH}/checkpoints --save-interval: 500 - # Add initialization args --init-method-std: 0.010 - # Add logging args --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true @@ -105,6 +92,5 @@ MODEL_ARGS: --tensorboard-dir: ${OUTPUT_PATH}/tensorboard --wandb-project: megatron-core-release-runs --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args --bf16: true diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json new file mode 100644 index 000000000..3b0155ac3 --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json @@ -0,0 +1,275 @@ +{ + "mem-allocated-bytes": { + "start_step": 0, + "end_step": 420, + "step_interval": 5, + "values": [ + 20705730560.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705730560.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 
20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705730560.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0, + 20705732608.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 420, + "step_interval": 5, + "values": [ + 174.32498, + 5.03933, + 5.07613, + 7.42013, + 169.24701, + 3.36681, + 3.34591, + 3.34951, + 3.52622, + 3.5358, + 3.48786, + 3.36115, + 3.35303, + 3.33904, + 3.3418, + 3.45107, + 3.34203, + 3.51434, + 3.40521, + 3.31669, + 3.29789, + 3.31313, + 3.29411, + 3.29085, + 3.27948, + 3.2839, + 3.33829, + 3.2764, + 3.27646, + 3.28108, + 3.26077, + 3.26767, + 3.25715, + 3.26524, + 3.26767, + 3.26115, + 3.26032, + 3.25141, + 3.27231, + 3.24855, + 3.25906, + 3.38416, + 3.26765, + 3.26154, + 169.37907, + 3.29826, + 3.29074, + 3.32167, + 3.54332, + 3.56011, + 3.41217, + 3.29645, + 3.30239, + 3.28493, + 3.28615, + 3.38222, + 3.27917, + 3.42778, + 3.35594, + 3.27354, + 3.23432, + 3.24867, + 3.24654, + 3.23251, + 3.22087, + 3.21832, + 3.27523, + 3.21564, + 3.21386, + 3.21731, + 3.21401, + 3.21026, + 3.20818, + 3.20512, + 3.20698, + 3.21101, + 3.19753, + 3.20163, + 3.22271, + 3.18466, + 3.19733, + 3.32646, + 3.19771, + 3.19899 + ] + }, + "throughput": { + "start_step": 0, + "end_step": 420, + "step_interval": 5, + "values": [ + 7.79399, + 269.61679, + 267.66226, + 183.10829, + 8.02784, + 403.55313, + 406.07434, + 405.63708, + 385.30963, + 384.26593, + 389.54803, + 404.2323, + 405.21173, + 406.90967, + 406.57309, + 393.69977, + 406.54602, + 386.612, + 399.0025, + 409.65109, + 411.98703, + 410.09161, + 412.46014, + 412.86859, + 414.30011, + 413.74167, + 407.00095, + 414.68881, + 414.68198, + 414.09723, + 416.67682, + 415.79745, + 417.14041, + 416.10687, + 415.79706, + 416.6282, + 416.73474, + 417.87595, + 415.20795, + 418.24426, + 416.89496, + 401.48453, + 415.79965, + 416.57834, + 8.02158, + 411.94022, + 412.88141, + 409.03793, + 383.4502, + 381.64218, + 398.18808, + 412.16641, + 411.42493, + 413.61191, + 413.45926, + 401.71454, + 414.33859, + 396.37567, + 404.85992, + 415.05142, + 420.0842, + 418.22919, + 418.50348, + 420.31937, + 421.83838, + 422.17279, + 414.83759, + 422.52484, + 422.75912, + 422.30557, + 422.73874, + 423.2323, + 423.50696, + 423.91129, + 423.66608, + 423.13437, + 424.918, + 424.37387, + 421.59784, + 426.63443, + 424.94376, + 408.44785, + 424.89417, + 424.72318 + ] + } +} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml index 39421a887..27e09ba59 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml @@ -4,9 +4,7 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 PYTORCH_CUDA_ALLOC_CONF: expandable_segments:True NCCL_NVLS_ENABLE: 0 - TEST_TYPE: "release" - MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -16,7 +14,6 @@ MODEL_ARGS: --use-distributed-optimizer: true --overlap-grad-reduce: true --overlap-param-gather: true - # Training args --use-mcore-models: true --sequence-parallel: true @@ -26,10 +23,8 @@ MODEL_ARGS: --global-batch-size: 256 --train-samples: 51200 --exit-duration-in-mins: 230 - # Transformer Engine args --transformer-impl: transformer_engine - # Data args --data-cache-path: ${DATA_CACHE_PATH} --tokenizer-type: Llama2Tokenizer @@ -38,11 +33,10 @@ MODEL_ARGS: --split: 99,1,0 --no-mmap-bin-files: true --num-workers: 6 - # Add network size args --untie-embeddings-and-output-weights: true - --no-position-embedding: true --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 1.0 --normalization: RMSNorm --swiglu: true @@ -55,13 +49,11 @@ MODEL_ARGS: --seq-length: 4096 --max-position-embeddings: 4096 --make-vocab-size-divisible-by: 128 - # Add regularization args --attention-dropout: 0.0 --hidden-dropout: 0.0 --clip-grad: 1.0 --weight-decay: 0.1 - # Add learning rate args --lr-decay-samples: 255126953 --lr-warmup-samples: 162761 @@ -70,7 +62,6 @@ MODEL_ARGS: --lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 - # Add MoE args --expert-model-parallel-size: 8 --num-experts: 8 @@ -79,11 +70,9 @@ MODEL_ARGS: --moe-grouped-gemm: true --moe-aux-loss-coeff: 1e-2 --moe-token-dispatcher-type: alltoall - # Add validation args --eval-iters: 32 --eval-interval: 200 - # Add checkpointing args --finetune: true --auto-detect-ckpt-format: true @@ -91,10 +80,8 @@ MODEL_ARGS: --save: ${OUTPUT_PATH}/checkpoints --no-ckpt-fully-parallel-save: true --save-interval: 500 - # Add initialization args --init-method-std: 0.008 - # Add logging args --log-timers-to-tensorboard: true --log-memory-to-tensorboard: true @@ -106,6 +93,5 @@ MODEL_ARGS: --tensorboard-dir: ${OUTPUT_PATH}/tensorboard --wandb-project: megatron-core-release-runs --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args --bf16: true diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..fdcf15222 --- /dev/null +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.1349, + 9.13328, + 9.129, + 9.11325, + 9.05402, + 9.0423, + 8.98255, + 8.93259, + 8.88939, + 8.78786 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 3477378.0, + 3584431.0, + 3475109.0, + 3382848.0, + 3699812.0, + 3478561.0, + 3397873.0, + 3453618.0, + 
3424934.0, + 3585113.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.79473, + 0.31292, + 0.31229, + 0.31273, + 0.31218, + 0.31206, + 0.31234, + 0.3114, + 0.31226, + 0.31109 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml index 6da0c3a85..b3b81d503 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 624 diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..74173ee84 --- /dev/null +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.16172, + 9.16209, + 9.15685, + 9.1402, + 9.09395, + 9.07144, + 9.01399, + 8.96508, + 8.91879, + 8.8258 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 3557267.0, + 3663904.0, + 3554934.0, + 3462955.0, + 3780144.0, + 3559102.0, + 3477361.0, + 3533886.0, + 3504942.0, + 3665022.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 19.12182, + 0.63754, + 0.63824, + 0.6364, + 0.62383, + 0.62352, + 0.62268, + 0.62428, + 0.63616, + 0.6281 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml index 816aa8bf1..cdfdac5ff 100644 --- 
a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 624 diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json new file mode 100644 index 000000000..a7ef0e1fa --- /dev/null +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 9.19864, + 9.20111, + 9.19601, + 9.17296, + 9.11705, + 9.10224, + 9.04016, + 8.98428, + 8.94016, + 8.8386 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 3717664.0, + 3824288.0, + 3714705.0, + 3622894.0, + 3939791.0, + 3718740.0, + 3637227.0, + 3694225.0, + 3665435.0, + 3825408.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 12.72076, + 0.81802, + 0.8164, + 0.81573, + 0.81376, + 0.81495, + 0.81587, + 0.8178, + 0.82291, + 0.82279 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values.json rename to tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml index 180e6beed..22f816cd8 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml @@ -4,7 +4,6 @@ ENV_VARS: NCCL_ALGO: Tree CUBLAS_WORKSPACE_CONFIG: :4096:8 GPUS_PER_NODE: 7 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 624 diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml index 1fade8fd4..4a829aca1 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml @@ -4,7 +4,6 @@ ENV_VARS: NCCL_ALGO: Tree 
CUBLAS_WORKSPACE_CONFIG: :4096:8 GPUS_PER_NODE: 7 - N_REPEATS: 5 MODEL_ARGS: --num-layers: 12 --hidden-size: 624 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index bcff77766..000000000 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [19.39068, 0.66038, 0.65673, 0.66493, 0.65894, 0.6473, 0.65746, 0.64942, 0.66259, 0.65247, 0.65165, 0.64944, 0.81313, 0.65069, 0.64982, 0.65247, 0.65149, 0.65284, 0.64913, 0.6496]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.63253, 0.27412, 0.26777, 0.27338, 0.26922, 0.26445, 0.27043, 0.26308, 0.27178, 0.26246, 0.26565, 0.26691, 0.42095, 0.26741, 0.26653, 0.26546, 0.26547, 0.26403, 0.26266, 0.26606]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.0264, 0.24005, 0.23751, 0.24162, 0.24102, 0.23888, 0.24027, 0.23829, 0.24182, 0.24308, 0.24109, 0.23964, 0.23841, 0.24005, 0.23898, 0.23896, 0.24052, 0.23894, 0.24242, 0.23863]}, "forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [8.32911, 0.07441, 0.07755, 0.07578, 0.07557, 0.07223, 0.0737, 0.07404, 0.07108, 0.07174, 0.07137, 0.07162, 0.07437, 0.07185, 0.07129, 0.07247, 0.0719, 0.07573, 0.07292, 0.07122]}, "forward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.47287, 0.00053, 0.00063, 0.00048, 0.00045, 0.00047, 0.00046, 0.00045, 0.00046, 0.00063, 0.00044, 0.00046, 0.00047, 0.00045, 0.00056, 0.00046, 0.00045, 0.00046, 0.00045, 0.00044]}, "backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.1444, 0.13179, 0.12767, 0.13592, 0.1279, 0.12912, 0.13033, 0.1328, 0.13106, 0.13249, 0.12957, 0.12877, 0.13334, 0.12829, 0.12815, 0.13128, 0.12985, 0.13117, 0.12901, 0.1277]}, "backward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00065, 0.00056, 0.00066, 0.00067, 0.0006, 0.00059, 0.00064, 0.00067, 0.00068, 0.0006, 0.00056, 0.00058, 0.00059, 0.00056, 0.00064, 0.00058, 0.00049, 0.00079, 0.00081, 0.0006]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [12.49425, 0.23291, 0.228, 0.22475, 0.22786, 0.22525, 0.22534, 0.22597, 0.23004, 0.22656, 0.22342, 0.22577, 0.38374, 0.22857, 0.22673, 0.22371, 0.22908, 0.23017, 0.23145, 0.23191]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5.02478, 0.00608, 0.00441, 0.00414, 0.0093, 0.00347, 0.00363, 0.00527, 0.0093, 0.00705, 0.00369, 0.00633, 0.00834, 0.00352, 0.0034, 0.00565, 0.00346, 0.00354, 0.00341, 0.0035]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 2e-05, 2e-05, 3e-05, 3e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.47745, 0.00052, 0.00064, 0.00053, 0.00052, 0.0006, 0.00052, 0.00062, 0.00052, 0.00056, 0.00065, 0.00056, 0.00054, 0.00053, 0.00058, 0.00052, 0.00052, 0.00052, 0.00055, 0.00053]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, 
"step_interval": 5, "values": [0.43086, 0.00036, 0.00041, 0.00037, 0.00032, 0.00037, 0.00048, 0.00044, 0.00043, 0.00045, 0.00034, 0.00044, 0.00037, 0.00043, 0.00044, 0.00032, 0.00032, 0.00045, 0.00045, 0.00045]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00053, 0.00034, 0.00032, 0.00033, 0.00034, 0.00031, 0.00033, 0.00035, 0.00032, 0.00033, 0.00036, 0.00035, 0.00033, 0.00033, 0.00034, 0.00035, 0.00033, 0.00034, 0.00032, 0.00035]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.26638, 0.00127, 0.00123, 0.00144, 0.00125, 0.00123, 0.00128, 0.00162, 0.00128, 0.00131, 0.00138, 0.00133, 0.00142, 0.0013, 0.00136, 0.00137, 0.00133, 0.00135, 0.00129, 0.00136]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01282, 0.00738, 0.00728, 0.00736, 0.00738, 0.00733, 0.00738, 0.00735, 0.00731, 0.00727, 0.00897, 0.00755, 0.0073, 0.00721, 0.00734, 0.00746, 0.00736, 0.00734, 0.00737, 0.00726]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00984, 0.00108, 0.00105, 0.00108, 0.00105, 0.00105, 0.00107, 0.00104, 0.00105, 0.00106, 0.00106, 0.00105, 0.0012, 0.00106, 0.00105, 0.00105, 0.00105, 0.00106, 0.00104, 0.00106]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0011, 0.00101, 0.00102, 0.00102, 0.00101, 0.00102, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.00101, 0.0015, 0.00102, 0.00101, 0.00101, 0.00102, 0.00268, 0.00101, 0.00101]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.29197, 0.01172, 0.01152, 0.01191, 0.01165, 0.01156, 0.0117, 0.01199, 0.01159, 0.01161, 0.0134, 0.01194, 0.01269, 0.01155, 0.01172, 0.01186, 0.01173, 0.01343, 0.01172, 0.01165]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41489, 9.20451, 8.62156, 8.34435, 8.08472, 7.96931, 7.68116, 7.39495, 7.26108, 7.19145, 7.31028, 7.16653, 7.05979, 6.99436, 6.85568, 6.93225, 6.95525, 7.02522, 6.66561, 6.93924]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41489, 9.20451, 8.62156, 8.34435, 8.08472, 7.96931, 7.68116, 7.39495, 7.26108, 7.19145, 7.31028, 7.16653, 7.05979, 6.99436, 6.85568, 6.93225, 6.95525, 7.02522, 6.66561, 6.93924]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 
5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51239, 2.98952, 3.27663, 2.61225, 2.39588, 1.99758, 1.81287, 1.93167, 1.62175, 1.51416, 1.16291, 1.32388, 1.20328, 1.10814, 1.5007, 2.15295, 1.65903, 1.42013, 2.08526, 1.2754]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51239, 2.98952, 3.27663, 2.61225, 2.39588, 1.99758, 1.81287, 1.93167, 1.62175, 1.51416, 1.16291, 1.32388, 1.20328, 1.10814, 1.5007, 2.15295, 1.65903, 1.42013, 2.08526, 1.2754]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115745.0, 111070.0, 117081.0, 112381.0, 118700.0, 116957.0, 111399.0, 114013.0, 118460.0, 116959.0, 111499.0, 115613.0, 108489.0, 119947.0, 115772.0, 116922.0, 119841.0, 120380.0, 121396.0, 118455.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115745.0, 111070.0, 117081.0, 112381.0, 118700.0, 116957.0, 111399.0, 114013.0, 118460.0, 116959.0, 111499.0, 115613.0, 108489.0, 119947.0, 115772.0, 116922.0, 119841.0, 120380.0, 121396.0, 118455.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48447, 309.52603, 309.57944, 309.64523, 309.72018, 309.80231, 309.8884, 309.97391, 310.05591, 310.13483, 310.20755, 310.27094, 310.32535, 310.37161, 310.40887, 310.43597, 310.45648, 310.47238, 310.48444]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48447, 309.52603, 309.57944, 309.64523, 309.72018, 309.80231, 309.8884, 309.97391, 310.05591, 310.13483, 310.20755, 310.27094, 310.32535, 310.37161, 310.40887, 310.43597, 310.45648, 310.47238, 310.48444]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.7057, 0.68569, 0.68236, 0.69077, 0.68415, 0.67238, 0.68288, 0.67481, 0.6874, 0.67748, 0.6785, 0.67478, 0.83941, 0.6755, 0.67503, 0.67787, 0.67668, 0.67904, 0.67443, 0.67541]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86582]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86582]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [958.93542]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [958.93542]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..57cec7359 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [15.71288, 0.61814, 0.60061, 0.609, 0.60606, 0.59974, 0.60053, 0.59718, 0.59636, 0.5993, 0.59616, 0.5993, 0.60208, 0.59842, 0.59448, 0.59772, 0.59415, 0.59624, 0.59651, 0.5939]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.12459, 0.22962, 0.23245, 0.23195, 0.2326, 0.23265, 0.23278, 0.23264, 0.23178, 0.23401, 0.23274, 0.23172, 0.23112, 0.23126, 0.23154, 0.23126, 0.23103, 0.23016, 0.23056, 0.2307]}, "backward-compute-time": {"start_step": 0, "end_step": 100, 
"step_interval": 5, "values": [1.75709, 0.24327, 0.23169, 0.23456, 0.23046, 0.23375, 0.23087, 0.2308, 0.23214, 0.23045, 0.23106, 0.23154, 0.23148, 0.2296, 0.23124, 0.23083, 0.23167, 0.23065, 0.23137, 0.23138]}, "forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5.98096, 0.06178, 0.06132, 0.06307, 0.06477, 0.06243, 0.06383, 0.06234, 0.06107, 0.06323, 0.06113, 0.06283, 0.06447, 0.06275, 0.06124, 0.06359, 0.06095, 0.06391, 0.06239, 0.0601]}, "forward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.46683, 0.00046, 0.00053, 0.00048, 0.00057, 0.00042, 0.00051, 0.00053, 0.00042, 0.00054, 0.00044, 0.00051, 0.00053, 0.00042, 0.00076, 0.00043, 0.00042, 0.00051, 0.00053, 0.00051]}, "backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.12574, 0.1199, 0.11997, 0.12137, 0.12141, 0.12166, 0.12187, 0.12333, 0.12271, 0.12397, 0.12208, 0.12564, 0.12261, 0.12247, 0.12167, 0.1226, 0.12277, 0.12102, 0.12155, 0.12196]}, "backward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00058, 0.00051, 0.00055, 0.00049, 0.00052, 0.0005, 0.00055, 0.00054, 0.00056, 0.0005, 0.00049, 0.00056, 0.0005, 0.00055, 0.00056, 0.00056, 0.00057, 0.00055, 0.00055, 0.00055]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.64124, 0.21304, 0.19661, 0.2004, 0.20279, 0.21188, 0.21084, 0.20759, 0.20948, 0.20864, 0.20899, 0.21203, 0.20325, 0.1982, 0.20653, 0.21049, 0.2105, 0.20347, 0.20699, 0.20667]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4.27348, 0.0208, 0.00376, 0.01105, 0.00428, 0.00581, 0.00423, 0.00361, 0.00435, 0.00393, 0.00433, 0.00662, 0.00407, 0.00384, 0.00455, 0.00466, 0.00417, 0.00513, 0.00494, 0.00456]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 2e-05, 3e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.36384, 0.00053, 0.00053, 0.00052, 0.00053, 0.00053, 0.00053, 0.00052, 0.00052, 0.00052, 0.00054, 0.00054, 0.00052, 0.00053, 0.00052, 0.00053, 0.00052, 0.00051, 0.00053, 0.00051]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.35375, 0.00038, 0.00043, 0.00041, 0.00041, 0.0004, 0.00043, 0.00038, 0.00038, 0.00041, 0.00038, 0.00043, 0.00032, 0.00033, 0.00033, 0.00037, 0.00038, 0.00036, 0.00037, 0.00037]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0004, 0.00033, 0.00032, 0.00035, 0.00033, 0.00031, 0.00031, 0.00032, 0.00033, 0.00032, 0.00033, 0.00032, 0.00032, 0.00031, 0.00031, 0.00032, 0.0003, 0.0003, 0.0003, 0.0003]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.70516, 0.00125, 0.00124, 0.00125, 0.00126, 0.00121, 0.00122, 0.00122, 0.00123, 0.00122, 0.00126, 0.00125, 0.00124, 0.00119, 0.00128, 0.0012, 0.00121, 0.00122, 0.00125, 0.00124]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01732, 0.00791, 0.00778, 0.00782, 0.00776, 0.00784, 0.00778, 0.00777, 0.00777, 0.00789, 0.00777, 0.00776, 0.00774, 0.00776, 0.00787, 0.00778, 0.00785, 0.00775, 0.00775, 0.00781]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, 
"step_interval": 5, "values": [0.01232, 0.00107, 0.00103, 0.00105, 0.00103, 0.00104, 0.00103, 0.00105, 0.00103, 0.00104, 0.00103, 0.00104, 0.00103, 0.00103, 0.00104, 0.00104, 0.00103, 0.00104, 0.00103, 0.00104]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00143, 0.00103, 0.00099, 0.00099, 0.00099, 0.00099, 0.00098, 0.00099, 0.00099, 0.00099, 0.00098, 0.00098, 0.00099, 0.00099, 0.00104, 0.001, 0.00099, 0.00098, 0.00098, 0.00099]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.73804, 0.01225, 0.01201, 0.01214, 0.01201, 0.01205, 0.01198, 0.012, 0.012, 0.01212, 0.01203, 0.01202, 0.01198, 0.01192, 0.01221, 0.01199, 0.01202, 0.01192, 0.01194, 0.01204]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20437, 8.6213, 8.34434, 8.0846, 7.96908, 7.68085, 7.3943, 7.2612, 7.19123, 7.30996, 7.16658, 7.0596, 6.99443, 6.85568, 6.93181, 6.95482, 7.02465, 6.66523, 6.93912]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20437, 8.6213, 8.34434, 8.0846, 7.96908, 7.68085, 7.3943, 7.2612, 7.19123, 7.30996, 7.16658, 7.0596, 6.99443, 6.85568, 6.93181, 6.95482, 7.02465, 6.66523, 6.93912]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.98993, 3.27236, 2.61222, 2.39606, 1.99737, 1.81218, 1.91449, 1.62396, 1.50901, 1.16214, 1.3245, 1.20365, 1.10605, 1.5131, 2.1239, 1.65989, 1.41738, 2.05605, 1.27075]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.98993, 3.27236, 2.61222, 2.39606, 1.99737, 1.81218, 1.91449, 1.62396, 1.50901, 1.16214, 1.3245, 1.20365, 1.10605, 1.5131, 2.1239, 1.65989, 1.41738, 2.05605, 1.27075]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117083.0, 112366.0, 118719.0, 116953.0, 111389.0, 114012.0, 118474.0, 116947.0, 111514.0, 115608.0, 108500.0, 119951.0, 115760.0, 116926.0, 119844.0, 120384.0, 121401.0, 118454.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117083.0, 112366.0, 118719.0, 116953.0, 111389.0, 114012.0, 118474.0, 116947.0, 111514.0, 
115608.0, 108500.0, 119951.0, 115760.0, 116926.0, 119844.0, 120384.0, 121401.0, 118454.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48447, 309.52603, 309.57944, 309.64526, 309.72028, 309.80237, 309.88846, 309.97403, 310.056, 310.13495, 310.2077, 310.27109, 310.32544, 310.37173, 310.40884, 310.43594, 310.45645, 310.47226, 310.48434]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48447, 309.52603, 309.57944, 309.64526, 309.72028, 309.80237, 309.88846, 309.97403, 310.056, 310.13495, 310.2077, 310.27109, 310.32544, 310.37173, 310.40884, 310.43594, 310.45645, 310.47226, 310.48434]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [16.47856, 0.644, 0.62616, 0.63468, 0.63159, 0.62541, 0.626, 0.62264, 0.62187, 0.62505, 0.62162, 0.62466, 0.62765, 0.62375, 0.62026, 0.62331, 0.61955, 0.62155, 0.62176, 0.61929]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86562]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [6.86562]}, "lm loss validation ppl": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [958.74249]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [958.74249]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..dbe209536 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [19.90333, 0.58856, 0.59469, 0.58216, 0.59341, 0.57994, 0.58185, 0.5789, 0.57607, 0.58, 0.58007, 0.5753, 0.58464, 0.58037, 0.57413, 0.57523, 0.57405, 0.58554, 0.60294, 0.58005]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4.42353, 0.2341, 0.23716, 0.23094, 0.23623, 0.22774, 0.22931, 0.22826, 0.22425, 0.22847, 0.22935, 0.22676, 0.23322, 0.22908, 0.22555, 0.22469, 0.22599, 0.22742, 0.25133, 0.2259]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.95079, 0.22368, 0.2273, 0.22252, 0.22476, 0.22289, 0.22216, 0.22126, 0.22084, 0.22183, 0.22121, 0.22178, 0.22286, 0.22446, 0.22459, 0.22527, 0.22402, 0.22983, 0.22118, 0.22371]}, "forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [8.01714, 0.06124, 0.06125, 0.0607, 0.06434, 0.06119, 0.06293, 0.06164, 0.06064, 0.06042, 0.06086, 0.06143, 0.06321, 0.06163, 0.05988, 0.0612, 0.05934, 0.06152, 0.06486, 0.05962]}, "forward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.40091, 0.00043, 0.00062, 0.00053, 0.00045, 0.00042, 0.00068, 0.00049, 0.00045, 0.00043, 0.00058, 0.00043, 0.00053, 0.00043, 0.00056, 0.00042, 0.00042, 0.00044, 0.00042, 0.00055]}, "backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.11724, 0.11466, 0.11811, 0.11163, 0.11217, 0.11093, 0.11231, 0.11875, 0.11788, 0.11954, 0.11946, 0.11548, 0.11898, 0.11974, 0.11993, 0.11865, 0.12113, 0.11927, 0.12228, 0.1208]}, "backward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00051, 0.00051, 0.0005, 
0.00066, 0.00066, 0.00056, 0.00055, 0.00046, 0.00064, 0.00048, 0.00047, 0.00048, 0.00046, 0.00045, 0.00045, 0.00043, 0.00046, 0.00046, 0.00047, 0.00043]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [13.497, 0.20707, 0.2087, 0.20974, 0.2204, 0.21082, 0.21043, 0.20604, 0.20439, 0.20846, 0.20868, 0.20842, 0.2171, 0.21065, 0.20419, 0.20475, 0.2067, 0.21521, 0.22812, 0.2131]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4.98676, 0.02107, 0.02298, 0.01837, 0.01578, 0.01755, 0.01567, 0.01438, 0.01344, 0.01755, 0.01789, 0.01555, 0.01944, 0.01458, 0.01433, 0.01406, 0.01503, 0.01809, 0.03277, 0.01271]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 3e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.46106, 0.00051, 0.00051, 0.00052, 0.00051, 0.00052, 0.00051, 0.00051, 0.00051, 0.00062, 0.00051, 0.00053, 0.00051, 0.00051, 0.00052, 0.00051, 0.00051, 0.00059, 0.00051, 0.00063]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.40205, 0.00032, 0.00032, 0.00035, 0.00031, 0.00037, 0.00031, 0.0003, 0.00038, 0.00034, 0.00031, 0.00046, 0.00035, 0.00036, 0.00035, 0.00031, 0.00034, 0.00031, 0.00031, 0.0003]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00038, 0.00032, 0.00032, 0.00031, 0.00032, 0.0003, 0.00031, 0.00031, 0.00031, 0.00031, 0.00031, 0.00032, 0.00031, 0.00031, 0.00031, 0.00031, 0.00031, 0.00031, 0.0003, 0.00031]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.12765, 0.00122, 0.00122, 0.00122, 0.0012, 0.00121, 0.00121, 0.00121, 0.00123, 0.0012, 0.00121, 0.00137, 0.00125, 0.00125, 0.00126, 0.00124, 0.00127, 0.00121, 0.0012, 0.00122]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01111, 0.00722, 0.0072, 0.00709, 0.0071, 0.00708, 0.0071, 0.0071, 0.00715, 0.00709, 0.00708, 0.00888, 0.00709, 0.00704, 0.00711, 0.00709, 0.00705, 0.00716, 0.00716, 0.00707]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00991, 0.00103, 0.00104, 0.00103, 0.00103, 0.00103, 0.00101, 0.00102, 0.00103, 0.00102, 0.00103, 0.00105, 0.00103, 0.00103, 0.00102, 0.00102, 0.00103, 0.00103, 0.00102, 0.00102]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00112, 0.00098, 0.00098, 0.00098, 0.00098, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00098, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.15127, 0.01146, 0.01139, 0.01122, 0.01123, 0.01123, 0.01121, 0.01121, 0.01131, 0.01118, 0.0112, 0.01322, 0.01125, 0.01119, 0.01128, 0.01123, 0.01122, 0.01127, 0.01125, 0.01118]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 
7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20435, 8.6213, 8.34427, 8.08473, 7.96923, 7.68106, 7.39444, 7.26111, 7.19106, 7.31002, 7.16668, 7.05964, 6.99445, 6.85574, 6.93197, 6.95538, 7.0248, 6.66527, 6.93928]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20435, 8.6213, 8.34427, 8.08473, 7.96923, 7.68106, 7.39444, 7.26111, 7.19106, 7.31002, 7.16668, 7.05964, 6.99445, 6.85574, 6.93197, 6.95538, 7.0248, 6.66527, 6.93928]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.9898, 3.27355, 2.61215, 2.39606, 1.99744, 1.81243, 1.91693, 1.62391, 1.50884, 1.1615, 1.33045, 1.20489, 1.10832, 1.51113, 2.13636, 1.66573, 1.41358, 2.06016, 1.27144]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.9898, 3.27355, 2.61215, 2.39606, 1.99744, 1.81243, 1.91693, 1.62391, 1.50884, 1.1615, 1.33045, 1.20489, 1.10832, 1.51113, 2.13636, 1.66573, 1.41358, 2.06016, 1.27144]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117061.0, 112406.0, 118709.0, 116945.0, 111380.0, 114030.0, 118469.0, 116944.0, 111511.0, 115606.0, 108490.0, 119961.0, 115771.0, 116922.0, 119839.0, 120381.0, 121405.0, 118441.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117061.0, 112406.0, 118709.0, 116945.0, 111380.0, 114030.0, 118469.0, 116944.0, 111511.0, 115606.0, 108490.0, 119961.0, 115771.0, 116922.0, 119839.0, 120381.0, 121405.0, 118441.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48444, 309.52603, 309.57944, 309.64526, 309.72025, 309.80234, 309.88849, 309.97403, 310.056, 310.13495, 310.20767, 310.27103, 310.32535, 310.3717, 310.40875, 310.43588, 310.45633, 310.47214, 310.48419]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48444, 309.52603, 309.57944, 309.64526, 309.72025, 309.80234, 309.88849, 309.97403, 310.056, 310.13495, 310.20767, 310.27103, 310.32535, 310.3717, 310.40875, 310.43588, 310.45633, 310.47214, 310.48419]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.07582, 0.61292, 0.61886, 0.60601, 0.61744, 0.60406, 0.60575, 0.60271, 0.60001, 0.60403, 0.60393, 0.60127, 0.6086, 0.60424, 0.59816, 0.59917, 0.59804, 0.60976, 0.62704, 0.60404]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86596]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, 
"step_interval": 5, "values": [6.86596]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [959.06805]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [959.06805]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 076389c3d..e781e0980 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index b0d00b8f8..33daffa1e 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values.json deleted file mode 100644 index c59b98b90..000000000 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values.json +++ /dev/null @@ -1 +0,0 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [11.55278, 0.77358, 0.76856, 0.77172, 0.75887, 0.76061, 0.75836, 0.76125, 0.76192, 0.76187, 0.76171, 0.76045, 0.7599, 0.76535, 0.76121, 0.76796, 0.76998, 0.76511, 0.76167, 0.75816]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.97639, 0.39525, 0.3898, 0.39437, 0.37749, 0.38195, 0.37908, 0.37821, 0.38433, 0.38023, 0.38359, 0.37973, 0.37768, 0.37754, 0.38336, 0.38173, 0.39026, 0.38845, 0.38337, 0.37691]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.32964, 0.37495, 0.37481, 0.37567, 0.37884, 0.37558, 0.37486, 0.37929, 0.37612, 0.37965, 0.37608, 0.37503, 0.37843, 0.38541, 0.37552, 0.38094, 0.37923, 0.37628, 0.37437, 0.37757]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.89543, 0.00188, 0.00211, 0.00164, 0.00165, 0.00162, 0.00162, 0.00162, 0.00184, 0.00165, 0.00164, 0.00208, 0.00162, 0.00167, 0.0016, 0.00168, 
0.00165, 0.00163, 0.00164, 0.00161]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00146, 0.00105, 0.00105, 0.00102, 0.00107, 0.00107, 0.00107, 0.00109, 0.00105, 0.00106, 0.00107, 0.00106, 0.00106, 0.00106, 0.00108, 0.00108, 0.00107, 0.00104, 0.00103, 0.0011]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.50022, 0.00376, 0.00381, 0.00329, 0.00321, 0.00354, 0.00371, 0.00375, 0.00366, 0.00301, 0.00349, 0.00372, 0.00349, 0.00369, 0.00297, 0.00283, 0.00369, 0.00377, 0.00388, 0.00369]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.04986, 0.02302, 0.02299, 0.02588, 0.02338, 0.0231, 0.02293, 0.0231, 0.02309, 0.02329, 0.02328, 0.02332, 0.02304, 0.02327, 0.02287, 0.02321, 0.02315, 0.0234, 0.02312, 0.02327]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0158, 0.00219, 0.00221, 0.00411, 0.0022, 0.0022, 0.00216, 0.0022, 0.00217, 0.00218, 0.00218, 0.00225, 0.00233, 0.00219, 0.00223, 0.00222, 0.00212, 0.0022, 0.00222, 0.00225]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00301, 0.00302, 0.00302, 0.00339, 0.003, 0.00302, 0.00302, 0.00301, 0.00301, 0.00301, 0.003, 0.00301, 0.00302, 0.00304, 0.003, 0.00301, 0.00299, 0.00304, 0.00303, 0.00303]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.57167, 0.03386, 0.03382, 0.03847, 0.03353, 0.03358, 0.03363, 0.03394, 0.03377, 0.03326, 0.03368, 0.03412, 0.03363, 0.03407, 0.03281, 0.03316, 0.03373, 0.03419, 0.03396, 0.034]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32677, 9.4141, 8.86401, 8.56564, 8.28782, 8.1035, 7.83676, 7.53769, 7.39294, 7.29345, 7.37746, 7.22535, 7.11277, 7.06759, 6.91832, 6.96664, 6.97845, 7.04885, 6.7213, 6.98241]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32677, 9.4141, 8.86401, 8.56564, 8.28782, 8.1035, 7.83676, 7.53769, 7.39294, 7.29345, 7.37746, 7.22535, 7.11277, 7.06759, 6.91832, 6.96664, 6.97845, 7.04885, 6.7213, 6.98241]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, 
"values": [21.26434, 2.17404, 2.50103, 2.08973, 1.92522, 1.69977, 1.63605, 1.57256, 1.48469, 1.29632, 1.00932, 1.0148, 0.95539, 1.04571, 0.94482, 0.77816, 1.07456, 1.17593, 1.12335, 0.8491]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26434, 2.17404, 2.50103, 2.08973, 1.92522, 1.69977, 1.63605, 1.57256, 1.48469, 1.29632, 1.00932, 1.0148, 0.95539, 1.04571, 0.94482, 0.77816, 1.07456, 1.17593, 1.12335, 0.8491]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43306.0, 40955.0, 43967.0, 41614.0, 44764.0, 43923.0, 41108.0, 42464.0, 44664.0, 43899.0, 41152.0, 43230.0, 39719.0, 45367.0, 43334.0, 43903.0, 45349.0, 45688.0, 46166.0, 44691.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43306.0, 40955.0, 43967.0, 41614.0, 44764.0, 43923.0, 41108.0, 42464.0, 44664.0, 43899.0, 41152.0, 43230.0, 39719.0, 45367.0, 43334.0, 43903.0, 45349.0, 45688.0, 46166.0, 44691.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05212, 284.1051, 284.15643, 284.20459, 284.25775, 284.30682, 284.34848, 284.38312, 284.41144, 284.43539, 284.45441, 284.46988, 284.48172, 284.49054]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05212, 284.1051, 284.15643, 284.20459, 284.25775, 284.30682, 284.34848, 284.38312, 284.41144, 284.43539, 284.45441, 284.46988, 284.48172, 284.49054]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [13.15856, 0.82951, 0.82427, 0.83168, 0.8147, 0.81581, 0.81386, 0.8171, 0.8176, 0.81664, 0.81719, 0.81685, 0.81547, 0.82136, 0.81551, 0.82315, 0.82591, 0.82132, 0.81777, 0.81414]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9202]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9202]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.5238]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.5238]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..494043e34 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.71086, 0.71893, 0.72885, 0.70321, 0.70401, 0.7141, 0.70976, 0.70408, 0.70335, 0.70493, 0.7093, 0.7085, 0.7048, 0.70419, 0.7078, 0.70467, 0.69381, 0.69597, 0.69193, 0.69684]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.79062, 0.35414, 0.36513, 0.33889, 0.34029, 0.3472, 0.34538, 0.33905, 0.33883, 0.3403, 0.34588, 0.34318, 0.34002, 0.33934, 0.33993, 0.34056, 0.32859, 0.33199, 0.32739, 0.33349]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.26804, 0.36177, 0.36023, 0.3614, 0.36044, 0.3688, 0.36315, 0.36233, 0.36183, 0.36219, 0.36248, 0.36207, 0.36158, 0.36184, 0.36344, 0.36275, 0.36265, 0.36201, 0.36266, 0.36271]}, 
"layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [7e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.72582, 0.0016, 0.00158, 0.0016, 0.00159, 0.0016, 0.00159, 0.00159, 0.00161, 0.0016, 0.00159, 0.00161, 0.00158, 0.00159, 0.00163, 0.0016, 0.00159, 0.00159, 0.00158, 0.00162]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00128, 0.00108, 0.00105, 0.00111, 0.00111, 0.00109, 0.00108, 0.00108, 0.00108, 0.00103, 0.00112, 0.00109, 0.00108, 0.00108, 0.00108, 0.00105, 0.00107, 0.00108, 0.00104, 0.00102]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.69392, 0.0034, 0.00322, 0.00351, 0.00348, 0.00346, 0.00349, 0.00351, 0.00338, 0.0036, 0.0035, 0.00345, 0.0032, 0.00342, 0.00312, 0.0032, 0.00325, 0.00328, 0.00326, 0.00293]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.04331, 0.02443, 0.02426, 0.02439, 0.02443, 0.02433, 0.02433, 0.02454, 0.02465, 0.0246, 0.02426, 0.02413, 0.02402, 0.0243, 0.02477, 0.0241, 0.02419, 0.02427, 0.02391, 0.02396]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0211, 0.00227, 0.00227, 0.00224, 0.00225, 0.00228, 0.00227, 0.00225, 0.0022, 0.00228, 0.00222, 0.00225, 0.00231, 0.0022, 0.00226, 0.00228, 0.00215, 0.00214, 0.0022, 0.00214]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00418, 0.00293, 0.00293, 0.00293, 0.00363, 0.00311, 0.00295, 0.00294, 0.00294, 0.00292, 0.00294, 0.00293, 0.00294, 0.00293, 0.00293, 0.00294, 0.00288, 0.00287, 0.00286, 0.00288]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.7649, 0.03478, 0.03443, 0.03485, 0.03558, 0.03495, 0.03478, 0.03499, 0.03496, 0.0351, 0.03473, 0.03451, 0.03421, 0.03459, 0.03483, 0.03425, 0.03418, 0.03429, 0.03391, 0.03358]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32668, 9.41412, 8.86385, 8.56561, 8.2879, 8.10364, 7.83672, 7.53771, 7.3931, 7.29349, 7.3775, 7.22521, 7.11281, 7.06743, 6.91842, 6.96698, 6.97826, 7.04906, 6.72131, 6.98252]}, 
"lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32668, 9.41412, 8.86385, 8.56561, 8.2879, 8.10364, 7.83672, 7.53771, 7.3931, 7.29349, 7.3775, 7.22521, 7.11281, 7.06743, 6.91842, 6.96698, 6.97826, 7.04906, 6.72131, 6.98252]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26364, 2.17403, 2.49719, 2.08969, 1.92529, 1.69973, 1.63605, 1.57249, 1.48395, 1.29577, 1.00881, 1.01474, 0.95564, 1.04584, 0.94469, 0.77682, 1.06965, 1.16858, 1.12415, 0.84938]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26364, 2.17403, 2.49719, 2.08969, 1.92529, 1.69973, 1.63605, 1.57249, 1.48395, 1.29577, 1.00881, 1.01474, 0.95564, 1.04584, 0.94469, 0.77682, 1.06965, 1.16858, 1.12415, 0.84938]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43310.0, 40962.0, 43962.0, 41624.0, 44767.0, 43912.0, 41094.0, 42478.0, 44664.0, 43895.0, 41151.0, 43234.0, 39728.0, 45361.0, 43347.0, 43904.0, 45366.0, 45690.0, 46175.0, 44681.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43310.0, 40962.0, 43962.0, 41624.0, 44767.0, 43912.0, 41094.0, 42478.0, 44664.0, 43895.0, 41151.0, 43234.0, 39728.0, 45361.0, 43347.0, 43904.0, 45366.0, 45690.0, 46175.0, 44681.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05209, 284.1051, 284.15646, 284.20462, 284.25775, 284.30688, 284.34857, 284.38318, 284.4115, 284.43536, 284.4545, 284.46991, 284.48178, 284.49057]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05209, 284.1051, 284.15646, 284.20462, 284.25775, 284.30688, 284.34857, 284.38318, 284.4115, 284.43536, 284.4545, 284.46991, 284.48178, 284.49057]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [11.50028, 0.77522, 0.78519, 0.75964, 0.76022, 0.77024, 0.76566, 0.76033, 0.75984, 0.76147, 0.76589, 0.76431, 0.76018, 0.76013, 0.76364, 0.7591, 0.7484, 0.75044, 0.74626, 0.75089]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.92026]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.92026]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.58026]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.58026]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 000000000..9b48e0802 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.71001, 0.98167, 
0.67602, 0.67957, 0.67383, 0.67833, 0.6786, 0.67439, 0.67925, 0.6775, 0.67433, 0.67851, 0.6788, 0.67556, 0.68114, 0.67962, 0.6773, 0.67444, 0.68438, 0.68066]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.44785, 0.63132, 0.32811, 0.32906, 0.32792, 0.32848, 0.32661, 0.32879, 0.33029, 0.33137, 0.32765, 0.32823, 0.33021, 0.32849, 0.33404, 0.33227, 0.33082, 0.32824, 0.33316, 0.32945]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.10727, 0.34793, 0.34464, 0.34976, 0.34367, 0.34625, 0.34888, 0.34392, 0.34602, 0.34354, 0.34321, 0.34724, 0.34855, 0.34401, 0.34584, 0.34631, 0.34721, 0.34247, 0.34765, 0.34807]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [7e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.87223, 0.00177, 0.00184, 0.00158, 0.00162, 0.00156, 0.00156, 0.00155, 0.00156, 0.00155, 0.00156, 0.00157, 0.00156, 0.00154, 0.00179, 0.00155, 0.00155, 0.00155, 0.00181, 0.00156]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00108, 0.00104, 0.00095, 0.00093, 0.00095, 0.00095, 0.00096, 0.00094, 0.00096, 0.00095, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00095, 0.00093, 0.00093, 0.00093, 0.00092]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.44019, 0.00288, 0.00273, 0.0024, 0.00284, 0.00269, 0.00268, 0.0027, 0.00269, 0.00276, 0.00264, 0.0026, 0.00231, 0.00265, 0.00233, 0.00234, 0.00242, 0.00248, 0.00264, 0.00257]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.04271, 0.02276, 0.02251, 0.02261, 0.02452, 0.02248, 0.02262, 0.02283, 0.02299, 0.02287, 0.02278, 0.02297, 0.02272, 0.02268, 0.02282, 0.02275, 0.02281, 0.02271, 0.02275, 0.02318]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0133, 0.00197, 0.00183, 0.00183, 0.0037, 0.00184, 0.00184, 0.00184, 0.00186, 0.00184, 0.00183, 0.00185, 0.00184, 0.00188, 0.00183, 0.00183, 0.00183, 0.00184, 0.00185, 0.00184]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0028, 0.00282, 0.0028, 0.00275, 0.00296, 0.00276, 0.00275, 0.00276, 0.00276, 0.00277, 0.00275, 0.00276, 0.00274, 0.00275, 0.16325, 0.00275, 0.00274, 0.00276, 0.00275, 0.00275]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.50116, 0.03223, 0.03151, 0.03113, 0.03576, 0.03131, 0.03147, 0.03168, 0.03187, 0.03178, 0.03155, 0.03172, 0.03115, 0.0315, 0.19184, 0.03127, 0.03135, 0.03135, 0.03159, 0.03196]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 
4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32658, 9.41412, 8.86391, 8.56555, 8.28783, 8.10358, 7.83667, 7.53748, 7.39311, 7.29338, 7.37752, 7.22518, 7.1129, 7.06753, 6.91822, 6.96679, 6.97834, 7.04893, 6.72125, 6.98236]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32658, 9.41412, 8.86391, 8.56555, 8.28783, 8.10358, 7.83667, 7.53748, 7.39311, 7.29338, 7.37752, 7.22518, 7.1129, 7.06753, 6.91822, 6.96679, 6.97834, 7.04893, 6.72125, 6.98236]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26881, 2.17405, 2.50113, 2.08969, 1.9252, 1.69978, 1.63604, 1.57247, 1.48489, 1.29657, 1.0094, 1.01529, 0.95501, 1.04473, 0.94493, 0.77746, 1.07392, 1.16913, 1.12613, 0.84986]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26881, 2.17405, 2.50113, 2.08969, 1.9252, 1.69978, 1.63604, 1.57247, 1.48489, 1.29657, 1.0094, 1.01529, 0.95501, 1.04473, 0.94493, 0.77746, 1.07392, 1.16913, 1.12613, 0.84986]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43303.0, 40954.0, 43957.0, 41612.0, 44782.0, 43938.0, 41086.0, 42465.0, 44666.0, 43893.0, 41158.0, 43221.0, 39725.0, 45367.0, 43342.0, 43903.0, 45362.0, 45687.0, 46160.0, 44706.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43303.0, 40954.0, 43957.0, 41612.0, 44782.0, 43938.0, 41086.0, 42465.0, 44666.0, 43893.0, 41158.0, 43221.0, 39725.0, 45367.0, 43342.0, 43903.0, 45362.0, 45687.0, 46160.0, 44706.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05212, 284.10513, 284.15649, 284.20465, 284.25775, 284.30688, 284.34854, 284.38315, 284.41147, 284.43546, 284.45453, 284.46994, 284.48181, 284.49063]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05212, 284.10513, 284.15649, 284.20465, 284.25775, 284.30688, 284.34854, 284.38315, 284.41147, 284.43546, 284.45453, 284.46994, 284.48181, 284.49063]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [12.23694, 1.03463, 0.72739, 0.72966, 0.72882, 0.72883, 0.72924, 0.72542, 0.73039, 0.72858, 0.72719, 0.7292, 0.72931, 0.72642, 0.89265, 0.73026, 0.72781, 0.72495, 0.73526, 0.7318]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9202]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9202]}, "lm loss validation ppl": {"start_step": 0, 
"end_step": 2, "step_interval": 5, "values": [1012.52478]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.52478]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml index d1b9e8429..ac40afa88 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index 540d4c1b7..7a1690768 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..caf2cc75e --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,83 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.41501, + 9.20443, + 8.62112, + 8.34419, + 8.08454, + 7.96905, + 7.68086, + 7.39418, + 7.26109, + 7.19122, + 7.31005, + 7.16619, + 7.0595, + 6.99421, + 6.85589, + 6.93084, + 6.95438, + 7.02457, + 6.6649, + 6.93863 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 115751.0, + 111072.0, + 117055.0, + 112398.0, + 118712.0, + 116944.0, + 111387.0, + 114025.0, + 118464.0, + 116959.0, + 111517.0, + 115593.0, + 108490.0, + 119945.0, + 115762.0, + 116949.0, + 119851.0, + 120399.0, + 121398.0, + 118446.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 16.98551, + 0.62295, + 0.61568, + 0.61161, + 0.6044, + 0.60388, + 0.60536, + 0.60715, + 0.68076, + 0.60177, + 0.61031, + 0.60267, + 0.60068, + 0.60561, + 0.60094, + 0.60637, + 0.59738, + 0.60486, + 0.59557, + 0.6812 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 8abace27d..2df13fd07 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index c1a6d51bf..23f9be284 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 000000000..d752d31b3 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,83 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.32658, + 9.41413, + 8.86432, + 8.56546, + 8.2877, + 8.1035, + 7.83646, + 7.5377, + 7.39282, + 7.29333, + 7.37736, + 7.22498, + 7.11249, + 7.06739, + 6.91817, + 6.96674, + 6.97821, + 7.0494, + 6.72101, + 6.98229 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43310.0, + 40943.0, + 43952.0, + 41616.0, + 44789.0, + 43937.0, + 41093.0, + 42468.0, + 44652.0, + 43894.0, + 41154.0, + 43226.0, + 39719.0, + 45362.0, + 43332.0, + 43913.0, + 45362.0, + 45695.0, + 46170.0, + 44701.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 11.09527, + 0.74337, + 0.74502, + 0.74411, + 1.06685, + 0.74366, + 0.74354, + 0.74287, + 0.7419, + 0.74299, + 1.02516, + 0.74651, + 0.74175, + 0.74347, + 0.7457, + 0.74253, + 0.74391, + 0.74341, + 0.74261, + 0.74236 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values.json rename to tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml index 6aae44ca7..3f19d3a3f 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS 
CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index 6e9731d4c..243e1fc05 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -3,7 +3,6 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 NCCL_ALGO: ^NVLS CUBLAS_WORKSPACE_CONFIG: :4096:8 - N_REPEATS: 5 MODEL_ARGS: --encoder-num-layers: 12 --decoder-num-layers: 12 diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml similarity index 98% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml index 6556baeb5..798f00c90 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml @@ -51,4 +51,4 @@ MODEL_ARGS: --deterministic-mode: true --attention-softmax-in-fp32: true --ckpt-format: torch -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json new file mode 100644 index 000000000..570eca043 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json @@ -0,0 +1 @@ +{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [8.5793, 0.62156, 0.34426, 0.34959, 0.34301, 0.34282, 0.35085, 0.34342, 0.34419, 0.34313, 0.34469, 0.3443, 0.34409, 0.34468, 0.34387, 0.34425, 0.34364, 0.34422, 0.34383, 0.34972]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5.11833, 0.43748, 0.16255, 0.16704, 0.16205, 0.16151, 0.16942, 0.16138, 0.16252, 0.16175, 0.16312, 0.16223, 0.16308, 0.16294, 0.16207, 0.16265, 0.1619, 0.16234, 0.16178, 0.16665]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.7297, 0.17954, 0.17726, 0.17654, 0.17682, 0.17671, 0.17681, 0.17739, 0.17716, 0.17701, 0.17743, 0.17721, 0.177, 0.17726, 0.17669, 0.17644, 0.1773, 0.17687, 0.17734, 0.17678]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 5e-05, 4e-05, 4e-05, 4e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 6e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05]}, 
"all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.58321, 0.00365, 0.00367, 0.00381, 0.00361, 0.00362, 0.00361, 0.00361, 0.00361, 0.00362, 0.0036, 0.00362, 0.00363, 0.00361, 0.00362, 0.00362, 0.00366, 0.00366, 0.00366, 0.00362]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00128, 0.00104, 0.0009, 0.001, 0.00093, 0.0009, 0.00099, 0.00091, 0.00089, 0.00095, 0.00099, 0.00091, 0.00095, 0.00097, 0.00096, 0.00097, 0.00095, 0.00093, 0.00091, 0.00099]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.63878, 0.00531, 0.00498, 0.0055, 0.00476, 0.00472, 0.00508, 0.00477, 0.00474, 0.00476, 0.00488, 0.00414, 0.00418, 0.00419, 0.00476, 0.00458, 0.00422, 0.00478, 0.00475, 0.00476]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.03577, 0.02714, 0.02668, 0.02764, 0.0269, 0.02684, 0.02714, 0.02679, 0.02694, 0.02664, 0.02712, 0.02686, 0.02672, 0.02711, 0.02707, 0.02682, 0.02668, 0.02697, 0.02671, 0.02705]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01745, 0.00284, 0.00279, 0.00296, 0.0028, 0.0028, 0.00281, 0.00284, 0.0028, 0.00279, 0.00282, 0.00281, 0.0028, 0.0028, 0.00281, 0.00283, 0.00281, 0.0028, 0.00278, 0.00282]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00437, 0.00308, 0.00301, 0.00318, 0.00303, 0.00302, 0.00304, 0.00303, 0.00312, 0.003, 0.00305, 0.00302, 0.00304, 0.00303, 0.00305, 0.00304, 0.00303, 0.00302, 0.00302, 0.00306]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.69859, 0.04007, 0.03899, 0.04112, 0.03904, 0.03889, 0.03968, 0.03901, 0.03916, 0.03877, 0.03957, 0.03839, 0.03832, 0.03874, 0.03928, 0.03886, 0.03831, 0.03913, 0.03887, 0.03931]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41105, 8.88302, 8.56266, 8.28771, 8.10231, 7.83818, 7.53405, 7.39422, 7.28751, 7.36793, 7.22187, 7.10601, 7.05271, 6.91418, 6.96486, 6.973, 7.03533, 6.70377, 6.97036]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41105, 8.88302, 8.56266, 8.28771, 8.10231, 7.83818, 7.53405, 7.39422, 7.28751, 7.36793, 7.22187, 7.10601, 7.05271, 6.91418, 6.96486, 6.973, 7.03533, 6.70377, 6.97036]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": 
{"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20568, 2.60115, 2.08118, 1.91833, 1.69112, 1.62099, 1.56865, 1.46236, 1.32506, 1.0147, 0.9197, 0.96922, 0.92739, 1.02635, 0.93686, 0.8341, 1.06816, 1.06549, 1.00001]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20568, 2.60115, 2.08118, 1.91833, 1.69112, 1.62099, 1.56865, 1.46236, 1.32506, 1.0147, 0.9197, 0.96922, 0.92739, 1.02635, 0.93686, 0.8341, 1.06816, 1.06549, 1.00001]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40948.0, 43970.0, 41602.0, 44746.0, 43922.0, 41250.0, 42504.0, 44676.0, 43887.0, 41135.0, 43266.0, 39677.0, 45400.0, 43322.0, 43888.0, 45339.0, 45685.0, 46189.0, 44648.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40948.0, 43970.0, 41602.0, 44746.0, 43922.0, 41250.0, 42504.0, 44676.0, 43887.0, 41135.0, 43266.0, 39677.0, 45400.0, 43322.0, 43888.0, 45339.0, 45685.0, 46189.0, 44648.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95694, 284.00665, 284.05945, 284.11234, 284.1626, 284.21048, 284.26324, 284.31342, 284.35516, 284.39047, 284.41962, 284.44382, 284.46329, 284.47849, 284.49078, 284.50015]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95694, 284.00665, 284.05945, 284.11234, 284.1626, 284.21048, 284.26324, 284.31342, 284.35516, 284.39047, 284.41962, 284.44382, 284.46329, 284.47849, 284.49078, 284.50015]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [9.31458, 0.68504, 0.40618, 0.41526, 0.40511, 0.40469, 0.4134, 0.40519, 0.4059, 0.40491, 0.40713, 0.40544, 0.40546, 0.40622, 0.406, 0.40584, 0.40459, 0.40637, 0.40544, 0.41191]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.91036]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [6.91036]}, "lm loss validation ppl": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [1002.60657]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [1002.60657]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json new file mode 100644 index 000000000..9eeb96153 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json @@ -0,0 +1 @@ +{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.81404, 0.34462, 0.3516, 0.34439, 0.34393, 0.34401, 0.34441, 0.34482, 0.34542, 0.34424, 0.34662, 0.34945, 0.34949, 0.35118, 0.34866, 0.35191, 0.36263, 0.34951, 0.34899, 0.34768]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.31355, 0.16455, 0.16846, 0.16401, 0.16385, 0.16431, 0.16442, 0.16553, 0.16499, 0.16496, 0.16485, 0.16563, 0.16533, 0.16845, 0.16921, 0.16981, 0.1806, 0.16911, 0.16754, 0.16714]}, "backward-compute-time": 
{"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.99825, 0.17436, 0.17778, 0.1744, 0.17441, 0.17407, 0.17356, 0.17524, 0.17452, 0.175, 0.17682, 0.17918, 0.17946, 0.17646, 0.1748, 0.17691, 0.17882, 0.17598, 0.17491, 0.17482]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05, 4e-05, 3e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 3e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.32584, 0.00364, 0.00361, 0.00362, 0.00361, 0.00362, 0.00361, 0.00378, 0.00364, 0.0036, 0.00362, 0.00359, 0.00361, 0.00363, 0.00361, 0.0037, 0.0037, 0.0036, 0.00362, 0.0036]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00127, 0.00097, 0.00102, 0.00098, 0.00096, 0.00097, 0.00096, 0.001, 0.00097, 0.00101, 0.00097, 0.00099, 0.00091, 0.00096, 0.00097, 0.001, 0.00099, 0.00097, 0.00096, 0.00098]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.82922, 0.00468, 0.00493, 0.00495, 0.00501, 0.00506, 0.00519, 0.00518, 0.00505, 0.00512, 0.00509, 0.00462, 0.00457, 0.0046, 0.00508, 0.00493, 0.00442, 0.00498, 0.00507, 0.00494]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.03499, 0.02591, 0.02578, 0.0258, 0.02614, 0.026, 0.02589, 0.02598, 0.026, 0.02573, 0.02873, 0.02584, 0.02574, 0.02595, 0.02589, 0.02585, 0.02573, 0.02574, 0.02577, 0.02573]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01559, 0.00285, 0.00288, 0.00284, 0.00283, 0.00286, 0.00287, 0.00298, 0.00288, 0.0041, 0.00302, 0.00287, 0.00288, 0.00286, 0.00287, 0.00293, 0.00287, 0.00287, 0.00285, 0.00287]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00316, 0.00308, 0.00312, 0.0031, 0.00346, 0.0031, 0.00311, 0.0031, 0.00312, 0.00459, 0.00309, 0.00308, 0.0031, 0.00311, 0.0031, 0.00312, 0.00307, 0.00309, 0.00308, 0.00308]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.88542, 0.03816, 0.03835, 0.03835, 0.03902, 0.03861, 0.03864, 0.03888, 0.03865, 0.04122, 0.04158, 0.03801, 0.03781, 0.0381, 0.03851, 0.0385, 0.03778, 0.03827, 0.03833, 0.03823]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": 
{"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41112, 8.88304, 8.56269, 8.28765, 8.10224, 7.83813, 7.53409, 7.39411, 7.28757, 7.3679, 7.22194, 7.10575, 7.0526, 6.91422, 6.96483, 6.97306, 7.03511, 6.70374, 6.97038]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41112, 8.88304, 8.56269, 8.28765, 8.10224, 7.83813, 7.53409, 7.39411, 7.28757, 7.3679, 7.22194, 7.10575, 7.0526, 6.91422, 6.96483, 6.97306, 7.03511, 6.70374, 6.97038]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20571, 2.60016, 2.0812, 1.91834, 1.69111, 1.62094, 1.56876, 1.46252, 1.32493, 1.01436, 0.91945, 0.9683, 0.92765, 1.02683, 0.93685, 0.8336, 1.06608, 1.06564, 1.00043]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20571, 2.60016, 2.0812, 1.91834, 1.69111, 1.62094, 1.56876, 1.46252, 1.32493, 1.01436, 0.91945, 0.9683, 0.92765, 1.02683, 0.93685, 0.8336, 1.06608, 1.06564, 1.00043]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40958.0, 43972.0, 41597.0, 44750.0, 43923.0, 41262.0, 42494.0, 44656.0, 43889.0, 41161.0, 43247.0, 39676.0, 45397.0, 43316.0, 43882.0, 45349.0, 45684.0, 46190.0, 44647.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40958.0, 43972.0, 41597.0, 44750.0, 43923.0, 41262.0, 42494.0, 44656.0, 43889.0, 41161.0, 43247.0, 39676.0, 45397.0, 43316.0, 43882.0, 45349.0, 45684.0, 46190.0, 44647.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95691, 284.00662, 284.05942, 284.1123, 284.1626, 284.21048, 284.26328, 284.31339, 284.35516, 284.39047, 284.41965, 284.44385, 284.46332, 284.47849, 284.49078, 284.50018]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95691, 284.00662, 284.05942, 284.1123, 284.1626, 284.21048, 284.26328, 284.31339, 284.35516, 284.39047, 284.41965, 284.44385, 284.46332, 284.47849, 284.49078, 284.50018]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [11.73555, 0.40514, 0.41329, 0.40506, 0.40504, 0.40534, 0.4059, 0.40634, 0.40634, 0.40933, 0.41129, 0.40992, 0.4098, 0.41183, 0.40987, 0.41385, 0.42316, 0.41023, 0.40995, 0.40824]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9103]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9103]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1002.54486]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1002.54486]}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml similarity index 98% rename from 
tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml index 70077b84a..df56656bd 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml @@ -51,4 +51,4 @@ MODEL_ARGS: --deterministic-mode: true --attention-softmax-in-fp32: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json new file mode 100644 index 000000000..cac516107 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json @@ -0,0 +1,763 @@ +{ + "forward-backward-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 9.31314, + 0.40373, + 0.40036, + 0.40377, + 0.40009, + 0.40024, + 0.40008, + 0.40025, + 0.40037, + 0.40077, + 0.39995, + 0.39931, + 0.39853, + 0.40105, + 0.40045, + 0.40088, + 0.39933, + 0.39867, + 0.39862, + 0.40146 + ] + }, + "forward-compute-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 5.20489, + 0.17867, + 0.17875, + 0.18291, + 0.18015, + 0.18089, + 0.18006, + 0.1809, + 0.18013, + 0.18084, + 0.18042, + 0.18048, + 0.17867, + 0.18032, + 0.18036, + 0.17967, + 0.17941, + 0.1796, + 0.17815, + 0.18228 + ] + }, + "backward-compute-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 2.81105, + 0.21748, + 0.21374, + 0.21269, + 0.21168, + 0.21226, + 0.2121, + 0.21196, + 0.211, + 0.21203, + 0.21167, + 0.2108, + 0.21104, + 0.21136, + 0.21186, + 0.21203, + 0.21083, + 0.21074, + 0.21117, + 0.21195 + ] + }, + "layernorm-grads-all-reduce-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.00512, + 0.00431, + 0.00431, + 0.00429, + 0.00441, + 0.00434, + 0.00441, + 0.00436, + 0.00493, + 0.00433, + 0.00438, + 0.00473, + 0.00441, + 0.00528, + 0.00439, + 0.0044, + 0.00435, + 0.00437, + 0.00441, + 0.0045 + ] + }, + "embedding-grads-all-reduce-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 5e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 5e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05, + 4e-05 + ] + }, + "all-grads-sync-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 1.05666, + 0.00366, + 0.00367, + 0.00368, + 0.00368, + 0.00368, + 0.00366, + 0.00366, + 0.00363, + 0.00367, + 0.00366, + 0.00368, + 0.00367, + 0.00368, + 0.00368, + 0.00369, + 0.00367, + 0.0037, + 0.00368, + 0.00368 + ] + }, + "optimizer-copy-to-main-grad-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.0011, + 0.00069, + 0.00071, + 0.00073, + 0.00072, + 0.00072, + 0.00077, + 0.00071, + 0.00075, + 0.00074, + 0.00076, + 0.00075, + 0.00075, + 0.00089, + 0.00076, + 0.00076, + 0.00075, + 0.00076, + 0.00077, + 0.00076 + ] + }, + "optimizer-clip-main-grad-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ 
+ 0.70283, + 0.00449, + 0.00444, + 0.00452, + 0.00448, + 0.00448, + 0.00443, + 0.00452, + 0.00448, + 0.00445, + 0.00453, + 0.00385, + 0.00391, + 0.00488, + 0.00448, + 0.00393, + 0.00454, + 0.00395, + 0.0045, + 0.00395 + ] + }, + "optimizer-count-zeros-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.03309, + 0.02705, + 0.02695, + 0.02681, + 0.02743, + 0.0274, + 0.02716, + 0.02692, + 0.02696, + 0.02694, + 0.02683, + 0.02723, + 0.02741, + 0.02693, + 0.02688, + 0.02703, + 0.02721, + 0.02743, + 0.02725, + 0.02672 + ] + }, + "optimizer-inner-step-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.01276, + 0.00279, + 0.00278, + 0.00279, + 0.00281, + 0.00283, + 0.0028, + 0.00278, + 0.00278, + 0.00277, + 0.00277, + 0.00282, + 0.00282, + 0.00286, + 0.00283, + 0.00278, + 0.00281, + 0.0028, + 0.00283, + 0.00281 + ] + }, + "optimizer-copy-main-to-model-params-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.00299, + 0.00342, + 0.00298, + 0.00298, + 0.00301, + 0.00299, + 0.00321, + 0.00299, + 0.00297, + 0.00296, + 0.00298, + 0.00298, + 0.00309, + 0.00309, + 0.00298, + 0.00299, + 0.00299, + 0.00298, + 0.00304, + 0.00303 + ] + }, + "optimizer-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.75369, + 0.03908, + 0.03853, + 0.03848, + 0.03909, + 0.03905, + 0.03905, + 0.03857, + 0.03857, + 0.0385, + 0.03853, + 0.03832, + 0.03863, + 0.0393, + 0.03858, + 0.03814, + 0.03897, + 0.03856, + 0.03903, + 0.03795 + ] + }, + "learning-rate": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.0001, + 0.0001, + 9e-05, + 9e-05, + 8e-05, + 8e-05, + 7e-05, + 7e-05, + 6e-05, + 6e-05, + 5e-05, + 5e-05, + 5e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 2e-05, + 2e-05, + 1e-05 + ] + }, + "learning-rate vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.0001, + 0.0001, + 9e-05, + 9e-05, + 8e-05, + 8e-05, + 7e-05, + 7e-05, + 6e-05, + 6e-05, + 5e-05, + 5e-05, + 5e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 2e-05, + 2e-05, + 1e-05 + ] + }, + "batch-size": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0 + ] + }, + "batch-size vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0 + ] + }, + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39767, + 9.41317, + 8.87813, + 8.5684, + 8.2951, + 8.11103, + 7.84414, + 7.5425, + 7.39999, + 7.29586, + 7.3749, + 7.23104, + 7.11682, + 7.06328, + 6.92509, + 6.97755, + 6.98393, + 7.04582, + 6.71802, + 6.98051 + ] + }, + "lm loss vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39767, + 9.41317, + 8.87813, + 8.5684, + 8.2951, + 8.11103, + 7.84414, + 7.5425, + 7.39999, + 7.29586, + 7.3749, + 7.23104, + 7.11682, + 7.06328, + 6.92509, + 6.97755, + 6.98393, + 7.04582, + 6.71802, + 6.98051 + ] + }, + "loss-scale": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + 
}, + "loss-scale vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + "grad-norm": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 22.49022, + 2.20544, + 2.51715, + 2.08127, + 1.91884, + 1.69272, + 1.62465, + 1.57572, + 1.4803, + 1.31751, + 1.06666, + 0.8993, + 0.90904, + 1.01869, + 1.52232, + 0.87585, + 1.08829, + 0.93451, + 1.30493, + 0.90059 + ] + }, + "grad-norm vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 22.49022, + 2.20544, + 2.51715, + 2.08127, + 1.91884, + 1.69272, + 1.62465, + 1.57572, + 1.4803, + 1.31751, + 1.06666, + 0.8993, + 0.90904, + 1.01869, + 1.52232, + 0.87585, + 1.08829, + 0.93451, + 1.30493, + 0.90059 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43305.0, + 40966.0, + 43940.0, + 41620.0, + 44783.0, + 43929.0, + 41225.0, + 42517.0, + 44642.0, + 43905.0, + 41141.0, + 43266.0, + 39698.0, + 45369.0, + 43290.0, + 43888.0, + 45355.0, + 45686.0, + 46159.0, + 44703.0 + ] + }, + "num-zeros vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43305.0, + 40966.0, + 43940.0, + 41620.0, + 44783.0, + 43929.0, + 41225.0, + 42517.0, + 44642.0, + 43905.0, + 41141.0, + 43266.0, + 39698.0, + 45369.0, + 43290.0, + 43888.0, + 45355.0, + 45686.0, + 46159.0, + 44703.0 + ] + }, + "params-norm": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 283.80814, + 283.8324, + 283.87021, + 283.9111, + 283.95691, + 284.00668, + 284.05994, + 284.11295, + 284.16342, + 284.21112, + 284.26437, + 284.31451, + 284.35611, + 284.39172, + 284.42053, + 284.44376, + 284.46249, + 284.47748, + 284.48962, + 284.49857 + ] + }, + "params-norm vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 283.80814, + 283.8324, + 283.87021, + 283.9111, + 283.95691, + 284.00668, + 284.05994, + 284.11295, + 284.16342, + 284.21112, + 284.26437, + 284.31451, + 284.35611, + 284.39172, + 284.42053, + 284.44376, + 284.46249, + 284.47748, + 284.48962, + 284.49857 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.11234, + 0.4649, + 0.46098, + 0.46501, + 0.46182, + 0.46156, + 0.46171, + 0.46107, + 0.4613, + 0.46164, + 0.46086, + 0.46018, + 0.45981, + 0.4639, + 0.46112, + 0.46197, + 0.46097, + 0.45954, + 0.46005, + 0.4621 + ] + }, + "lm loss validation": { + "start_step": 0, + "end_step": 2, + "step_interval": 5, + "values": [ + 6.91467 + ] + }, + "lm loss validation vs samples": { + "start_step": 0, + "end_step": 1, + "step_interval": 5, + "values": [ + 6.91467 + ] + }, + "lm loss validation ppl": { + "start_step": 0, + "end_step": 1, + "step_interval": 5, + "values": [ + 1006.93915 + ] + }, + "lm loss validation ppl vs samples": { + "start_step": 0, + "end_step": 1, + "step_interval": 5, + "values": [ + 1006.93915 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json new file mode 100644 index 000000000..27e890fd9 --- /dev/null +++ 
b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json @@ -0,0 +1,763 @@ +{ + "forward-backward-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.9967, + 0.401, + 0.40147, + 0.3912, + 0.39873, + 0.39107, + 0.39949, + 0.40485, + 0.39712, + 0.39832, + 0.39764, + 0.40869, + 0.39232, + 0.39721, + 0.39904, + 0.40227, + 0.39138, + 0.39833, + 0.40047, + 0.39544 + ] + }, + "forward-compute-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 6.48719, + 0.1808, + 0.18642, + 0.17754, + 0.18021, + 0.17845, + 0.17971, + 0.18366, + 0.18445, + 0.17837, + 0.18213, + 0.1862, + 0.17839, + 0.18306, + 0.17791, + 0.18267, + 0.17785, + 0.17902, + 0.1859, + 0.18165 + ] + }, + "backward-compute-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 2.90603, + 0.21569, + 0.20801, + 0.20679, + 0.21361, + 0.20617, + 0.21449, + 0.21342, + 0.20709, + 0.21379, + 0.20706, + 0.21465, + 0.20741, + 0.2069, + 0.2142, + 0.21282, + 0.20722, + 0.21411, + 0.20809, + 0.20825 + ] + }, + "layernorm-grads-all-reduce-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.00474, + 0.00397, + 0.00441, + 0.00441, + 0.0045, + 0.00432, + 0.00444, + 0.00454, + 0.00446, + 0.00429, + 0.00445, + 0.00452, + 0.00445, + 0.0045, + 0.00452, + 0.00501, + 0.00425, + 0.00435, + 0.00446, + 0.00455 + ] + }, + "embedding-grads-all-reduce-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 6e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 4e-05, + 3e-05, + 3e-05, + 3e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 3e-05, + 3e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 3e-05 + ] + }, + "all-grads-sync-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 1.3196, + 0.00359, + 0.0036, + 0.00358, + 0.00357, + 0.00358, + 0.0036, + 0.0036, + 0.00358, + 0.00361, + 0.00359, + 0.00357, + 0.00357, + 0.00359, + 0.0036, + 0.00374, + 0.00358, + 0.00358, + 0.00358, + 0.00357 + ] + }, + "optimizer-copy-to-main-grad-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.00118, + 0.0006, + 0.0006, + 0.00059, + 0.00059, + 0.00059, + 0.00063, + 0.00059, + 0.00058, + 0.00064, + 0.00061, + 0.00059, + 0.00059, + 0.00058, + 0.0006, + 0.00065, + 0.00059, + 0.00058, + 0.00059, + 0.00058 + ] + }, + "optimizer-clip-main-grad-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.7916, + 0.00452, + 0.00459, + 0.00449, + 0.00456, + 0.00447, + 0.00456, + 0.00447, + 0.00454, + 0.00455, + 0.00455, + 0.00396, + 0.00391, + 0.00458, + 0.00535, + 0.00401, + 0.00486, + 0.00387, + 0.00445, + 0.00389 + ] + }, + "optimizer-count-zeros-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.03344, + 0.02605, + 0.02598, + 0.02583, + 0.02597, + 0.02572, + 0.02605, + 0.02578, + 0.02584, + 0.0262, + 0.03104, + 0.02591, + 0.026, + 0.02602, + 0.02589, + 0.02577, + 0.02595, + 0.02611, + 0.02591, + 0.02596 + ] + }, + "optimizer-inner-step-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.01284, + 0.00279, + 0.00282, + 0.00304, + 0.00277, + 0.00295, + 0.00282, + 0.0028, + 0.0028, + 0.0028, + 0.00322, + 0.00286, + 0.00278, + 0.00281, + 0.0028, + 0.00289, + 0.00281, + 0.0028, + 0.00283, + 0.00281 + ] + }, + "optimizer-copy-main-to-model-params-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + 
"values": [ + 0.00383, + 0.00307, + 0.00307, + 0.00478, + 0.00306, + 0.00377, + 0.00308, + 0.00307, + 0.00306, + 0.00304, + 0.00394, + 0.00305, + 0.00306, + 0.00305, + 0.00307, + 0.00305, + 0.00394, + 0.00307, + 0.00307, + 0.00306 + ] + }, + "optimizer-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.84399, + 0.03764, + 0.03767, + 0.03939, + 0.03757, + 0.03834, + 0.03775, + 0.03732, + 0.03742, + 0.03785, + 0.04398, + 0.03697, + 0.03696, + 0.03764, + 0.03838, + 0.03699, + 0.03925, + 0.03705, + 0.03746, + 0.03691 + ] + }, + "learning-rate": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.0001, + 0.0001, + 9e-05, + 9e-05, + 8e-05, + 8e-05, + 7e-05, + 7e-05, + 6e-05, + 6e-05, + 5e-05, + 5e-05, + 5e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 2e-05, + 2e-05, + 1e-05 + ] + }, + "learning-rate vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 0.0001, + 0.0001, + 9e-05, + 9e-05, + 8e-05, + 8e-05, + 7e-05, + 7e-05, + 6e-05, + 6e-05, + 5e-05, + 5e-05, + 5e-05, + 4e-05, + 4e-05, + 3e-05, + 3e-05, + 2e-05, + 2e-05, + 1e-05 + ] + }, + "batch-size": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0 + ] + }, + "batch-size vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0, + 32.0 + ] + }, + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39767, + 9.41313, + 8.87826, + 8.56837, + 8.29503, + 8.11096, + 7.84414, + 7.54251, + 7.39997, + 7.29573, + 7.37498, + 7.23101, + 7.11673, + 7.06342, + 6.92492, + 6.97751, + 6.98396, + 7.04575, + 6.71801, + 6.98043 + ] + }, + "lm loss vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39767, + 9.41313, + 8.87826, + 8.56837, + 8.29503, + 8.11096, + 7.84414, + 7.54251, + 7.39997, + 7.29573, + 7.37498, + 7.23101, + 7.11673, + 7.06342, + 6.92492, + 6.97751, + 6.98396, + 7.04575, + 6.71801, + 6.98043 + ] + }, + "loss-scale": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + "loss-scale vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + "grad-norm": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 22.49022, + 2.20552, + 2.51692, + 2.08126, + 1.91884, + 1.69274, + 1.62471, + 1.57573, + 1.48035, + 1.31762, + 1.06619, + 0.8992, + 0.90925, + 1.01884, + 1.52306, + 0.87798, + 1.08796, + 0.9338, + 1.30663, + 0.90086 + ] + }, + "grad-norm vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 22.49022, + 2.20552, + 2.51692, + 2.08126, + 1.91884, + 1.69274, + 1.62471, + 1.57573, + 1.48035, + 1.31762, + 1.06619, + 0.8992, + 0.90925, + 1.01884, + 1.52306, + 0.87798, + 1.08796, + 0.9338, + 1.30663, + 0.90086 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 
43305.0, + 40957.0, + 43944.0, + 41613.0, + 44764.0, + 43920.0, + 41215.0, + 42515.0, + 44647.0, + 43902.0, + 41129.0, + 43274.0, + 39706.0, + 45365.0, + 43273.0, + 43897.0, + 45345.0, + 45686.0, + 46161.0, + 44705.0 + ] + }, + "num-zeros vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43305.0, + 40957.0, + 43944.0, + 41613.0, + 44764.0, + 43920.0, + 41215.0, + 42515.0, + 44647.0, + 43902.0, + 41129.0, + 43274.0, + 39706.0, + 45365.0, + 43273.0, + 43897.0, + 45345.0, + 45686.0, + 46161.0, + 44705.0 + ] + }, + "params-norm": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 283.80814, + 283.83237, + 283.87021, + 283.9111, + 283.95691, + 284.00668, + 284.05994, + 284.11295, + 284.16345, + 284.21112, + 284.2644, + 284.31454, + 284.35611, + 284.39169, + 284.42053, + 284.44376, + 284.46249, + 284.47751, + 284.48962, + 284.49857 + ] + }, + "params-norm vs samples": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 283.80814, + 283.83237, + 283.87021, + 283.9111, + 283.95691, + 284.00668, + 284.05994, + 284.11295, + 284.16345, + 284.21112, + 284.2644, + 284.31454, + 284.35611, + 284.39169, + 284.42053, + 284.44376, + 284.46249, + 284.47751, + 284.48962, + 284.49857 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 11.88485, + 0.46024, + 0.46083, + 0.45067, + 0.45779, + 0.45103, + 0.45872, + 0.46374, + 0.45605, + 0.45774, + 0.46418, + 0.46713, + 0.45087, + 0.45645, + 0.45979, + 0.46102, + 0.45129, + 0.45737, + 0.45953, + 0.45489 + ] + }, + "lm loss validation": { + "start_step": 0, + "end_step": 2, + "step_interval": 5, + "values": [ + 6.91465 + ] + }, + "lm loss validation vs samples": { + "start_step": 0, + "end_step": 2, + "step_interval": 5, + "values": [ + 6.91465 + ] + }, + "lm loss validation ppl": { + "start_step": 0, + "end_step": 2, + "step_interval": 5, + "values": [ + 1006.91901 + ] + }, + "lm loss validation ppl vs samples": { + "start_step": 0, + "end_step": 2, + "step_interval": 5, + "values": [ + 1006.91901 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml similarity index 98% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml index 3a1793957..940b85cfa 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml @@ -52,4 +52,4 @@ MODEL_ARGS: --deterministic-mode: true --attention-softmax-in-fp32: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json new file mode 100644 index 000000000..8150d5539 --- /dev/null +++ 
b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json @@ -0,0 +1,83 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.33709, + 9.42687, + 8.8635, + 8.56221, + 8.28399, + 8.10587, + 7.84887, + 7.53552, + 7.41074, + 7.29558, + 7.393, + 7.21933, + 7.10287, + 7.04869, + 6.90401, + 6.95994, + 6.9644, + 7.03536, + 6.70027, + 6.96648 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43333.0, + 41002.0, + 44020.0, + 41734.0, + 44800.0, + 43940.0, + 41271.0, + 42543.0, + 44725.0, + 43906.0, + 41149.0, + 43283.0, + 39763.0, + 45410.0, + 43320.0, + 43922.0, + 45383.0, + 45713.0, + 46318.0, + 44723.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 9.40905, + 0.23547, + 0.23339, + 0.23504, + 0.23331, + 0.23198, + 0.23546, + 0.22987, + 0.2342, + 0.23143, + 0.49625, + 0.2285, + 0.22833, + 0.22775, + 0.23156, + 0.22944, + 0.23033, + 0.23074, + 0.23117, + 0.22948 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values.json rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml similarity index 98% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml index 233023af3..a05129f53 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml @@ -50,4 +50,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --deterministic-mode: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml similarity index 98% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml index 43afd7336..91c6e2e22 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml @@ -50,4 +50,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --deterministic-mode: true --ckpt-format: torch -TEST_TYPE: ckpt-resume \ No newline at end of file +TEST_TYPE: ckpt-resume diff --git 
a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json new file mode 100644 index 000000000..77be5e6a8 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json @@ -0,0 +1,83 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39854, + 9.4111, + 8.88311, + 8.56273, + 8.2877, + 8.10231, + 7.83823, + 7.53415, + 7.39419, + 7.28768, + 7.36789, + 7.22197, + 7.10581, + 7.05271, + 6.91415, + 6.9649, + 6.97292, + 7.03514, + 6.70368, + 6.97028 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43320.0, + 40947.0, + 43974.0, + 41600.0, + 44757.0, + 43928.0, + 41251.0, + 42505.0, + 44666.0, + 43890.0, + 41139.0, + 43267.0, + 39680.0, + 45388.0, + 43300.0, + 43886.0, + 45357.0, + 45697.0, + 46190.0, + 44658.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 11.77537, + 0.4173, + 0.41286, + 0.4207, + 0.40449, + 0.40246, + 0.40398, + 0.40397, + 0.83597, + 0.40504, + 0.40483, + 0.40662, + 0.40436, + 0.40355, + 0.40635, + 0.40423, + 0.40489, + 0.40503, + 0.40616, + 0.40556 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json similarity index 100% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values.json rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml similarity index 98% rename from tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml rename to tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml index 47ff5b038..cf95759fc 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml @@ -50,4 +50,4 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --deterministic-mode: true --ckpt-format: torch -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values.json deleted file mode 100644 index cb39f6cc3..000000000 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39855, - 9.41112, - 8.88304, - 8.56269, - 8.28765, - 8.10224, - 7.83813, - 7.53409, - 7.39411, - 7.28757, - 7.3679, - 7.22194, - 7.10575, - 7.0526, - 6.91422, - 6.96483, - 6.97306, - 7.03511, - 6.70374, - 6.97038 - ] - }, - "num-zeros": { - 
"start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43312.0, - 40958.0, - 43972.0, - 41597.0, - 44750.0, - 43923.0, - 41262.0, - 42494.0, - 44656.0, - 43889.0, - 41161.0, - 43247.0, - 39676.0, - 45397.0, - 43316.0, - 43882.0, - 45349.0, - 45684.0, - 46190.0, - 44647.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 16.16815, - 0.59042, - 0.4284, - 0.43391, - 0.42668, - 0.42919, - 0.42816, - 0.43087, - 0.4328, - 0.42988, - 0.42869, - 0.42651, - 0.42621, - 0.43082, - 0.43114, - 0.42943, - 0.42758, - 0.43083, - 0.43032, - 0.43533 - ] - } -} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json new file mode 100644 index 000000000..a7c9546ff --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json @@ -0,0 +1,83 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39855, + 9.41109, + 8.88313, + 8.56278, + 8.28768, + 8.10234, + 7.83838, + 7.53397, + 7.39419, + 7.28773, + 7.36796, + 7.22195, + 7.10579, + 7.05267, + 6.91422, + 6.96482, + 6.97307, + 7.03514, + 6.70371, + 6.9703 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43322.0, + 40946.0, + 43968.0, + 41616.0, + 44753.0, + 43934.0, + 41256.0, + 42507.0, + 44661.0, + 43892.0, + 41151.0, + 43273.0, + 39672.0, + 45392.0, + 43312.0, + 43883.0, + 45348.0, + 45682.0, + 46204.0, + 44646.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 12.30166, + 0.42729, + 0.41761, + 0.41344, + 0.41613, + 0.41633, + 0.4052, + 0.40853, + 0.40652, + 0.40913, + 0.40766, + 0.40719, + 0.40688, + 0.40636, + 0.40674, + 0.41103, + 0.4072, + 0.40761, + 0.40819, + 0.40941 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values.json b/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values.json deleted file mode 100644 index 021c05496..000000000 --- a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39236, - 9.4128, - 8.88319, - 8.56427, - 8.29039, - 8.10532, - 7.84044, - 7.53655, - 7.39743, - 7.28828, - 7.36794, - 7.22149, - 7.10817, - 7.05287, - 6.92212, - 6.96976, - 6.98418, - 7.04401, - 6.71005, - 6.97246 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43310.0, - 40945.0, - 43941.0, - 41610.0, - 44749.0, - 43933.0, - 41233.0, - 42463.0, - 44633.0, - 43892.0, - 41120.0, - 43253.0, - 39705.0, - 45385.0, - 43275.0, - 43884.0, - 45347.0, - 45687.0, - 46131.0, - 44708.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 13.97669, - 0.63681, - 0.47949, - 0.48069, - 0.46755, - 0.4765, - 0.47458, - 0.46609, - 0.48646, - 0.47931, - 0.46563, - 0.47271, - 0.49037, - 0.46898, - 0.47713, - 0.472, - 0.46796, - 0.47359, - 0.47799, - 0.46934 - ] - } -} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json new file mode 100644 index 000000000..36f8fd5a4 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json @@ -0,0 +1,83 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 10.39257, + 9.41283, + 8.88294, + 8.56436, + 8.29051, + 8.10533, + 7.84065, + 7.53655, + 7.39754, + 7.28829, + 7.36795, + 7.22148, + 7.10831, + 7.05254, + 6.92215, + 6.96944, + 6.98389, + 7.04412, + 6.70984, + 6.97234 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 43301.0, + 40948.0, + 43949.0, + 41608.0, + 44754.0, + 43932.0, + 41231.0, + 42444.0, + 44636.0, + 43905.0, + 41105.0, + 43237.0, + 39698.0, + 45372.0, + 43280.0, + 43896.0, + 45342.0, + 45688.0, + 46127.0, + 44699.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 100, + "step_interval": 5, + "values": [ + 11.7555, + 0.6076, + 0.4422, + 0.45329, + 0.45345, + 0.44251, + 0.44943, + 0.45554, + 0.46083, + 0.44973, + 0.45086, + 0.45835, + 0.45794, + 0.44841, + 0.44994, + 0.47213, + 0.46165, + 0.44817, + 0.44916, + 0.45906 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json new file mode 100644 index 000000000..d33c47aa1 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json @@ -0,0 +1,40223 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 100000, + "step_interval": 5, + "values": [ + 10.34371, + 10.32721, + 10.114, + 9.86649, + 9.64797, + 9.51665, + 9.43107, + 9.36024, + 9.28612, + 9.18335, + 9.13366, + 9.09549, + 9.00463, + 8.94915, + 8.92068, + 8.89332, + 8.84261, + 8.7778, + 8.74577, + 8.68576, + 8.66147, + 8.60809, + 8.59325, + 8.51331, + 8.45583, + 8.4516, + 8.39704, + 8.3649, + 8.28404, + 8.22978, + 8.2033, + 8.19542, + 8.12821, + 8.09811, + 8.02864, + 7.98128, + 7.91556, + 7.89997, + 7.87425, + 7.79892, + 7.72069, + 7.65651, + 7.64137, + 7.55316, + 7.45861, + 7.42432, + 7.36863, + 7.33937, + 7.22899, + 7.16727, + 7.11539, + 7.04258, + 7.0373, + 6.94246, + 6.85809, + 6.86439, + 6.80298, + 6.76349, + 6.70962, + 6.69861, + 6.66691, + 6.59053, + 6.54721, + 6.5453, + 6.51752, + 6.44991, + 6.54001, + 6.41416, + 6.38233, + 6.42955, + 6.37093, + 6.39886, + 6.36007, + 6.35539, + 6.31348, + 6.32511, + 6.26057, + 6.26525, + 6.25167, + 6.24934, + 6.24069, + 6.16234, + 6.18815, + 6.17433, + 6.1698, + 6.11567, + 6.11808, + 6.07284, + 6.12117, + 6.06599, + 6.03319, + 6.02723, + 6.0445, + 6.02115, + 6.0124, + 5.91088, + 5.97814, + 5.85118, + 5.87578, + 5.94438, + 5.91215, + 5.84502, + 5.85452, + 5.86563, + 5.82595, + 5.8257, + 5.84499, + 5.78783, + 5.76449, + 5.78957, + 5.75028, + 5.7297, + 5.77474, + 5.74849, + 5.73995, + 5.6496, + 5.68544, + 5.68631, + 5.62859, + 5.65657, + 5.64569, + 5.6526, + 5.64158, + 5.64334, + 5.55456, + 5.52606, + 5.54254, + 5.58907, + 5.61788, + 5.58637, + 5.51853, + 5.54271, + 5.55124, + 5.53125, + 5.55615, + 5.54975, + 5.54612, + 5.50163, + 5.53401, + 5.47103, + 5.44242, + 5.49341, + 5.43964, + 5.4582, + 5.38404, + 5.44417, + 5.45729, + 5.40678, + 5.48959, + 5.37385, + 5.40525, + 5.39967, + 5.37509, + 
5.33497, + 5.39374, + 5.33408, + 5.37224, + 5.36061, + 5.29049, + 5.29867, + 5.33922, + 5.28809, + 5.28297, + 5.29188, + 5.31675, + 5.32539, + 5.32902, + 5.22632, + 5.33654, + 5.30256, + 5.29351, + 5.28235, + 5.29219, + 5.19923, + 5.23118, + 5.22195, + 5.24248, + 5.20525, + 5.19331, + 5.17488, + 5.20168, + 5.13312, + 5.23356, + 5.15915, + 5.14987, + 5.12961, + 5.17959, + 5.16337, + 5.17791, + 5.13279, + 5.15866, + 5.11402, + 5.10809, + 5.16762, + 5.0967, + 5.08165, + 5.13643, + 5.14252, + 5.14628, + 5.07924, + 5.11738, + 5.04207, + 5.04119, + 5.07161, + 5.02141, + 5.05205, + 5.06739, + 5.06261, + 5.01499, + 5.05365, + 5.05105, + 5.06245, + 5.01509, + 5.01269, + 5.02778, + 5.0117, + 4.99525, + 4.96393, + 4.98399, + 5.03623, + 5.0127, + 4.96259, + 5.00467, + 4.99258, + 4.91176, + 4.9443, + 4.99796, + 4.99819, + 4.94077, + 4.93736, + 4.96306, + 4.91808, + 4.92228, + 4.87653, + 4.95257, + 4.9784, + 4.90774, + 4.90829, + 4.84604, + 4.88128, + 4.94029, + 4.89162, + 4.8621, + 4.89156, + 4.86422, + 4.78927, + 4.88608, + 4.84052, + 4.85941, + 4.84103, + 4.92018, + 4.87086, + 4.75272, + 4.81387, + 4.81981, + 4.81054, + 4.86339, + 4.83061, + 4.88123, + 4.83057, + 4.81621, + 4.82811, + 4.81344, + 4.87048, + 4.85872, + 4.7662, + 4.88862, + 4.83712, + 4.82332, + 4.85606, + 4.82294, + 4.83144, + 4.71875, + 4.82615, + 4.76198, + 4.7181, + 4.7939, + 4.78762, + 4.77938, + 4.81392, + 4.75002, + 4.73173, + 4.78803, + 4.81845, + 4.74332, + 4.84571, + 4.80402, + 4.73229, + 4.7338, + 4.70098, + 4.77377, + 4.76931, + 4.75162, + 4.73874, + 4.75287, + 4.72182, + 4.74306, + 4.76364, + 4.74807, + 4.75593, + 4.71463, + 4.73093, + 4.71701, + 4.6946, + 4.73624, + 4.71605, + 4.66674, + 4.67845, + 4.716, + 4.69358, + 4.65051, + 4.70965, + 4.71412, + 4.67758, + 4.69109, + 4.62664, + 4.67108, + 4.66478, + 4.64889, + 4.69847, + 4.66109, + 4.60784, + 4.64061, + 4.72245, + 4.66823, + 4.69203, + 4.62672, + 4.56931, + 4.69906, + 4.6596, + 4.60592, + 4.66496, + 4.63112, + 4.66863, + 4.6666, + 4.69607, + 4.70907, + 4.63781, + 4.57693, + 4.64554, + 4.62399, + 4.5774, + 4.65926, + 4.63967, + 4.61865, + 4.65526, + 4.65787, + 4.62302, + 4.63163, + 4.62148, + 4.62259, + 4.55848, + 4.57079, + 4.58421, + 4.57123, + 4.57655, + 4.58359, + 4.59391, + 4.57222, + 4.65079, + 4.58564, + 4.58319, + 4.53181, + 4.54073, + 4.55527, + 4.60676, + 4.62171, + 4.53496, + 4.61109, + 4.61188, + 4.64368, + 4.57979, + 4.46449, + 4.57862, + 4.62607, + 4.56378, + 4.62886, + 4.54314, + 4.56404, + 4.5332, + 4.54747, + 4.56644, + 4.5655, + 4.50503, + 4.53438, + 4.53179, + 4.54529, + 4.50102, + 4.45783, + 4.46511, + 4.53787, + 4.56745, + 4.53006, + 4.50951, + 4.52579, + 4.55778, + 4.53446, + 4.53667, + 4.57361, + 4.55073, + 4.46018, + 4.55381, + 4.47448, + 4.54257, + 4.53436, + 4.46738, + 4.51397, + 4.52642, + 4.52233, + 4.51263, + 4.47809, + 4.51756, + 4.49554, + 4.56551, + 4.49964, + 4.50747, + 4.50212, + 4.47716, + 4.53627, + 4.56063, + 4.46399, + 4.45834, + 4.46807, + 4.4765, + 4.48007, + 4.49675, + 4.45521, + 4.44142, + 4.48267, + 4.48807, + 4.49728, + 4.54687, + 4.44415, + 4.46507, + 4.47678, + 4.4658, + 4.43037, + 4.48776, + 4.38539, + 4.51719, + 4.38865, + 4.40015, + 4.4873, + 4.44821, + 4.52269, + 4.50812, + 4.45893, + 4.42479, + 4.458, + 4.41173, + 4.38105, + 4.45432, + 4.48549, + 4.53234, + 4.49588, + 4.47487, + 4.40138, + 4.39951, + 4.40127, + 4.42078, + 4.40868, + 4.38337, + 4.45332, + 4.40609, + 4.42202, + 4.43767, + 4.44993, + 4.44147, + 4.44211, + 4.43367, + 4.47342, + 4.46464, + 4.37303, + 4.40851, + 4.39862, + 4.39781, + 4.43557, + 4.34771, + 
4.41679, + 4.3494, + 4.35542, + 4.43877, + 4.43076, + 4.42589, + 4.37757, + 4.36102, + 4.325, + 4.38068, + 4.41097, + 4.44037, + 4.40652, + 4.36263, + 4.37697, + 4.30277, + 4.39542, + 4.32018, + 4.31759, + 4.42157, + 4.30335, + 4.37803, + 4.33683, + 4.36159, + 4.33094, + 4.27205, + 4.36141, + 4.38782, + 4.31195, + 4.42062, + 4.35485, + 4.31702, + 4.38093, + 4.25977, + 4.35765, + 4.36693, + 4.35076, + 4.28993, + 4.37813, + 4.28099, + 4.25841, + 4.3138, + 4.50574, + 4.30034, + 4.31952, + 4.32474, + 4.28206, + 4.40133, + 4.388, + 4.30447, + 4.34673, + 4.27437, + 4.27176, + 4.27178, + 4.31596, + 4.35738, + 4.36794, + 4.32901, + 4.32664, + 4.32511, + 4.31891, + 4.44161, + 4.38934, + 4.26593, + 4.24697, + 4.29139, + 4.29503, + 4.2805, + 4.30744, + 4.28106, + 4.29376, + 4.34339, + 4.31353, + 4.26455, + 4.34641, + 4.28986, + 4.27105, + 4.30687, + 4.31653, + 4.26322, + 4.285, + 4.25663, + 4.27059, + 4.23069, + 4.24971, + 4.29641, + 4.26077, + 4.22965, + 4.33005, + 4.24435, + 4.30421, + 4.27765, + 4.28617, + 4.3374, + 4.2579, + 4.19155, + 4.29224, + 4.275, + 4.27895, + 4.2813, + 4.21387, + 4.28236, + 4.30258, + 4.23456, + 4.24197, + 4.28329, + 4.28855, + 4.27254, + 4.24467, + 4.2486, + 4.27674, + 4.2098, + 4.21438, + 4.22464, + 4.28206, + 4.20106, + 4.29616, + 4.31549, + 4.27454, + 4.14934, + 4.18408, + 4.20249, + 4.1185, + 4.1766, + 4.25452, + 4.19783, + 4.21276, + 4.23118, + 4.18627, + 4.19913, + 4.2984, + 4.1896, + 4.19412, + 4.21993, + 4.23492, + 4.18918, + 4.21499, + 4.21815, + 4.18563, + 4.27453, + 4.19027, + 4.26236, + 4.25247, + 4.17194, + 4.23365, + 4.24633, + 4.21542, + 4.20471, + 4.11623, + 4.19141, + 4.19803, + 4.13584, + 4.22584, + 4.16821, + 4.22986, + 4.17502, + 4.20157, + 4.2042, + 4.15438, + 4.24046, + 4.15936, + 4.22629, + 4.15451, + 4.16778, + 4.21398, + 4.16408, + 4.27656, + 4.14559, + 4.24873, + 4.2216, + 4.10827, + 4.24151, + 4.14706, + 4.14237, + 4.15029, + 4.24328, + 4.1494, + 4.13806, + 4.16209, + 4.18968, + 4.19807, + 4.18528, + 4.15336, + 4.1921, + 4.21955, + 4.19537, + 4.17252, + 4.05469, + 4.23591, + 4.22929, + 4.16159, + 4.19924, + 4.13351, + 4.17162, + 4.22112, + 4.13728, + 4.19262, + 4.09591, + 4.18966, + 4.19159, + 4.16153, + 4.18441, + 4.24495, + 4.05146, + 4.11675, + 4.14561, + 4.13856, + 4.12771, + 4.13412, + 4.17317, + 4.10954, + 4.10103, + 4.10564, + 4.15103, + 4.06347, + 4.14064, + 4.13554, + 4.16036, + 4.13806, + 4.1411, + 4.13207, + 4.17111, + 4.13161, + 4.10581, + 4.14351, + 4.1418, + 4.12685, + 4.12491, + 4.17053, + 4.17197, + 4.08125, + 4.10622, + 4.08518, + 4.19901, + 4.18373, + 4.11784, + 4.13605, + 4.09085, + 4.16172, + 4.14396, + 4.08926, + 4.09725, + 4.07033, + 4.14794, + 4.09602, + 4.04872, + 4.11956, + 4.13134, + 4.17571, + 4.15728, + 4.04606, + 4.11036, + 4.10569, + 4.09439, + 4.08918, + 4.10652, + 4.04153, + 4.07967, + 4.14483, + 4.09258, + 4.11661, + 4.11553, + 4.05931, + 4.04687, + 4.05492, + 4.00914, + 4.14169, + 4.07154, + 4.01417, + 4.07498, + 4.05379, + 4.07445, + 4.12242, + 4.15678, + 4.09118, + 4.05464, + 4.09967, + 4.10054, + 4.07838, + 4.08205, + 4.10016, + 4.0927, + 4.0386, + 4.03104, + 4.09228, + 4.07933, + 4.03997, + 4.0703, + 4.0725, + 4.12135, + 4.05437, + 4.09376, + 4.10395, + 4.03578, + 4.05649, + 4.06444, + 3.99069, + 4.07636, + 4.06502, + 4.01864, + 4.09135, + 4.07911, + 4.06304, + 4.07942, + 4.00587, + 3.98571, + 4.01844, + 4.01845, + 4.0133, + 4.06635, + 4.05238, + 4.0415, + 4.08197, + 4.06864, + 4.06148, + 4.02985, + 4.1108, + 3.99637, + 4.02393, + 4.03333, + 4.00233, + 4.01089, + 3.99421, + 4.01976, + 3.98557, + 4.02879, + 
4.02915, + 3.98361, + 4.01303, + 3.99182, + 4.01082, + 4.02917, + 3.98966, + 4.03798, + 3.98693, + 4.02806, + 3.9804, + 3.99154, + 3.95308, + 4.06131, + 3.98503, + 4.02242, + 4.04947, + 4.04755, + 4.05749, + 4.01964, + 4.04691, + 4.01903, + 4.00368, + 4.0223, + 3.96534, + 3.94413, + 3.95022, + 3.91459, + 4.01865, + 4.01447, + 4.01825, + 4.04712, + 3.90945, + 4.01035, + 3.93134, + 4.02347, + 4.0289, + 4.01944, + 4.02268, + 4.00379, + 3.98438, + 3.98494, + 4.00751, + 4.00539, + 4.01471, + 3.97883, + 3.96691, + 3.98118, + 3.95196, + 3.96805, + 3.9616, + 3.91135, + 3.9818, + 3.95048, + 3.96692, + 4.04797, + 3.95094, + 3.98129, + 4.00291, + 3.94687, + 3.99493, + 3.99943, + 3.91944, + 4.02828, + 3.97374, + 3.9849, + 4.02134, + 3.8844, + 4.0135, + 3.93749, + 3.9895, + 3.89734, + 3.91075, + 3.95003, + 3.94921, + 3.9051, + 3.86905, + 3.99393, + 3.95241, + 3.96172, + 3.99877, + 3.91178, + 3.97539, + 3.91908, + 3.989, + 3.95961, + 3.91376, + 3.89508, + 3.94791, + 3.85501, + 3.92824, + 3.9345, + 3.91217, + 3.91427, + 3.93805, + 3.93775, + 3.93593, + 4.00061, + 3.99358, + 3.85265, + 3.92745, + 3.86778, + 3.88336, + 3.91641, + 3.86977, + 3.94184, + 3.99253, + 3.9565, + 3.90893, + 3.95547, + 3.91539, + 4.00609, + 3.94149, + 3.88706, + 3.88884, + 3.87887, + 3.84859, + 3.96994, + 3.83642, + 3.91187, + 3.93243, + 3.99307, + 3.94405, + 3.89238, + 3.85897, + 3.90837, + 3.94427, + 3.89752, + 3.90644, + 3.91271, + 3.86256, + 3.94143, + 3.89318, + 3.94167, + 3.86062, + 3.88939, + 3.86926, + 3.92992, + 3.89863, + 3.89253, + 3.87386, + 3.7964, + 3.92208, + 3.89098, + 3.86265, + 3.83529, + 3.88205, + 3.89735, + 3.88953, + 3.89208, + 3.87159, + 3.87154, + 3.85348, + 3.84535, + 3.81758, + 3.9064, + 3.92085, + 3.91365, + 3.83899, + 3.86635, + 3.87412, + 3.83715, + 3.86589, + 3.82874, + 3.87186, + 3.96878, + 3.88596, + 3.86261, + 3.84512, + 3.87305, + 3.93143, + 3.8972, + 3.91724, + 3.82514, + 3.87908, + 3.84294, + 3.87977, + 3.85227, + 3.88875, + 3.83649, + 3.91289, + 3.75757, + 3.90332, + 3.84783, + 3.78191, + 3.82763, + 3.87901, + 3.8072, + 3.94452, + 3.89707, + 3.82348, + 3.75937, + 3.80237, + 3.83533, + 3.84014, + 3.79384, + 3.88295, + 3.84588, + 3.82935, + 3.84494, + 3.8517, + 3.83153, + 3.84037, + 3.89638, + 3.80366, + 3.8738, + 3.79322, + 3.80552, + 3.80024, + 3.84643, + 3.84107, + 3.81869, + 3.87334, + 3.79885, + 3.89891, + 3.86192, + 3.83541, + 3.84327, + 3.84301, + 3.77504, + 3.83437, + 3.78309, + 3.73592, + 3.78098, + 3.80711, + 3.79688, + 3.79451, + 3.78697, + 3.81944, + 3.8357, + 3.78419, + 3.84716, + 3.78422, + 3.80811, + 3.81015, + 3.78557, + 3.79856, + 3.80035, + 3.80803, + 3.79067, + 3.78887, + 3.70707, + 3.81911, + 3.80337, + 3.86852, + 3.8238, + 3.79076, + 3.817, + 3.80191, + 3.86436, + 3.79506, + 3.77135, + 3.71988, + 3.76742, + 3.76852, + 3.79947, + 3.74223, + 3.82796, + 3.80137, + 3.75179, + 3.85419, + 3.74153, + 3.75233, + 3.74222, + 3.77405, + 3.76368, + 3.75689, + 3.77549, + 3.72838, + 3.79685, + 3.7622, + 3.74174, + 3.81635, + 3.81354, + 3.76734, + 3.79697, + 3.73373, + 3.78578, + 3.72265, + 3.78478, + 3.77295, + 3.77003, + 3.80455, + 3.73715, + 3.73299, + 3.75412, + 3.77077, + 3.80284, + 3.69181, + 3.7611, + 3.77744, + 3.67717, + 3.76498, + 3.72482, + 3.71854, + 3.78029, + 3.73392, + 3.73919, + 3.72154, + 3.72539, + 3.83116, + 3.71476, + 3.75519, + 3.75007, + 3.70735, + 3.71681, + 3.7788, + 3.62798, + 3.77322, + 3.6499, + 3.82058, + 3.70896, + 3.73358, + 3.6799, + 3.74943, + 3.65681, + 3.70177, + 3.77954, + 3.72156, + 3.72226, + 3.68523, + 3.68692, + 3.67229, + 3.7438, + 3.67946, + 3.69673, 
+ 3.66724, + 3.6744, + 3.78139, + 3.7027, + 3.71637, + 3.68019, + 3.71413, + 3.63249, + 3.70117, + 3.70714, + 3.64921, + 3.71662, + 3.67793, + 3.61612, + 3.69623, + 3.66664, + 3.68843, + 3.71517, + 3.80243, + 3.68301, + 3.73884, + 3.63722, + 3.64617, + 3.71635, + 3.70133, + 3.66793, + 3.66688, + 3.69307, + 3.69747, + 3.66167, + 3.68218, + 3.70806, + 3.67807, + 3.69406, + 3.65958, + 3.66385, + 3.68838, + 3.65491, + 3.67502, + 3.693, + 3.67065, + 3.67303, + 3.62493, + 3.71113, + 3.66078, + 3.60537, + 3.66142, + 3.66626, + 3.66495, + 3.66852, + 3.69801, + 3.63677, + 3.62982, + 3.64909, + 3.62899, + 3.58792, + 3.65804, + 3.6867, + 3.67791, + 3.63415, + 3.62693, + 3.63352, + 3.59584, + 3.62589, + 3.59005, + 3.65756, + 3.67979, + 3.6218, + 3.61814, + 3.74461, + 3.65376, + 3.69396, + 3.70908, + 3.58418, + 3.60069, + 3.69807, + 3.6059, + 3.71573, + 3.57689, + 3.61656, + 3.55108, + 3.63637, + 3.66366, + 3.62931, + 3.62951, + 3.65221, + 3.58482, + 3.60868, + 3.66425, + 3.65118, + 3.67675, + 3.658, + 3.61976, + 3.64246, + 3.62331, + 3.61776, + 3.62874, + 3.62721, + 3.59866, + 3.61873, + 3.5489, + 3.70696, + 3.57469, + 3.57608, + 3.64923, + 3.53588, + 3.61134, + 3.58014, + 3.6154, + 3.62417, + 3.60499, + 3.57437, + 3.59862, + 3.6083, + 3.56258, + 3.54283, + 3.48789, + 3.58356, + 3.54743, + 3.54125, + 3.68133, + 3.55024, + 3.62022, + 3.50064, + 3.52001, + 3.55301, + 3.55878, + 3.62301, + 3.61296, + 3.53876, + 3.55563, + 3.56008, + 3.53872, + 3.5625, + 3.52189, + 3.52659, + 3.52789, + 3.53299, + 3.50062, + 3.55139, + 3.54653, + 3.52656, + 3.54409, + 3.59934, + 3.56251, + 3.49642, + 3.54057, + 3.51033, + 3.50881, + 3.56371, + 3.50959, + 3.47596, + 3.4983, + 3.50324, + 3.51161, + 3.49018, + 3.45379, + 3.4568, + 3.4709, + 3.39537, + 3.4726, + 3.45765, + 3.46488, + 3.42513, + 3.4203, + 3.51239, + 3.49464, + 3.49605, + 3.47994, + 3.43017, + 3.49244, + 3.4508, + 3.45262, + 3.48298, + 3.43508, + 3.41518, + 3.49, + 3.40892, + 3.42355, + 3.49253, + 3.41237, + 3.38292, + 3.37708, + 3.45369, + 3.43094, + 3.42157, + 3.42184, + 3.40303, + 3.38357, + 3.32032, + 3.43462, + 3.42763, + 3.4259, + 3.41536, + 3.35857, + 3.36072, + 3.38797, + 3.38809, + 3.3164, + 3.39759, + 3.33031, + 3.38347, + 3.40914, + 3.3216, + 3.3373, + 3.33471, + 3.42567, + 3.43624, + 3.31601, + 3.35842, + 3.30376, + 3.3755, + 3.30036, + 3.304, + 3.34693, + 3.30717, + 3.34916, + 3.37777, + 3.33521, + 3.3354, + 3.33662, + 3.27124, + 3.3539, + 3.39383, + 3.37248, + 3.32546, + 3.28574, + 3.35235, + 3.34408, + 3.34222, + 3.3303, + 3.34022, + 3.27893, + 3.32112, + 3.30557, + 3.24484, + 3.29785, + 3.26682, + 3.22714, + 3.28872, + 3.30816, + 3.25746, + 3.29812, + 3.2934, + 3.3574, + 3.22733, + 3.28921, + 3.33915, + 3.21852, + 3.27923, + 3.23888, + 3.29058, + 3.20529, + 3.23681, + 3.26328, + 3.28397, + 3.30838, + 3.26096, + 3.2749, + 3.258, + 3.28091, + 3.27164, + 3.25485, + 3.26296, + 3.24127, + 3.26696, + 3.26689, + 3.21262, + 3.22802, + 3.26266, + 3.22859, + 3.28781, + 3.2253, + 3.23549, + 3.28202, + 3.30797, + 3.22898, + 3.17838, + 3.22148, + 3.21341, + 3.23912, + 3.19721, + 3.18832, + 3.2565, + 3.21436, + 3.1865, + 3.22391, + 3.20155, + 3.24919, + 3.23574, + 3.18696, + 3.17537, + 3.14401, + 3.20485, + 3.20609, + 3.17466, + 3.1378, + 3.15216, + 3.19468, + 3.15816, + 3.14527, + 3.19374, + 3.1484, + 3.20494, + 3.16096, + 3.15878, + 3.17442, + 3.24439, + 3.20999, + 3.16619, + 3.07025, + 3.1159, + 3.25497, + 3.18261, + 3.20949, + 3.15191, + 3.14302, + 3.04797, + 3.12089, + 3.12873, + 3.13918, + 3.12088, + 3.16562, + 3.06367, + 3.17184, + 3.12916, + 3.12642, 
+ ... [several thousand additional numeric entries continue here, one per `+` diff line in the original patch, decreasing steadily from roughly 3.15 to 2.05; they appear to be per-step loss values in an added golden-values test file] ...
2.05959, + 2.13629, + 2.12439, + 2.113, + 2.15838, + 2.07767, + 2.14023, + 2.06465, + 2.14326, + 2.10932, + 2.11235, + 2.15571, + 2.11715, + 2.11077, + 2.08572, + 2.16581, + 2.06708, + 2.08967, + 2.09113, + 2.03634, + 2.11875, + 2.09162, + 2.10286, + 2.09849, + 2.13724, + 2.03559, + 2.15476, + 2.05496, + 2.10161, + 2.12889, + 2.10539, + 2.10914, + 2.13, + 2.1522, + 2.19162, + 2.12216, + 2.08058, + 2.08741, + 2.09026, + 2.11781, + 2.1328, + 2.08103, + 2.12144, + 2.13464, + 2.13409, + 2.05673, + 2.14685, + 2.12839, + 2.09789, + 2.11096, + 2.03408, + 2.1277, + 2.0641, + 2.08126, + 2.03025, + 2.13796, + 2.07861, + 2.08853, + 2.16225, + 2.05343, + 2.05362, + 2.1201, + 2.19761, + 2.06776, + 2.09517, + 2.06562, + 2.0837, + 2.07416, + 2.07223, + 2.09019, + 2.10433, + 2.10541, + 2.08951, + 2.0656, + 2.10961, + 2.19401, + 2.08729, + 2.1336, + 2.10931, + 2.12852, + 2.06295, + 2.12389, + 2.13807, + 2.10564, + 2.08134, + 2.03201, + 2.06256, + 2.13122, + 2.0748, + 2.12925, + 2.13271, + 2.08649, + 2.10411, + 2.08313, + 2.0844, + 2.06736, + 2.10034, + 2.02649, + 2.11708, + 2.11577, + 2.10454, + 2.07515, + 2.15633, + 2.04952, + 2.05541, + 2.1335, + 2.14564, + 2.13752, + 2.1232, + 2.08976, + 2.10063, + 2.08379, + 2.18628, + 2.17248, + 2.10656, + 2.10485, + 2.10782, + 2.11629, + 2.08295, + 2.09438, + 2.0461, + 2.11415, + 2.09651, + 2.04462, + 2.05152, + 2.06941, + 2.11877, + 2.08115, + 2.10382, + 2.09713, + 2.13192, + 2.11901, + 2.12414, + 2.14095, + 2.05162, + 2.04336, + 2.06538, + 2.13317, + 2.08047, + 2.12775, + 2.16373, + 2.14333, + 2.09389, + 2.13983, + 2.05974, + 2.06538, + 2.13546, + 2.07594, + 2.08922, + 2.05947, + 2.1159, + 2.1085, + 2.12799, + 2.09804, + 2.09748, + 2.13617, + 2.08942, + 2.12746, + 2.18929, + 2.07228, + 2.04472, + 2.05019, + 2.13376, + 2.13808, + 2.06058, + 2.11357, + 2.14014, + 2.14083, + 2.11342, + 2.10486, + 2.08908, + 2.14961, + 2.0871, + 2.04269, + 2.07421, + 2.13873, + 2.12728, + 2.1059, + 2.10184, + 2.13237, + 2.02594, + 2.1117, + 2.10417, + 2.06541, + 2.08943, + 2.11647, + 2.10221, + 2.08875, + 2.09492, + 2.1144, + 2.10078, + 2.10404, + 2.13708, + 2.16025, + 2.04102, + 2.11573, + 2.16445, + 2.13012, + 2.13756, + 2.04568, + 2.10701, + 2.1444, + 2.13497, + 2.13023, + 2.06821, + 2.09004, + 2.06164, + 2.12677, + 2.1306, + 2.17549, + 2.14337, + 1.97909, + 2.08921, + 2.07469, + 2.10392, + 2.03888, + 2.06376, + 2.12682, + 2.0744, + 2.11495, + 2.13959, + 2.0988, + 2.13658, + 2.0542, + 2.11604, + 2.08743, + 2.13097, + 2.05898, + 2.07154, + 2.00648, + 2.13888, + 2.16212, + 2.06639, + 2.08285, + 2.09566, + 2.1004, + 2.09767, + 2.11408, + 2.11714, + 2.07545, + 2.10731, + 2.09629, + 2.09582, + 2.06628, + 2.12314, + 2.10698, + 2.10181, + 2.13564, + 2.03563, + 2.08675, + 2.02621, + 2.15156, + 2.10211, + 2.17107, + 2.08302, + 2.08706, + 2.0643, + 2.08192, + 2.15243, + 2.11812, + 2.03822, + 2.07945, + 2.06443, + 2.12322, + 2.09557, + 2.04426, + 2.10083, + 2.11102, + 2.04523, + 2.08589, + 2.0738, + 2.06606, + 2.08098, + 2.13841, + 2.15132, + 2.18142, + 2.01625, + 2.11072, + 2.13764, + 2.06693, + 2.03944, + 2.12171, + 2.11775, + 2.11287, + 2.08698, + 2.07643, + 2.07805, + 2.04208, + 2.0846, + 2.08954, + 2.1007, + 2.07633, + 2.09357, + 2.15145, + 2.03944, + 2.13708, + 2.12186, + 2.13552, + 2.0563, + 2.08474, + 2.10664, + 2.08035, + 2.07747, + 2.13382, + 2.12754, + 2.11104, + 2.11554, + 2.12822, + 2.08551, + 2.10757, + 2.11655, + 2.04381, + 2.06609, + 2.15029, + 2.11813, + 2.05769, + 2.10855, + 2.09565, + 2.14681, + 2.06712, + 2.14611, + 2.10404, + 2.07452, + 2.14771, + 2.09639, + 2.07964, + 
2.11627, + 2.06014, + 2.08635, + 2.05488, + 2.01871, + 2.0961, + 2.13904, + 2.09139, + 2.05184, + 2.11013, + 2.09804, + 2.08198, + 2.07202, + 2.0249, + 2.08768, + 2.07607, + 2.04796, + 2.06937, + 2.1416, + 2.09828, + 2.07378, + 1.98903, + 2.17028, + 2.09999, + 2.11408, + 2.12836, + 2.16324, + 2.10701, + 2.09383, + 2.13008, + 2.10959, + 2.0722, + 2.1232, + 2.08331, + 2.11982, + 2.08524, + 2.06727, + 2.15084, + 2.1194, + 2.12956, + 2.08734, + 2.04497, + 2.09508, + 2.08397, + 2.1124, + 2.08193, + 1.98146, + 2.08651, + 2.0249, + 2.05506, + 2.05229, + 2.05008, + 2.08448, + 1.99079, + 2.09303, + 2.06631, + 2.09303, + 2.07354, + 2.09196, + 2.09489, + 2.07874, + 2.09201, + 2.16335, + 2.0502, + 2.07131, + 2.04835, + 2.06584, + 2.07688, + 2.13008, + 2.06124, + 2.12235, + 2.12116, + 2.13997, + 2.12582, + 2.18375, + 2.10301, + 2.05615, + 2.07228, + 2.09195, + 2.0463, + 1.97925, + 2.15292, + 2.01689, + 2.06506, + 2.0327, + 2.09565, + 2.12951, + 2.04255, + 2.09192, + 2.07481, + 2.0485, + 2.08095, + 2.06796, + 2.05202, + 2.07413, + 2.01706, + 2.10438, + 2.04484, + 2.02036, + 2.06866, + 2.10875, + 2.09371, + 2.13349, + 2.06631, + 2.08181, + 2.14259, + 2.09199, + 2.04041, + 2.13474, + 2.08385, + 2.05325, + 2.09975, + 2.12255, + 2.0704, + 2.13144, + 2.09484, + 2.08705, + 2.15514, + 2.11261, + 2.11636, + 2.15667, + 2.0404, + 2.06174, + 2.03463, + 2.00406, + 2.03327, + 2.09417, + 2.13681, + 1.96806, + 2.12661, + 2.0948, + 2.0926, + 2.06922, + 2.09639, + 2.05791, + 2.07714, + 2.13913, + 2.02277, + 2.06623, + 2.13421, + 2.1062, + 2.07541, + 2.12336, + 2.06514, + 2.05075, + 2.07548, + 2.12557, + 2.14924, + 2.11018, + 2.0842, + 2.14355, + 2.08738, + 2.13799, + 2.09062, + 2.04969, + 2.08582, + 2.10324, + 2.03572, + 2.05147, + 2.00502, + 2.07141, + 1.99557, + 2.13894, + 2.1553, + 2.06648, + 2.05819, + 2.08383, + 2.11133, + 2.02196, + 2.10783, + 2.02858, + 2.03358, + 2.06072, + 2.0359, + 2.08323, + 2.04802, + 2.11395, + 2.13524, + 2.11736, + 2.07258, + 2.08804, + 2.11794, + 2.13645, + 2.0996, + 2.06315, + 2.05538, + 2.09322, + 2.10632, + 2.114, + 2.12489, + 2.07014, + 2.11277, + 2.01848, + 2.08928, + 2.05211, + 2.13821, + 2.12306, + 2.05305, + 2.09285, + 2.05594, + 2.16263, + 2.0912, + 2.11417, + 2.10779, + 2.07809, + 2.13621, + 2.05704, + 2.01261, + 2.08016, + 2.12863, + 2.06718, + 2.10976, + 2.13463, + 2.14882, + 2.0966, + 2.06652, + 2.07969, + 2.04107, + 2.02419, + 2.09575, + 2.12857, + 2.04398, + 2.11785, + 2.08828, + 2.04959, + 2.06058, + 2.08635, + 2.08974, + 2.03504, + 2.1456, + 2.17049, + 2.02768, + 2.09823, + 2.05754, + 2.07887, + 2.1078, + 2.08457, + 2.12408, + 2.0954, + 2.07639, + 2.09045, + 2.05784, + 2.04278, + 2.09548, + 2.1087, + 2.1437, + 2.09094, + 2.07874, + 2.01493, + 2.02804, + 2.01007, + 2.04847, + 2.12547, + 2.11514, + 2.11946, + 2.1125, + 2.07157, + 2.111, + 2.13207, + 2.0967, + 2.08252, + 2.08888, + 2.05647, + 2.05834, + 2.16022, + 2.04922, + 2.06841, + 2.07677, + 2.06226, + 2.09475, + 2.0168, + 2.12406, + 2.06325, + 2.09587, + 2.03052, + 2.08313, + 2.084, + 2.10075, + 2.05824, + 2.09606, + 2.11564, + 2.05424, + 2.12791, + 2.10788, + 2.11386, + 2.12504, + 2.13182, + 2.05432, + 2.11362, + 2.10827, + 2.11317, + 2.07054, + 2.0865, + 2.08514, + 2.09255, + 2.12185, + 2.08077, + 2.076, + 2.10649, + 2.07883, + 2.02817, + 2.0122, + 2.16202, + 2.11263, + 1.97946, + 1.99947, + 2.03089, + 2.13528, + 2.07286, + 2.13223, + 2.08395, + 2.15577, + 2.04823, + 2.1056, + 2.0594, + 2.05308, + 2.07569, + 2.00582, + 2.18676, + 2.03374, + 2.03684, + 2.08538, + 2.07424, + 2.10281, + 2.07143, + 2.09961, + 2.11097, + 
2.07543, + 2.00702, + 2.03751, + 2.12102, + 2.04582, + 2.10064, + 2.01073, + 2.11498, + 2.13712, + 2.05089, + 2.0584, + 2.11574, + 2.14152, + 2.09001, + 2.08799, + 2.11396, + 2.04485, + 2.07874, + 2.06325, + 2.06574, + 2.15556, + 2.10324, + 2.08869, + 2.10685, + 2.04254, + 2.07161, + 2.01449, + 2.08847, + 2.0733, + 2.0586, + 2.01824, + 2.10437, + 2.19663, + 2.05156, + 2.09629, + 2.13721, + 2.02461, + 2.11276, + 2.06099, + 2.06829, + 2.09166, + 2.07752, + 2.07912, + 2.10421, + 2.10106, + 2.08491, + 2.07528, + 2.15454, + 2.04691, + 2.07905, + 2.11661, + 2.0584, + 2.03592, + 2.08157, + 2.15897, + 2.07329, + 2.11183, + 2.04339, + 2.04438, + 2.03336, + 2.13214, + 2.06406, + 2.08607, + 2.09633, + 2.01343, + 2.04247, + 2.01893, + 2.06765, + 2.12042, + 2.06529, + 2.00884, + 2.06082, + 2.01918, + 2.20488, + 2.07777, + 2.07087, + 2.15486, + 2.14038, + 2.01405, + 2.01239, + 2.08214, + 2.01641, + 2.09813, + 2.042, + 2.08089, + 2.16437, + 2.09584, + 2.00511, + 2.11407, + 2.06171, + 2.10425, + 2.07342, + 2.08236, + 2.0627, + 2.08872, + 2.0751, + 2.07786, + 2.09862, + 2.13165, + 2.10163, + 2.08189, + 2.07655, + 2.10482, + 2.08075, + 2.05504, + 2.14323, + 2.04128, + 2.07747, + 2.12379, + 2.07758, + 2.06598, + 1.99411, + 2.09964, + 2.12168, + 2.12594, + 2.03914, + 2.13376, + 2.18517, + 2.05919, + 2.04488, + 2.0858, + 2.06392, + 2.11487, + 2.03378, + 2.09504, + 1.99732, + 2.02115, + 2.06633, + 2.08621, + 2.11161, + 2.02401, + 2.07989, + 2.04353, + 2.07797, + 2.08321, + 2.10694, + 2.08116, + 2.08013, + 2.05166, + 2.03859, + 2.06647, + 2.06128, + 2.0405, + 2.08564, + 2.02637, + 2.1218, + 2.14185, + 2.10984, + 2.08003, + 2.10348, + 2.02095, + 2.13531, + 2.05896, + 2.10359, + 2.01529, + 2.08866, + 2.09921, + 2.03798, + 2.02394, + 2.06774, + 2.0759, + 2.0776, + 2.06026, + 2.11891, + 2.1025, + 2.09668, + 2.03808, + 2.14558, + 2.06375, + 1.99458, + 2.09215, + 2.06062, + 2.06884, + 2.06021, + 2.05503, + 2.09091, + 2.09302, + 2.0515, + 2.08263, + 2.05106, + 2.10749, + 2.10874, + 2.08487, + 2.01956, + 2.07787, + 2.05804, + 2.01602, + 2.1156, + 2.08484, + 2.07253, + 2.06774, + 2.11448, + 2.00769, + 2.04023, + 2.06195, + 2.04073, + 2.12735, + 2.07933, + 2.12628, + 2.06697, + 2.11568, + 2.06734, + 2.13341, + 2.06596, + 2.07189, + 1.99975, + 2.13733, + 2.0662, + 2.14758, + 2.09966, + 2.09943, + 2.07907, + 2.11264, + 2.09428, + 2.07668, + 2.08417, + 2.1009, + 2.10719, + 2.07278, + 2.1406, + 2.03982, + 2.06965, + 2.01863, + 2.07975, + 2.14794, + 2.12445, + 2.02001, + 2.11883, + 2.04336, + 2.03164, + 2.07358, + 2.11727, + 2.12822, + 2.05488, + 2.00583, + 2.01671, + 2.07008, + 2.04864, + 2.15423, + 2.11196, + 2.13013, + 2.18329, + 2.12132, + 2.10072, + 2.07513, + 2.11864, + 2.10071, + 2.06849, + 1.98953, + 2.03667, + 2.01311, + 2.14559, + 2.03179, + 2.09717, + 2.0781, + 2.04418, + 2.0241, + 2.07223, + 2.07765, + 2.06816, + 2.0047, + 2.09235, + 2.06072, + 2.01874, + 2.07433, + 2.03177, + 2.07782, + 2.02207, + 2.02828, + 2.03052, + 2.08796, + 2.04217, + 2.07722, + 2.00231, + 2.08325, + 2.06856, + 2.06138, + 2.04988, + 2.10389, + 2.06896, + 2.07199, + 2.10403, + 2.14834, + 2.146, + 2.07495, + 2.15474, + 2.01435, + 2.02295, + 2.07418, + 2.05188, + 1.95005, + 2.04698, + 2.0027, + 2.09133, + 2.06517, + 2.11931, + 2.05626, + 2.15348, + 2.07157, + 2.06836, + 2.02424, + 2.05232, + 2.11096, + 2.06014, + 2.07044, + 2.09761, + 2.04773, + 2.04677, + 2.00572, + 2.07806, + 2.04695, + 2.07245, + 2.08196, + 2.09445, + 2.01205, + 2.06319, + 2.04123, + 2.06795, + 2.03582, + 2.03007, + 2.10083, + 2.11105, + 2.12536, + 2.10771, + 2.09022, + 
2.08695, + 2.02961, + 2.06678, + 2.07391, + 2.09108, + 2.08101, + 2.05321, + 2.03353, + 2.0768, + 2.11662, + 2.09157, + 2.06999, + 2.02295, + 2.07998, + 2.1274, + 2.05929, + 2.0327, + 2.05993, + 2.05613, + 2.05721, + 2.03967, + 2.08017, + 1.99532, + 2.15504, + 2.08392, + 2.05929, + 2.08824, + 2.05432, + 2.05738, + 2.02724, + 1.9721, + 2.05708, + 2.11622, + 2.00563, + 2.02918, + 2.10931, + 2.06615, + 2.05428, + 2.05104, + 2.06887, + 2.10398, + 1.99669, + 2.10738, + 2.05644, + 2.05772, + 2.07513, + 2.08181, + 2.14405, + 2.15466, + 2.10755, + 2.11731, + 2.07633, + 2.06804, + 2.05887, + 2.08575, + 2.03062, + 2.0421, + 2.0979, + 2.05685, + 2.11896, + 2.01023, + 2.12295, + 2.12157, + 2.05898, + 2.12495, + 2.10141, + 2.0376, + 2.05051, + 2.02397, + 2.08365, + 2.10829, + 2.01454, + 2.00711, + 2.06005, + 2.09017, + 2.09549, + 2.09088, + 2.08542, + 2.07953, + 2.11315, + 2.00019, + 2.13795, + 2.06708, + 2.05435, + 2.07118, + 2.10171, + 2.08301, + 2.05753, + 2.00449, + 2.06953, + 2.08565, + 2.10364, + 2.02805, + 2.07596, + 2.09671, + 2.08481, + 2.06851, + 2.08965, + 2.09405, + 2.08666, + 2.01672, + 1.99783, + 2.0308, + 2.10783, + 1.98615, + 2.10551, + 2.04035, + 2.12412, + 2.04586, + 2.05379, + 2.08107, + 2.01705, + 2.06461, + 2.07541, + 2.09577, + 2.12469, + 2.09285, + 2.09374, + 2.11407, + 2.07602, + 2.11062, + 2.09319, + 2.03698, + 2.07173, + 2.08843, + 2.10623, + 1.97882, + 2.03307, + 2.11743, + 2.13381, + 1.99917, + 2.06088, + 2.12626, + 2.08097, + 2.11418, + 2.01078, + 2.07393, + 2.10276, + 2.06112, + 2.08514, + 2.13986, + 2.06858, + 1.96141, + 2.06757, + 2.0924, + 2.07449, + 2.09889, + 2.06556, + 2.10549, + 2.09042, + 1.9865, + 2.07955, + 1.9797, + 2.06262, + 2.01997, + 2.08973, + 2.04196, + 2.02348, + 2.13264, + 2.06386, + 2.09811, + 2.03411, + 2.15665, + 2.08293, + 2.071, + 2.07658, + 2.17354, + 2.02568, + 2.06407, + 2.06232, + 2.04192, + 2.11026, + 2.0558, + 2.13428, + 2.05726, + 2.12916, + 2.05409, + 2.0381, + 2.03409, + 2.05967, + 2.11175, + 2.0571, + 2.08, + 2.06239, + 2.0856, + 2.01971, + 2.14144, + 1.99617, + 2.08663, + 2.06458, + 2.02968, + 2.05902, + 2.06709, + 2.09696, + 2.05254, + 2.02575, + 2.01666, + 2.06365, + 2.06849, + 2.03339, + 2.0836, + 2.05389, + 2.06919, + 2.12839, + 2.06191, + 1.9969, + 2.13849, + 2.04207, + 2.03666, + 2.06636, + 2.08137, + 2.08508, + 2.06531, + 2.03684, + 2.05422, + 2.05608, + 2.01764, + 2.08834, + 2.11597, + 2.04752, + 2.13887, + 2.05414, + 2.10016, + 2.08874, + 2.02427, + 2.04, + 2.09702, + 2.06191, + 2.07475, + 2.05225, + 2.07732, + 2.07689, + 2.03459, + 2.10178, + 2.05543, + 2.01174, + 2.01685, + 2.08381, + 2.07526, + 2.04286, + 2.06321, + 2.06589, + 2.01497, + 2.02844, + 1.9941, + 2.07638, + 2.02883, + 2.07611, + 2.07492, + 2.0213, + 1.99648, + 2.07458, + 2.08831, + 2.10314, + 2.06595, + 2.14293, + 2.11275, + 2.08798, + 2.0226, + 2.12569, + 2.05368, + 2.03676, + 2.07185, + 2.0657, + 2.06805, + 2.02539, + 2.13168, + 2.12109, + 2.02806, + 2.17646, + 2.05934, + 2.05101, + 2.0635, + 2.07882, + 2.02287, + 2.06363, + 2.07557, + 2.08147, + 2.09725, + 2.10681, + 2.10097, + 2.04607, + 2.00042, + 2.10639, + 2.02104, + 2.0728, + 2.04873, + 2.10192, + 2.07086, + 2.12973, + 2.05518, + 2.14593, + 2.1289, + 2.1208, + 2.04688, + 2.04163, + 2.11887, + 2.06291, + 2.10193, + 2.05585, + 2.06526, + 2.10719, + 2.06099, + 2.03764, + 1.96667, + 2.07842, + 2.06978, + 2.0467, + 2.06868, + 2.0281, + 2.07606, + 2.06319, + 2.09745, + 2.08347, + 2.02629, + 2.08695, + 2.0741, + 2.13217, + 2.06302, + 2.0969, + 2.11372, + 2.02474, + 2.09705, + 2.09613, + 2.05204, + 2.04801, + 
2.06313, + 2.10968, + 2.01281, + 2.10232, + 2.03633, + 2.05308, + 2.10498, + 2.00901, + 2.0953, + 2.02451, + 2.09715, + 2.10641, + 2.10068, + 2.05326, + 2.12624, + 2.10394, + 2.03133, + 2.05325, + 2.07099, + 2.10652, + 2.07389, + 2.10081, + 2.06438, + 2.04873, + 2.04403, + 2.06912, + 2.1148, + 2.06834, + 1.99483, + 2.05345, + 2.08751, + 2.0224, + 2.04882, + 2.02314, + 2.16878, + 2.09001, + 2.05333, + 2.06419, + 2.0933, + 2.07829, + 2.01932, + 2.05817, + 2.08431, + 2.11285, + 2.00857, + 2.07289, + 1.99443, + 2.08259, + 2.03306, + 2.02887, + 2.03445, + 2.08559, + 2.05949, + 2.02193, + 2.07204, + 2.10583, + 2.11107, + 2.05598, + 2.08288, + 2.03973, + 2.05778, + 2.06878, + 2.14201, + 2.12522, + 2.08545, + 2.08692, + 2.02053, + 2.05446, + 2.08623, + 2.01557, + 2.00816, + 2.05988, + 2.0229, + 2.02391, + 2.10504, + 2.12706, + 2.09158, + 2.05715, + 2.07415, + 2.1255, + 2.03306, + 2.06392, + 2.06344, + 2.08021, + 2.07575, + 1.97882, + 2.00249, + 2.02147, + 1.99094, + 2.1553, + 2.04567, + 1.98475, + 2.10893, + 2.0802, + 2.03024, + 2.0225, + 2.07984, + 2.02403, + 2.04878, + 2.04283, + 2.07964, + 2.11598, + 2.04082, + 2.03682, + 2.137, + 2.09772, + 2.03725, + 2.08016, + 2.03772, + 1.98558, + 2.06352, + 2.04918, + 2.02798, + 2.05669, + 2.0397, + 2.03802, + 2.00055, + 2.0775, + 2.0793, + 2.1328, + 2.10442, + 2.08381, + 2.1243, + 2.06731, + 2.08703, + 2.03377, + 2.06871, + 2.06195, + 2.10511, + 2.05166, + 2.0509, + 2.04504, + 2.0389, + 2.08043, + 2.09295, + 2.04042, + 2.07732, + 2.0989, + 2.04119, + 2.11715, + 2.0662, + 2.03521, + 2.11652, + 2.09343, + 2.09146, + 2.00349, + 2.10837, + 2.11932, + 2.10045, + 2.12766, + 2.11238, + 2.05193, + 2.08805, + 2.08027, + 1.99229, + 2.00739, + 2.07347, + 2.05927, + 2.10553, + 2.06289, + 2.05298, + 2.07148, + 2.02937, + 2.09286, + 2.0625, + 2.04251, + 2.11579, + 2.08493, + 2.0145, + 2.1172, + 1.99018, + 2.10698, + 2.08955, + 2.05902, + 2.01577, + 2.04284, + 2.03211, + 2.09129, + 2.11101, + 2.09873, + 2.10147, + 2.06763, + 2.06895, + 2.07842, + 2.06146, + 2.04676, + 2.04107, + 2.01566, + 2.0244, + 2.08427, + 2.10549, + 2.02203, + 2.11446, + 2.00773, + 2.05271, + 2.08152, + 2.06324, + 2.12073, + 2.05899, + 2.09005, + 2.03802, + 2.08768, + 2.06788, + 2.03647, + 2.09092, + 1.94285, + 2.10432, + 2.10817, + 2.07619, + 2.03425, + 2.00709, + 2.06827, + 2.05093, + 2.07483, + 2.06409, + 2.05012, + 2.04017, + 2.06685, + 2.04528, + 2.05901, + 2.03942, + 2.02023, + 2.09415, + 2.00588, + 2.04256, + 2.06708, + 2.02678, + 2.0221, + 2.05656, + 2.02921, + 2.13808, + 2.07724, + 2.04311, + 2.08102, + 2.08407, + 2.02629, + 2.0513, + 2.06495, + 2.04718, + 2.04385, + 2.04184, + 2.07937, + 1.99661, + 2.03563, + 2.03948, + 2.06068, + 2.10829, + 2.0595, + 2.09556, + 2.11285, + 2.03227, + 2.06781, + 2.05925, + 2.05581, + 2.06333, + 2.06697, + 2.00727, + 2.05655, + 2.11136, + 2.03674, + 2.06544, + 2.12446, + 2.03548, + 2.0911, + 2.06112, + 2.05034, + 2.05249, + 2.06103, + 2.05356, + 2.06695, + 2.09099, + 2.07425, + 2.07788, + 2.09215, + 2.07736, + 1.98757, + 2.03298, + 2.03088, + 2.02213, + 2.08634, + 2.02768, + 1.99756, + 2.14677, + 2.05558, + 2.02496, + 2.09724, + 2.05255, + 2.06716, + 2.07167, + 2.00812, + 2.09066, + 2.06376, + 2.04842, + 2.06851, + 2.07735, + 2.13334, + 2.0587, + 2.04104, + 2.00786, + 2.07433, + 2.05318, + 1.95878, + 2.07099, + 2.03443, + 2.05422, + 2.12209, + 2.07094, + 1.9528, + 2.01506, + 2.05553, + 2.12138, + 2.02508, + 2.07666, + 2.14575, + 2.01951, + 2.04164, + 2.03867, + 2.03378, + 2.09433, + 2.06457, + 2.08161, + 2.09086, + 2.0496, + 2.04918, + 2.06391, + 2.06524, + 
2.04333, + 2.07325, + 2.0304, + 2.06887, + 1.96485, + 2.09435, + 2.05732, + 2.04756, + 2.08311, + 2.05735, + 2.11405, + 2.11355, + 1.98737, + 1.99303, + 2.06603, + 1.98646, + 2.10581, + 2.10562, + 2.02354, + 2.103, + 2.07137, + 2.0457, + 2.00153, + 2.06103, + 2.0997, + 1.99062, + 2.01324, + 2.06253, + 2.06176, + 2.0397, + 2.05751, + 2.06248, + 2.11154, + 2.08294, + 2.07978, + 2.07026, + 2.08019, + 2.03755, + 2.07636, + 2.01067, + 2.02766, + 2.05753, + 2.12263, + 2.05045, + 1.98059, + 2.04864, + 2.04771, + 2.06722, + 2.03609, + 2.06284, + 2.07717, + 2.01665, + 2.08986, + 2.0273, + 2.05682, + 2.03488, + 2.05332, + 2.03322, + 2.05592, + 2.08147, + 2.0479, + 2.1046, + 2.02317, + 2.05165, + 2.05359, + 2.00625, + 2.02435, + 2.02878, + 2.03786, + 2.09736, + 2.05512, + 2.09181, + 2.06442, + 2.05538, + 2.09673, + 2.03222, + 2.09708, + 1.98943, + 2.0283, + 2.05977, + 2.0863, + 2.02144, + 2.06487, + 2.04112, + 2.10147, + 2.0824, + 2.07287, + 2.03416, + 2.0116, + 2.11638, + 2.09206, + 2.08047, + 2.05441, + 2.03693, + 2.04957, + 2.04778, + 2.03492, + 1.96548, + 2.02681, + 2.02874, + 2.07203, + 2.0569, + 1.99965, + 2.03311, + 2.0092, + 2.02598, + 2.05989, + 2.10664, + 2.04568, + 2.03186, + 2.01805, + 2.06315, + 1.99281, + 2.0392, + 2.05607, + 2.04348, + 2.03614, + 2.05212, + 2.09476, + 1.97991, + 2.0256, + 2.04247, + 2.03762, + 2.02747, + 1.98989, + 2.01387, + 2.0662, + 1.97273, + 2.04414, + 2.04068, + 2.14846, + 2.05013, + 2.10822, + 2.10342, + 2.05437, + 2.05571, + 2.1086, + 2.05597, + 2.03278, + 2.09545, + 2.06232, + 2.04632, + 2.0163, + 2.08783, + 2.05287, + 2.05522, + 2.11135, + 2.0458, + 2.12138, + 1.99393, + 2.02124, + 2.08029, + 2.02087, + 2.07313, + 2.03356, + 2.06596, + 2.09844, + 2.03429, + 2.05596, + 1.98228, + 2.07446, + 2.05781, + 1.99759, + 2.07992, + 1.94621, + 2.08207, + 2.06664, + 2.05679, + 2.06798, + 2.02544, + 2.06645, + 2.00403, + 2.03956, + 1.99711, + 2.08653, + 2.00936, + 2.08544, + 2.0267, + 2.03343, + 2.07269, + 2.07503, + 2.0354, + 2.02986, + 2.12732, + 2.10069, + 2.08838, + 2.00378, + 2.03698, + 2.0345, + 2.03579, + 2.03079, + 2.04633, + 2.08341, + 1.99281, + 2.04339, + 2.08322, + 2.04202, + 1.97566, + 2.12464, + 2.08085, + 2.02189, + 2.07332, + 2.11819, + 2.05622, + 2.04107, + 2.05936, + 2.06088, + 2.10049, + 2.08115, + 2.04944, + 2.0799, + 2.01254, + 2.01197, + 2.01803, + 2.06186, + 2.0443, + 2.0118, + 2.15467, + 2.07352, + 2.01528, + 2.03535, + 2.01712, + 2.06954, + 2.01698, + 2.00203, + 2.06967, + 2.07898, + 2.0671, + 2.02714, + 2.06968, + 2.02246, + 2.13574, + 1.99259, + 2.05496, + 2.0191, + 2.04134, + 2.02151, + 2.02575, + 2.00882, + 2.08244, + 2.07441, + 2.0507, + 2.06194, + 2.01666, + 2.03804, + 2.11047, + 2.06599, + 1.98031, + 2.06439, + 2.07867, + 2.03715, + 2.0558, + 2.02979, + 2.01242, + 1.95233, + 2.02884, + 1.97599, + 2.01915, + 2.04814, + 2.04897, + 2.03521, + 2.0504, + 2.06254, + 2.03101, + 2.00247, + 2.04606, + 2.0705, + 2.01914, + 2.06384, + 2.03466, + 2.01895, + 1.99722, + 2.03233, + 2.14209, + 2.13457, + 2.00492, + 2.01353, + 1.98569, + 1.99858, + 2.02839, + 2.01293, + 2.07357, + 2.00096, + 2.0323, + 1.97499, + 2.06599, + 2.06921, + 2.03327, + 2.02488, + 2.04191, + 2.02133, + 2.02351, + 2.00015, + 2.02345, + 1.96638, + 2.02281, + 2.05081, + 1.99942, + 2.06361, + 2.02102, + 2.04005, + 2.09392, + 2.03241, + 2.00798, + 2.0817, + 2.04202, + 2.06015, + 2.01093, + 2.07711, + 2.05408, + 2.11212, + 2.00511, + 2.04476, + 2.0318, + 2.06195, + 2.06481, + 2.11177, + 2.08009, + 1.99903, + 2.09377, + 2.01221, + 2.05325, + 2.0452, + 2.06081, + 1.99355, + 2.05137, + 
2.06812, + 2.0877, + 2.02019, + 2.05333, + 1.97595, + 2.07502, + 2.01471, + 1.99411, + 2.08107, + 2.0588, + 2.0105, + 2.03353, + 2.04271, + 2.02517, + 2.07914, + 2.05705, + 2.01211, + 2.0303, + 2.09696, + 2.0821, + 1.99863, + 1.97906, + 2.05219, + 2.02901, + 2.09172, + 2.07638, + 2.079, + 2.04351, + 1.99277, + 1.96134, + 2.0013, + 2.06079, + 1.99285, + 2.03553, + 2.07931, + 2.08115, + 2.07353, + 2.04599, + 2.0149, + 2.0358, + 2.02745, + 2.0754, + 2.08336, + 2.06918, + 2.06555, + 2.03802, + 2.03622, + 2.05264, + 2.06019, + 2.04436, + 2.0434, + 2.09629, + 2.01639, + 2.05267, + 1.98718, + 2.00768, + 2.0835, + 1.95697, + 2.03776, + 2.04586, + 1.97659, + 2.0237, + 2.0232, + 2.05365, + 2.05695, + 2.06813, + 2.10843, + 2.04927, + 2.04191, + 2.06537, + 2.06218, + 2.06167, + 2.09267, + 2.14703, + 2.05801, + 2.03078, + 2.01405, + 2.04858, + 2.01306, + 2.01265, + 2.06588, + 2.04529, + 2.07559, + 2.02285, + 2.0835, + 2.05909, + 2.06312, + 2.0296, + 2.06669, + 2.04078, + 2.05484, + 2.05034, + 2.05032, + 2.09256, + 2.07644, + 2.10918, + 2.09884, + 2.05171, + 2.05447, + 2.07415, + 1.97931, + 1.99107, + 2.09041, + 2.07007, + 2.12373, + 2.0628, + 2.03133, + 2.02806, + 2.05817, + 2.11746, + 2.03185, + 1.99633, + 2.03181, + 2.06992, + 2.00142, + 2.04983, + 2.08606, + 2.01466, + 2.07301, + 2.0694, + 2.07049, + 2.09433, + 2.05604, + 1.93766, + 2.07719, + 2.06593, + 2.00452, + 2.04133, + 2.02449, + 1.93746, + 2.09304, + 2.05463, + 1.97208, + 2.07886, + 2.08435, + 2.04709, + 2.05548, + 2.05979, + 2.08635, + 2.0245, + 2.11378, + 2.07825, + 2.00529, + 2.01365, + 2.10492, + 2.06886, + 2.12362, + 2.03996, + 2.00802, + 2.0232, + 2.07588, + 2.05648, + 1.99096, + 2.04846, + 2.06835, + 2.10403, + 2.04452, + 2.09195, + 1.9982, + 1.95311, + 2.06445, + 2.0108, + 2.05774, + 2.0647, + 2.0606, + 2.08073, + 2.04388, + 2.05094, + 2.0839, + 2.07656, + 2.00466, + 2.05127, + 1.96307, + 2.08589, + 2.05027, + 2.01888, + 2.03501, + 1.99818, + 2.04141, + 2.06752, + 2.06005, + 2.06424, + 2.09357, + 2.06184, + 2.0651, + 1.98939, + 2.02905, + 2.074, + 2.04499, + 2.02906, + 2.06848, + 2.03097, + 2.13828, + 2.05086, + 2.05244, + 2.03032, + 2.01746, + 2.07007, + 2.01759, + 2.0675, + 2.07511, + 2.08403, + 2.06978, + 2.12505, + 2.05219, + 2.10628, + 2.01007, + 1.99664, + 2.05293, + 2.01147, + 2.04377, + 2.04881, + 2.05149, + 1.98977, + 2.09375, + 2.01582, + 2.05345, + 2.03797, + 1.98496, + 2.00659, + 2.04192, + 2.10839, + 2.02277, + 2.11565, + 2.03522, + 1.99542, + 2.00427, + 2.04391, + 2.00052, + 2.0555, + 2.07215, + 2.08636, + 2.01941, + 2.0739, + 2.02585, + 2.00941, + 2.00431, + 2.0757, + 2.06148, + 2.00521, + 2.0939, + 2.08654, + 2.00003, + 2.09182, + 2.03023, + 2.03517, + 2.01204, + 2.01232, + 2.01482, + 2.01081, + 1.98632, + 1.98401, + 2.04891, + 1.99541, + 1.97905, + 2.07105, + 2.06188, + 2.02913, + 2.02339, + 2.05316, + 2.08183, + 2.01807, + 1.99209, + 2.0713, + 2.1148, + 2.03973, + 1.97343, + 2.05063, + 2.08566, + 2.06206, + 2.08155, + 2.04375, + 2.00931, + 2.06977, + 2.01332, + 2.00786, + 2.05361, + 2.07465, + 2.05162, + 2.02641, + 2.04114, + 2.0394, + 2.07364, + 2.04138, + 1.99877, + 2.06716, + 2.0497, + 2.04435, + 2.03228, + 2.06879, + 2.09824, + 2.05829, + 2.07127, + 1.99953, + 2.12035, + 2.04031, + 2.00151, + 2.00565, + 2.07348, + 2.02206, + 2.08856, + 2.1003, + 2.08671, + 2.0348, + 2.03413, + 2.00235, + 2.05301, + 2.00236, + 2.01938, + 2.03495, + 2.01281, + 2.05153, + 2.03436, + 2.0984, + 2.06466, + 2.05331, + 2.06208, + 1.95656, + 2.07439, + 2.03927, + 2.07195, + 1.94577, + 2.02683, + 2.04671, + 2.0243, + 2.04746, + 1.99379, + 
2.05004, + 2.05325, + 1.95167, + 2.06438, + 1.9819, + 2.06717, + 1.98481, + 2.07661, + 2.06218, + 2.09445, + 2.05715, + 2.08314, + 2.07168, + 2.01358, + 2.02683, + 1.97722, + 1.95312, + 2.04417, + 2.02442, + 2.02347, + 2.07241, + 2.02514, + 2.08622, + 2.04221, + 2.05096, + 2.07314, + 2.13696, + 2.06015, + 2.01742, + 2.0084, + 2.04167, + 2.04772, + 2.00709, + 2.03842, + 2.04394, + 2.03635, + 2.00665, + 2.03504, + 2.01059, + 2.01281, + 2.04627, + 1.99592, + 2.01543, + 2.06817, + 2.01479, + 2.08267, + 2.01821, + 1.99912, + 2.02065, + 1.97842, + 2.04527, + 2.03568, + 2.02168, + 2.04755, + 2.00704, + 2.02188, + 2.03648, + 2.0004, + 2.01286, + 2.06695, + 2.04746, + 2.03476, + 2.01299, + 1.98974, + 2.06906, + 2.01204, + 2.08883, + 2.06575, + 1.95288, + 2.04875, + 2.03387, + 1.97633, + 2.05345, + 2.04138, + 2.02941, + 2.00312, + 2.10963, + 2.0227, + 2.04545, + 2.03884, + 2.0069, + 2.09703, + 2.00674, + 2.03592, + 2.01223, + 2.02784, + 2.04446, + 2.05916, + 2.11052, + 2.09213, + 1.99841, + 1.9766, + 2.04458, + 1.99501, + 2.10247, + 2.066, + 2.02093, + 1.98519, + 2.10046, + 2.02259, + 2.0452, + 2.04717, + 2.0968, + 1.99128, + 1.99461, + 2.04492, + 2.08868, + 1.99449, + 2.05135, + 2.04986, + 2.06184, + 2.03039, + 2.03804, + 2.0274, + 2.02479, + 2.0313, + 2.03745, + 2.04138, + 2.02565, + 2.05005, + 2.06094, + 1.9984, + 2.08405, + 2.11242, + 2.08307, + 2.03924, + 2.08906, + 2.04133, + 2.05965, + 2.02815, + 2.02263, + 2.0009, + 2.00766, + 2.04237, + 2.04047, + 2.08929, + 2.04549, + 1.95894, + 2.05369, + 2.01792, + 2.07557, + 2.02753, + 2.04762, + 1.96677, + 2.01277, + 2.0046, + 2.05989, + 2.02114, + 2.05902, + 2.04022, + 1.99867, + 1.98075, + 2.04126, + 2.03787, + 2.0874, + 2.063, + 2.04377, + 2.04205, + 2.05737, + 1.98219, + 2.06904, + 2.04775, + 2.06803, + 2.01797, + 2.039, + 2.03651, + 2.11954, + 2.06176, + 2.09317, + 2.02388, + 1.99481, + 2.0153, + 2.08242, + 2.05532, + 2.02236, + 2.00758, + 2.04008, + 2.05073, + 1.99605, + 2.02382, + 2.10455, + 1.97817, + 2.04235, + 2.02687, + 2.00991, + 2.02168, + 2.05494, + 2.0512, + 2.05067, + 2.00786, + 2.06875, + 2.0224, + 2.06234, + 2.00912, + 2.09214, + 1.95324, + 2.02738, + 2.08275, + 2.02254, + 2.0369, + 2.05405, + 2.02959, + 2.05703, + 1.99223, + 2.07428, + 2.02973, + 1.97431, + 2.061, + 2.07873, + 2.01556, + 1.98274, + 2.06137, + 2.00247, + 2.0947, + 2.01852, + 2.01967, + 1.94124, + 2.06542, + 2.04619, + 2.04536, + 2.01331, + 2.04072, + 1.99667, + 2.018, + 2.10627, + 2.00543, + 2.06958, + 2.10232, + 2.01031, + 2.01484, + 2.05005, + 2.08926, + 1.99118, + 2.07571, + 2.0442, + 2.01177, + 2.04327, + 2.03287, + 2.08929, + 2.03896, + 2.03296, + 2.05071, + 2.00438, + 1.993, + 2.04854, + 2.01181, + 2.06205, + 2.01158, + 2.00008, + 2.01962, + 2.05425, + 2.04649, + 2.01251, + 2.13246, + 2.02078, + 1.96197, + 1.98832, + 2.03155, + 2.04205, + 2.02571, + 2.03448, + 2.03671, + 1.98112, + 2.07774, + 2.00172, + 1.99759, + 2.10468, + 1.9926, + 2.04203, + 2.04605, + 2.08304, + 1.99226, + 2.01744, + 2.05274, + 2.01254, + 1.98196, + 2.04995, + 2.00141, + 2.02619, + 1.97542, + 2.01756, + 2.05893, + 2.03685, + 2.04299, + 2.03363, + 2.04344, + 2.05253, + 2.04273, + 2.049, + 2.04465, + 2.06437, + 2.05469, + 2.01664, + 2.0528, + 2.03139, + 2.03358, + 2.00775, + 2.13464, + 2.08799, + 1.99273, + 2.03076, + 2.05424, + 2.02467, + 1.99377, + 2.06463, + 2.00243, + 2.04052, + 2.01414, + 1.99525, + 1.98163, + 1.9722, + 2.0066, + 2.02137, + 1.95982, + 2.05045, + 1.96512, + 2.08604, + 2.00693, + 2.04563, + 1.99637, + 2.02522, + 1.95063, + 2.01126, + 1.99196, + 1.96953, + 2.00673, + 
2.11076, + 2.05141, + 2.05908, + 2.03717, + 2.06208, + 1.98347, + 2.04901, + 2.08991, + 2.06519, + 1.94892, + 2.07483, + 2.04106, + 2.0238, + 2.04959, + 2.01121, + 2.03226, + 1.97948, + 2.02006, + 1.98296, + 2.00407, + 2.02294, + 1.99481, + 2.06786, + 2.01331, + 2.06993, + 2.04081, + 1.97166, + 1.96785, + 2.04559, + 1.99974, + 1.98193, + 2.09427, + 2.05862, + 2.06364, + 2.04382, + 2.07245, + 1.97886, + 2.08746, + 2.02099, + 2.0504, + 2.00904, + 2.06181, + 2.03075, + 2.05166, + 2.02199, + 2.06201, + 1.97316, + 2.10181, + 2.01546, + 2.07818, + 2.01619, + 2.07721, + 2.04741, + 2.07659, + 2.02654, + 2.06533, + 2.08106, + 1.98971, + 1.9816, + 2.02453, + 2.10511, + 1.99992, + 2.03092, + 1.95937, + 1.99368, + 2.05773, + 2.02116, + 1.98536, + 2.01015, + 2.10459, + 2.03902, + 2.03918, + 2.03325, + 2.01775, + 2.00205, + 2.04061, + 2.06224, + 2.04991, + 2.13514, + 2.05253, + 2.04615, + 2.01691, + 1.9955, + 2.05995, + 2.10562, + 2.03446, + 1.98969, + 2.05353, + 1.92862, + 2.07712, + 2.02195, + 2.03035, + 2.0617, + 2.04521, + 2.11582, + 2.03336, + 2.1062, + 1.97303, + 2.04044, + 1.97689, + 1.96544, + 2.06958, + 2.07703, + 2.0125, + 2.02929, + 2.04616, + 2.08024, + 1.99276, + 2.03152, + 2.04875, + 2.06501, + 2.04279, + 2.01695, + 2.00081, + 2.01705, + 2.10031, + 2.0991, + 1.99026, + 2.02798, + 2.03765, + 2.04349, + 2.0691, + 1.99352, + 1.96085, + 2.05949, + 1.98782, + 2.00053, + 2.04778, + 2.01161, + 2.0263, + 2.04023, + 2.09427, + 2.0425, + 2.05877, + 2.01403, + 2.02845, + 1.99665, + 2.02719, + 1.98273, + 2.03832, + 2.02678, + 2.05003, + 2.09428, + 1.99382, + 2.01616, + 2.02085, + 2.01399, + 2.05093, + 2.08196, + 2.0974, + 2.00954, + 2.0579, + 2.00367, + 2.04651, + 2.00061, + 1.99142, + 2.09523, + 2.06945, + 1.98428, + 2.05986, + 2.05129, + 1.9787, + 2.04062, + 2.07625, + 2.03406, + 1.98366, + 2.00276, + 2.04209, + 1.99034, + 2.04436, + 2.01854, + 2.07582, + 2.02472, + 2.01564, + 2.04766, + 2.0021, + 2.02958, + 2.06718, + 2.0269, + 2.0562, + 1.98415, + 2.10495, + 2.07558, + 1.97873, + 2.06828, + 2.07391, + 2.04666, + 2.08702, + 2.00299, + 2.03966, + 1.90193, + 2.00991, + 1.96801, + 2.03322, + 2.05742, + 2.08016, + 2.00009, + 2.01803, + 2.05561, + 2.04927, + 2.00996, + 2.07946, + 1.99202, + 2.05029, + 2.05601, + 1.99476, + 2.03286, + 2.08657, + 1.99633, + 2.02739, + 1.98202, + 2.10259, + 1.99573, + 2.00333, + 2.04982, + 2.05528, + 1.99594, + 2.03069, + 2.07108, + 2.0565, + 2.0293, + 2.06936, + 2.05684, + 2.07113, + 2.05184, + 2.05938, + 2.06232, + 2.00901, + 2.0264, + 2.01848, + 2.00885, + 2.04134, + 1.93906, + 2.08677, + 2.02942, + 2.00517, + 2.01085, + 2.00384, + 2.01917, + 2.01199, + 1.99907, + 1.9842, + 1.98772, + 2.05759, + 2.0756, + 2.04736, + 2.04841, + 2.06533, + 2.02209, + 1.95722, + 2.05277, + 2.03147, + 2.01122, + 2.04154, + 1.99118, + 2.02905, + 2.01992, + 2.05153, + 2.00151, + 2.04448, + 2.01624, + 2.03142, + 2.07705, + 1.98829, + 2.05905, + 2.00661, + 2.04719, + 2.04164, + 1.94409, + 2.04687, + 1.99531, + 2.0431, + 1.96737, + 2.08512, + 2.00398, + 2.03257, + 2.04067, + 2.06084, + 2.05831, + 2.05144, + 2.0378, + 1.98551, + 2.00189, + 2.03009, + 1.99709, + 2.02987, + 2.07721, + 2.00797, + 1.98894, + 2.0588, + 1.96312, + 2.03794, + 1.99722, + 2.08, + 2.05966, + 2.00908, + 1.98005, + 1.98886, + 1.99833, + 2.03177, + 1.99676, + 2.06761, + 2.06546, + 1.99675, + 2.00105, + 2.0126, + 2.01483, + 2.03515, + 2.07148, + 2.04988, + 2.02312, + 2.02478, + 2.0675, + 2.00915, + 2.03448, + 2.00931, + 1.96812, + 2.09029, + 2.00158, + 2.02548, + 1.96033, + 2.05469, + 2.08831, + 2.10054, + 2.05097, + 2.06478, 
+ 1.93357, + 1.9862, + 2.03489, + 2.00182, + 1.99074, + 2.05095, + 2.02907, + 1.95065, + 2.04738, + 1.97365, + 2.05899, + 2.01042, + 2.00248, + 1.91584, + 2.02787, + 2.029, + 2.02843, + 1.97224, + 1.98028, + 1.97923, + 2.0349, + 1.97383, + 1.96711, + 2.00871, + 2.04652, + 2.01933, + 2.01334, + 2.02175, + 2.04653, + 2.00607, + 2.12906, + 1.99195, + 2.03293, + 2.07709, + 2.00835, + 1.98402, + 2.02952, + 2.06772, + 2.05982, + 2.05761, + 1.99813, + 2.0301, + 2.01908, + 1.98472, + 2.01914, + 2.08002, + 2.03777, + 2.05484, + 2.04266, + 2.07644, + 2.01995, + 2.00252, + 2.01765, + 2.01819, + 2.01961, + 2.02911, + 1.988, + 2.08838, + 2.0543, + 2.03986, + 2.04175, + 2.11259, + 2.02308, + 2.11121, + 2.00928, + 1.97019, + 2.03228, + 1.99059, + 2.05269, + 2.0406, + 2.0514, + 2.06977, + 2.07301, + 1.98433, + 2.02284, + 2.05447, + 1.9911, + 2.1004, + 2.0019, + 2.04878, + 2.09615, + 2.03017, + 1.96198, + 2.05567, + 2.03783, + 2.0176, + 2.06279, + 2.00846, + 1.9966, + 2.05103, + 1.97235, + 2.03745, + 1.98532, + 1.98366, + 1.99227, + 1.98912, + 1.9981, + 2.00532, + 2.01077, + 2.05767, + 2.02644, + 1.98781, + 2.03154, + 1.96607, + 2.0017, + 2.0502, + 2.05493, + 2.0798, + 2.0474, + 1.98818, + 1.99227, + 2.04269, + 2.03015, + 1.99726, + 2.08021, + 1.95536, + 1.99633, + 2.01104, + 1.9854, + 2.09295, + 2.00914, + 1.98836, + 2.05984, + 2.01752, + 2.01018, + 1.99307, + 2.07742, + 2.0338, + 2.04326, + 2.03325, + 2.06367, + 1.95861, + 2.04643, + 2.04298, + 2.07182, + 1.95904, + 2.06589, + 2.01601, + 2.02384, + 2.05404, + 1.99331, + 2.03091, + 2.03839, + 1.98751, + 1.99061, + 2.06377, + 1.98709, + 1.99511, + 2.02984, + 2.04086, + 1.917, + 2.01041, + 2.01561, + 2.01116, + 2.02548, + 1.97304, + 1.98645, + 2.00927, + 2.01387, + 2.02743, + 1.94947, + 1.97216, + 2.02591, + 2.01813, + 2.02633, + 2.05251, + 1.94656, + 2.02516, + 2.07575, + 2.05024, + 2.07926, + 2.03839, + 2.03793, + 2.03907, + 2.04937, + 2.071, + 2.06587, + 2.03193, + 2.02391, + 2.03961, + 2.02611, + 1.98718, + 2.0064, + 1.95923, + 2.01422, + 2.02635, + 2.01855, + 1.95932, + 1.98137, + 1.9382, + 1.98496, + 2.05682, + 2.00338, + 1.99249, + 2.02971, + 1.98475, + 1.99565, + 2.00011, + 1.98817, + 2.04617, + 1.95292, + 1.96558, + 1.97704, + 1.9639, + 2.00853, + 2.06038, + 1.93902, + 2.03269, + 2.05443, + 2.05108, + 1.97352, + 2.06641, + 1.96112, + 2.08331, + 1.97423, + 2.02683, + 1.97744, + 2.0362, + 2.06564, + 1.99807, + 2.01944, + 2.09912, + 2.08156, + 1.96018, + 2.0293, + 2.0936, + 1.95791, + 2.06562, + 2.04463, + 2.01874, + 1.99582, + 2.05538, + 2.03876, + 1.95537, + 2.0239, + 1.97208, + 2.00811, + 2.05162, + 2.0634, + 1.9526, + 2.06848, + 2.02276, + 1.99694, + 1.99792, + 2.03578, + 2.11844, + 2.09191, + 2.02243, + 1.87811, + 2.02906, + 2.03125, + 2.01584, + 2.05565, + 2.0127, + 2.05311, + 1.99147, + 2.01825, + 1.96421, + 2.00847, + 2.03262, + 2.05404, + 1.99861, + 2.03847, + 2.07007, + 2.08098, + 1.99097, + 1.96965, + 2.01327, + 1.96723, + 2.03507, + 2.01562, + 2.05189, + 2.05747, + 2.03642, + 2.03468, + 2.06061, + 2.09757, + 1.98072, + 2.04695, + 1.94565, + 2.06268, + 2.03412, + 1.93504, + 1.9653, + 2.03721, + 1.93384, + 1.9698, + 2.01241, + 2.05127, + 1.97721, + 2.05221, + 2.07942, + 1.98581, + 2.04671, + 2.03968, + 2.00701, + 1.98215, + 1.96589, + 2.02465, + 2.05796, + 2.03362, + 1.98102, + 2.04755, + 2.01727, + 1.99702, + 1.95521, + 1.97006, + 2.03422, + 2.00421, + 2.12456, + 2.02896, + 1.98881, + 1.98948, + 2.01639, + 1.99763, + 2.06432, + 2.00342, + 2.02628, + 1.94357, + 2.01706, + 2.05078, + 2.05807, + 1.99656, + 1.96201, + 2.00779, + 2.0257, + 
2.03237, + 2.0297, + 2.02753, + 1.95626, + 2.0173, + 2.0552, + 2.01339, + 2.01701, + 2.02015, + 2.01077, + 1.98322, + 1.96444, + 2.03022, + 2.02724, + 2.10411, + 2.00826, + 2.02952, + 2.02855, + 2.07096, + 2.06074, + 2.00696, + 2.08547, + 1.97324, + 1.99811, + 1.96896, + 1.99855, + 1.97778, + 2.01804, + 2.0409, + 2.00016, + 2.05343, + 1.98898, + 2.03514, + 2.04517, + 2.00783, + 1.99026, + 1.97843, + 2.01287, + 2.00309, + 1.99703, + 1.94229, + 2.01806, + 2.00115, + 2.00361, + 1.98432, + 2.03043, + 2.08663, + 1.96306, + 2.0179, + 2.08255, + 2.04953, + 2.03675, + 1.99322, + 2.00494, + 2.03521, + 2.07294, + 2.00984, + 2.01965, + 2.06652, + 1.9971, + 1.98603, + 1.96039, + 2.04443, + 1.98842, + 2.03208, + 1.98713, + 2.0276, + 2.06413, + 1.97517, + 1.94964, + 1.98601, + 2.02599, + 1.96895, + 2.03406, + 2.00392, + 1.94878, + 1.93994, + 2.04878, + 2.02049, + 2.07027, + 2.03959, + 2.03564, + 1.96753, + 2.03455, + 2.04722, + 2.07086, + 1.96425, + 1.9974, + 2.08203, + 1.9998, + 2.00913, + 1.99502, + 2.0213, + 2.04663, + 1.9605, + 2.07072, + 1.97065, + 2.02948, + 2.02303, + 2.07083, + 2.00865, + 1.95834, + 2.05494, + 1.95127, + 1.95866, + 2.03531, + 1.95642, + 2.04075, + 2.00111, + 1.95651, + 2.06501, + 2.04002, + 1.95657, + 2.05644, + 2.03245, + 1.99571, + 2.09864, + 2.05246, + 2.00419, + 1.98986, + 1.99285, + 1.99414, + 1.98582, + 2.05419, + 2.03268, + 1.96084, + 1.96931, + 2.03434, + 2.06422, + 2.02297, + 2.0169, + 1.9922, + 2.02366, + 2.01021, + 1.94237, + 2.0596, + 2.02884, + 1.95473, + 1.97729, + 2.01942, + 1.98257, + 2.00121, + 1.97581, + 1.98864, + 2.07926, + 2.04559, + 2.11119, + 2.0064, + 2.01953, + 2.0561, + 2.0152, + 2.00195, + 2.0488, + 2.05433, + 1.94545, + 1.98894, + 2.03514, + 1.96007, + 2.05129, + 2.00728, + 2.03702, + 1.96445, + 2.02548, + 2.12273, + 2.04321, + 2.01468, + 2.02275, + 1.98088, + 1.98887, + 2.02666, + 2.012, + 2.00707, + 1.9987, + 1.97281, + 2.01063, + 2.00517, + 2.04176, + 2.07291, + 2.02487, + 2.02908, + 2.04452, + 1.9954, + 2.02014, + 2.00692, + 1.98732, + 2.01584, + 2.04199, + 1.98595, + 2.02522, + 1.98916, + 1.97619, + 1.97789, + 2.0126, + 1.99261, + 2.01578, + 2.03327, + 2.04221, + 1.98237, + 2.00512, + 1.92235, + 2.04375, + 2.03261, + 2.06578, + 1.99043, + 2.04664, + 1.93456, + 2.0388, + 1.99526, + 1.99115, + 2.03796, + 2.03547, + 1.96898, + 1.97562, + 2.08045, + 2.02621, + 2.01901, + 2.0653, + 1.99854, + 2.05852, + 2.05129, + 2.02701, + 2.01379, + 2.02948, + 2.00735, + 2.04941, + 1.96573, + 2.01903, + 1.96895, + 1.96195, + 1.97505, + 2.02764, + 1.98727, + 1.99096, + 2.00394, + 2.0805, + 2.04087, + 1.96825, + 1.97602, + 1.95703, + 2.03198, + 1.9142, + 2.03639, + 1.94347, + 2.03689, + 2.00989, + 2.03822, + 1.99745, + 2.03986, + 2.01531, + 2.04774, + 2.02886, + 1.94095, + 1.98422, + 2.02463, + 2.00062, + 2.05377, + 2.00139, + 2.02391, + 2.00514, + 1.99956, + 1.99995, + 1.99346, + 1.98958, + 2.06951, + 2.02386, + 2.04238, + 1.98314, + 2.01808, + 1.98751, + 1.98229, + 1.9959, + 2.02373, + 1.94895, + 1.98692, + 2.10199, + 2.06477, + 1.98143, + 2.00136, + 2.05122, + 1.95947, + 2.04105, + 1.98372, + 1.95131, + 2.01702, + 1.9985, + 1.98936, + 2.05077, + 1.98544, + 1.99829, + 1.99232, + 1.99834, + 1.98451, + 2.05129, + 2.05385, + 2.00879, + 2.03047, + 2.05291, + 2.00253, + 1.95412, + 1.99365, + 1.91888, + 2.01307, + 2.02629, + 1.99914, + 1.95803, + 2.01059, + 1.99322, + 2.01757, + 2.01168, + 2.01442, + 2.03676, + 2.0081, + 1.89199, + 1.97492, + 1.94554, + 2.00253, + 2.02376, + 2.01736, + 2.05809, + 1.95855, + 1.99146, + 1.97251, + 2.01931, + 2.0197, + 2.00076, + 2.0824, + 
1.96626, + 2.00595, + 2.00556, + 1.99692, + 2.00042, + 1.99194, + 2.02848, + 2.01454, + 1.92868, + 2.0128, + 2.01294, + 2.02245, + 2.00355, + 1.97926, + 1.99438, + 2.04544, + 1.98878, + 2.02317, + 2.05832, + 2.05176, + 1.99093, + 2.00458, + 2.09083, + 2.01218, + 2.01488, + 1.98868, + 2.05206, + 2.02418, + 2.04944, + 2.03538, + 1.98035, + 2.03976, + 1.96904, + 1.98689, + 2.00182, + 2.05096, + 2.04869, + 2.00459, + 2.0297, + 2.00987, + 1.98749, + 2.0019, + 2.02971, + 2.03556, + 1.9856, + 2.06113, + 2.03574, + 1.97064, + 2.08041, + 1.96483, + 1.99301, + 1.98006, + 1.9313, + 2.01808, + 2.0258, + 2.03275, + 2.09576, + 1.98446, + 1.98921, + 1.98268, + 1.97382, + 2.03328, + 2.0298, + 2.01399, + 2.06142, + 2.04923, + 2.01043, + 1.9741, + 2.03857, + 2.0282, + 2.0995, + 2.11682, + 2.07535, + 1.98859, + 1.95763, + 1.9381, + 2.04968, + 1.98562, + 2.08763, + 1.94718, + 1.96977, + 2.02407, + 1.97047, + 2.0147, + 1.96208, + 1.90099, + 2.07603, + 2.02276, + 2.00562, + 2.03233, + 2.12088, + 2.06874, + 1.9812, + 1.95639, + 1.98698, + 2.05529, + 1.983, + 2.11055, + 2.01205, + 2.06332, + 2.04293, + 2.02461, + 2.00586, + 2.06079, + 1.97871, + 1.97443, + 2.02281, + 2.00214, + 2.0261, + 1.98808, + 2.06307, + 1.99366, + 1.98239, + 2.00326, + 1.99525, + 2.01102, + 2.03917, + 1.99459, + 2.03149, + 2.04708, + 1.98997, + 1.99754, + 1.97091, + 2.02839, + 1.98442, + 2.06248, + 2.03474, + 2.03616, + 1.97396, + 2.04268, + 1.99204, + 1.95996, + 2.03771, + 2.00482, + 1.95327, + 1.97945, + 2.00126, + 2.04572, + 1.97116, + 2.04714, + 2.0102, + 1.98112, + 1.92874, + 1.95191, + 2.01692, + 1.96376, + 1.98024, + 2.02489, + 1.99766, + 1.99019, + 1.95507, + 2.03374, + 1.91463, + 1.98136, + 1.96572, + 2.04854, + 2.01462, + 1.98584, + 1.97944, + 1.91392, + 1.93925, + 1.97923, + 1.9981, + 1.97254, + 2.05865, + 2.03985, + 2.02978, + 2.00912, + 2.09103, + 2.04664, + 2.03203, + 2.00625, + 2.02695, + 1.9299, + 2.01462, + 2.04031, + 1.98378, + 1.98164, + 2.01099, + 2.04143, + 2.03486, + 2.0398, + 1.99276, + 2.00627, + 2.03088, + 1.93286, + 1.97995, + 1.98387, + 1.96655, + 2.00029, + 1.96476, + 2.0436, + 2.01933, + 2.03058, + 2.00946, + 2.00662, + 1.98321, + 1.96428, + 2.06089, + 2.02815, + 1.97661, + 1.95311, + 1.99788, + 1.98392, + 2.023, + 1.9883, + 2.0231, + 2.01242, + 1.96769, + 2.03766, + 1.98989, + 1.95733, + 2.06986, + 2.02944, + 1.88962, + 1.98596, + 1.96756, + 2.07344, + 1.99616, + 2.07636, + 1.96153, + 2.01993, + 2.006, + 1.98924, + 1.98594, + 2.08265, + 1.99294, + 2.00128, + 2.01888, + 2.00446, + 2.04186, + 2.03706, + 1.98871, + 2.0367, + 1.98992, + 2.00194, + 1.98956, + 2.01477, + 2.07673, + 1.99776, + 2.00791, + 2.00243, + 2.05245, + 2.00527, + 1.89964, + 2.0233, + 2.02567, + 2.0068, + 1.92181, + 1.97317, + 1.95074, + 2.06205, + 1.96365, + 1.99552, + 2.03024, + 2.08255, + 2.00579, + 1.96697, + 1.95575, + 2.05837, + 2.01277, + 2.00968, + 1.95842, + 2.01428, + 1.98785, + 1.92533, + 2.01882, + 2.06527, + 1.96613, + 2.01629, + 2.0061, + 2.01929, + 2.00902, + 1.97217, + 1.97057, + 2.02872, + 1.9562, + 1.93554, + 2.10084, + 1.99287, + 1.99207, + 2.02983, + 2.00123, + 2.03857, + 2.03137, + 1.98541, + 1.95956, + 2.02009, + 1.93708, + 2.02226, + 2.04299, + 1.95262, + 2.03477, + 1.96713, + 2.04649, + 1.96283, + 2.05235, + 1.95168, + 1.99563, + 1.98333, + 1.9804, + 1.96479, + 2.01103, + 1.95921, + 2.02415, + 2.01369, + 1.99571, + 2.01753, + 2.06413, + 2.01131, + 2.01281, + 1.98365, + 2.04805, + 1.98333, + 2.00521, + 2.03218, + 2.00052, + 2.03325, + 2.03395, + 2.01898, + 2.05167, + 2.01596, + 2.02609, + 1.9922, + 2.03392, + 2.01698, + 
1.97777, + 2.00345, + 2.02413, + 1.97269, + 2.01582, + 2.03331, + 1.99219, + 2.00692, + 1.99662, + 1.98049, + 2.00729, + 1.98974, + 2.00085, + 2.02075, + 1.90049, + 2.03939, + 1.9401, + 2.04572, + 1.98253, + 1.95721, + 1.99365, + 2.04621, + 1.9598, + 2.06474, + 1.9597, + 1.99697, + 2.00205, + 2.02449, + 1.9592, + 2.07183, + 2.04893, + 2.00964, + 1.99749, + 1.9637, + 2.02774, + 1.96726, + 1.98985, + 2.02242, + 1.97285, + 2.03987, + 2.00749, + 1.91543, + 2.04369, + 1.94382, + 1.95827, + 1.96691, + 2.00206, + 2.07647, + 2.02042, + 1.98448, + 2.01804, + 1.96448, + 2.03352, + 2.02048, + 1.95061, + 2.03489, + 2.01484, + 2.02283, + 1.95214, + 2.03393, + 2.01868, + 2.03471, + 1.98764, + 2.01705, + 1.95488, + 1.98411, + 2.01061, + 1.97284, + 1.98691, + 2.05997, + 2.00921, + 2.04649, + 1.96603, + 1.98895, + 1.98335, + 2.01348, + 1.95849, + 2.04201, + 2.04699, + 1.98494, + 1.99152, + 2.01163, + 2.03349, + 1.97441, + 1.95745, + 1.94131, + 2.02055, + 2.06058, + 2.03908, + 2.02442, + 2.03803, + 2.00502, + 2.01744, + 2.04546, + 2.07086, + 1.95477, + 2.05745, + 1.97998, + 2.05611, + 1.99976, + 2.04745, + 1.98438, + 2.02153, + 2.01266, + 2.02685, + 1.99237, + 1.95874, + 2.01595, + 2.01275, + 1.99528, + 1.93453, + 2.03881, + 2.042, + 2.0232, + 2.0455, + 1.99861, + 1.99264, + 2.05347, + 1.96142, + 1.97577, + 1.94603, + 2.01496, + 1.93602, + 2.03565, + 1.96889, + 2.01638, + 1.97009, + 1.98204, + 2.00127, + 2.05713, + 2.00223, + 1.97572, + 1.95095, + 1.94675, + 2.03205, + 1.97211, + 1.97383, + 2.02932, + 1.99864, + 1.98542, + 1.93838, + 1.98474, + 2.00468, + 1.90209, + 2.01508, + 2.00664, + 1.9883, + 1.95055, + 2.01114, + 2.06622, + 1.91469, + 2.0693, + 1.99328, + 2.00079, + 1.98355, + 1.9891, + 1.98803, + 1.99355, + 1.97788, + 1.98502, + 1.98553, + 1.94578, + 2.04847, + 1.99754, + 1.99669, + 2.02536, + 1.96085, + 1.9855, + 2.01302, + 2.05116, + 1.99158, + 1.93569, + 1.96444, + 1.98112, + 1.97228, + 2.00323, + 1.97894, + 1.91352, + 2.00361, + 2.04402, + 2.0064, + 2.02979, + 1.98477, + 1.99644, + 2.00115, + 1.95118, + 1.95617, + 1.96624, + 2.05518, + 1.89362, + 2.01568, + 1.9944, + 2.02599, + 2.06907, + 1.93003, + 1.97998, + 1.96448, + 2.02148, + 2.00263, + 1.9826, + 2.00307, + 1.97674, + 2.04795, + 2.01112, + 2.06018, + 1.9703, + 1.97933, + 2.0022, + 1.99355, + 1.98898, + 1.97372, + 2.04092, + 2.01353, + 2.02296, + 1.9766, + 1.9998, + 1.93045, + 2.05486, + 2.03206, + 1.89151, + 1.96828, + 2.03969, + 1.99979, + 2.0169, + 1.97263, + 2.01506, + 1.98855, + 1.97664, + 2.06285, + 1.97189, + 2.02166, + 1.96846, + 1.99084, + 2.01495, + 1.99737, + 1.98845, + 2.04, + 1.89863, + 2.00204, + 2.04437, + 1.9923, + 1.98981, + 1.97009, + 1.9507, + 1.96559, + 1.9867, + 2.05348, + 1.98062, + 2.00027, + 1.95882, + 2.00115, + 1.9907, + 2.00334, + 1.97457, + 2.0031, + 2.00836, + 1.9097, + 1.9315, + 2.00495, + 1.95076, + 1.99167, + 2.02935, + 2.02231, + 1.99844, + 2.06407, + 1.98244, + 1.93732, + 1.94948, + 2.0558, + 2.04316, + 1.99596, + 1.97589, + 1.97237, + 1.99428, + 1.97414, + 2.02602, + 2.01618, + 1.99366, + 1.98207, + 1.98739, + 1.89958, + 1.98187, + 1.98361, + 2.00059, + 2.01874, + 1.96295, + 2.04907, + 2.03307, + 2.03817, + 2.00627, + 1.97757, + 1.99663, + 1.98184, + 1.99729, + 2.00995, + 1.88819, + 1.97794, + 2.00415, + 1.99307, + 2.00314, + 2.02864, + 2.02904, + 1.97873, + 1.97951, + 1.9679, + 1.9739, + 2.02483, + 1.94875, + 1.97001, + 2.02303, + 1.97568, + 2.03039, + 1.972, + 1.96526, + 1.95852, + 1.99328, + 1.96262, + 2.01939, + 2.00978, + 2.03351, + 2.04386, + 2.01462, + 1.98075, + 1.91643, + 1.9798, + 2.00099, + 
2.01135, + 2.01561, + 2.00976, + 1.96302, + 1.96523, + 2.03429, + 2.03473, + 1.92108, + 2.03141, + 2.09516, + 2.00677, + 2.03369, + 1.99738, + 1.98227, + 1.9916, + 2.02027, + 2.04128, + 2.05798, + 2.0523, + 1.97825, + 2.07077, + 1.95376, + 2.02397, + 1.98578, + 1.99831, + 1.94968, + 2.01742, + 2.0109, + 1.96485, + 1.95675, + 1.98677, + 2.04235, + 2.04987, + 1.94219, + 2.05676, + 2.02581, + 2.03068, + 1.99321, + 2.01793, + 1.90772, + 2.05076, + 2.04089, + 1.98871, + 1.92802, + 1.97656, + 2.02284, + 1.96275, + 2.05975, + 1.99876, + 2.07755, + 1.93556, + 1.94664, + 2.00254, + 2.03218, + 1.96148, + 1.94981, + 1.95951, + 2.08401, + 2.03398, + 1.98407, + 1.98549, + 1.96512, + 1.98633, + 2.03149, + 2.00493, + 1.98666, + 2.02876, + 2.00091, + 2.0426, + 1.95763, + 1.91548, + 1.91078, + 1.97378, + 2.00277, + 2.02352, + 2.08331, + 2.01085, + 1.95839, + 1.97665, + 2.03236, + 1.99652, + 1.99873, + 2.02419, + 1.96455, + 1.90486, + 2.01951, + 1.99785, + 2.03716, + 1.9734, + 2.04055, + 1.97903, + 1.9381, + 1.97781, + 2.03637, + 1.98255, + 1.98489, + 2.04846, + 1.95674, + 1.95809, + 1.98031, + 1.95848, + 2.01704, + 1.97616, + 1.94339, + 2.04096, + 2.05934, + 1.99289, + 2.0376, + 1.97598, + 2.00435, + 1.96602, + 2.01242, + 1.98324, + 1.97226, + 1.98835, + 1.92274, + 2.01217, + 1.98835, + 2.02167, + 1.98622, + 2.04031, + 2.02588, + 1.98607, + 2.03358, + 2.00742, + 1.94243, + 1.97613, + 1.96072, + 1.99119, + 1.99252, + 2.04808, + 1.98132, + 1.90744, + 1.9521, + 1.98523, + 1.97674, + 1.96921, + 2.0059, + 2.02196, + 2.09653, + 2.02984, + 2.03233, + 2.01399, + 1.97902, + 1.92289, + 2.02088, + 1.98795, + 1.97243, + 2.00055, + 1.99687, + 1.99595, + 1.96015, + 1.93251, + 1.99104, + 1.95964, + 1.98884, + 1.98333, + 2.03268, + 1.91441, + 2.06152, + 1.93455, + 1.96024, + 2.02305, + 2.02251, + 1.97979, + 1.93099, + 2.02761, + 1.93714, + 1.97679, + 2.01065, + 2.09354, + 1.95595, + 1.96252, + 2.04783, + 1.96374, + 1.9913, + 1.98251, + 2.01662, + 1.96123, + 2.02611, + 1.97044, + 2.00854, + 2.0152, + 1.98203, + 2.01076, + 1.99256, + 1.958, + 2.00109, + 2.0034, + 2.02911, + 1.96206, + 1.99128, + 2.01339, + 2.00852, + 2.04354, + 1.93514, + 2.01169, + 2.01617, + 1.89919, + 1.95354, + 1.95736, + 2.02089, + 2.00792, + 2.00597, + 2.0159, + 2.00293, + 1.9962, + 2.0171, + 1.98384, + 1.91738, + 1.98072, + 1.99734, + 2.0799, + 1.94829, + 1.89855, + 2.0291, + 2.01176, + 2.05298, + 2.02792, + 2.05886, + 1.99928, + 2.02507, + 2.05813, + 2.02668, + 1.95257, + 1.95227, + 1.968, + 1.96955, + 1.97169, + 1.94825, + 1.97716, + 1.98542, + 2.00687, + 1.98687, + 2.00347, + 2.03969, + 1.98224, + 1.935, + 1.9709, + 2.0671, + 1.99546, + 2.00251, + 2.01341, + 1.86798, + 1.97899, + 1.9975, + 2.03694, + 1.98567, + 2.00011, + 2.04276, + 1.98067, + 2.02486, + 2.00715, + 2.03001, + 2.00473, + 2.04593, + 2.02199, + 2.00787, + 1.98125, + 2.0041, + 1.96644, + 1.98402, + 2.04687, + 1.98445, + 1.96908, + 1.98546, + 2.05776, + 2.04457, + 1.98404, + 1.98669, + 1.93033, + 1.9852, + 1.94804, + 1.95895, + 1.96825, + 1.98975, + 2.02821, + 2.06057, + 1.99018, + 1.92653, + 2.00515, + 1.99945, + 1.97966, + 1.96691, + 2.00663, + 1.98157, + 2.03215, + 1.96618, + 2.05549, + 1.9983, + 1.97929, + 2.03801, + 1.94459, + 1.92648, + 2.0353, + 1.94629, + 2.02508, + 2.03577, + 1.9909, + 1.99029, + 1.9972, + 2.01723, + 1.98741, + 1.97019, + 2.0116, + 1.97402, + 2.00446, + 1.95901, + 1.94283, + 1.9989, + 2.01434, + 1.95845, + 2.00733, + 1.97276, + 1.97346, + 2.02668, + 2.01142, + 2.00703, + 2.0151, + 1.95583, + 1.94438, + 2.01065, + 1.93958, + 1.94426, + 1.99917, + 2.0056, + 
2.03731, + 1.99175, + 2.00864, + 2.04502, + 1.96004, + 1.92537, + 1.9456, + 1.97112, + 1.96476, + 1.98412, + 2.01266, + 1.97465, + 2.03248, + 2.01574, + 1.93379, + 1.96352, + 2.07466, + 1.94021, + 1.92511, + 1.97332, + 2.00491, + 1.94898, + 1.98354, + 1.93344, + 2.0303, + 2.04397, + 2.03331, + 2.02834, + 2.03329, + 2.04104, + 2.02153, + 2.00073, + 1.99066, + 2.01512, + 2.0153, + 1.9408, + 1.98334, + 2.03944, + 2.02187, + 2.0345, + 1.94131, + 2.00797, + 1.98111, + 1.99203, + 2.03004, + 2.03545, + 2.02201, + 2.03476, + 1.97641, + 2.01004, + 1.99534, + 2.02757, + 2.027, + 1.94261, + 2.05076, + 1.92188, + 1.9429, + 2.09663, + 1.90244, + 1.97694, + 1.98409, + 1.95274, + 1.97645, + 1.98941, + 1.95427, + 1.96345, + 1.9693, + 1.99523, + 1.96543, + 2.05512, + 1.97311, + 1.97184, + 2.02727, + 1.96254, + 1.96313, + 1.98338, + 1.96345, + 2.00016, + 1.95226, + 1.96962, + 1.96841, + 2.01774, + 2.01013, + 1.9609, + 1.90046, + 1.9943, + 2.01479, + 1.96584, + 1.94991, + 1.98248, + 1.94358, + 2.02598, + 1.98599, + 1.9788, + 1.964, + 2.00263, + 2.01156, + 1.94345, + 1.93722, + 1.98747, + 2.01206, + 1.99596, + 2.03204, + 1.92939, + 1.97974, + 1.97004, + 2.00422, + 2.00573, + 2.02825, + 2.06348, + 1.9778, + 1.97892, + 1.92993, + 2.00311, + 1.99318, + 2.00283, + 1.89879, + 1.95669, + 2.04127, + 1.99294, + 2.00856, + 1.97424, + 2.05307, + 1.95007, + 1.99605, + 1.97253, + 2.03717, + 2.00418, + 1.99459, + 1.98566, + 1.99275, + 1.98428, + 2.01674, + 2.0169, + 1.99546, + 1.96682, + 1.99448, + 2.01996, + 2.07104, + 2.00004, + 1.92634, + 2.03429, + 2.04954, + 1.97503, + 2.0191, + 1.94803, + 1.9294, + 2.01009, + 1.98563, + 1.97411, + 2.01039, + 1.97171, + 2.01617, + 1.9745, + 1.9717, + 2.0179, + 2.02169, + 1.96091, + 1.93472, + 1.93124, + 2.03503, + 2.00312, + 1.94756, + 1.97263, + 2.0053, + 2.01181, + 1.93185, + 1.99288, + 1.9604, + 2.03188, + 1.98252, + 1.94941, + 1.98199, + 1.98967, + 2.00364, + 2.00329, + 2.03105, + 2.02863, + 2.03405, + 1.95088, + 1.98236, + 2.00378, + 1.97968, + 1.96715, + 2.05643, + 1.99113, + 1.95354, + 2.02381, + 1.98066, + 1.95233, + 1.99064, + 1.99499, + 1.99963, + 1.98265, + 2.03129, + 2.05113, + 1.93927, + 1.94626, + 1.95358, + 2.0079, + 1.98633, + 1.927, + 1.91407, + 2.01291, + 1.9977, + 1.94055, + 1.92996, + 2.05607, + 1.98319, + 1.93848, + 1.97485, + 1.96573, + 1.98183, + 1.98029, + 1.9763, + 1.97673, + 1.95977, + 2.02845, + 2.04553, + 1.93552, + 1.95932, + 1.919, + 2.03002, + 2.03049, + 1.99282, + 2.01993, + 1.98707, + 2.00712, + 1.96717, + 1.96314, + 2.01438, + 2.0253, + 1.97594, + 1.98823, + 1.96277, + 1.96884, + 1.96481, + 2.01356, + 1.90224, + 1.97409, + 1.92016, + 1.99256, + 1.9705, + 2.04418, + 1.94863, + 1.99169, + 1.88822, + 1.98237, + 2.03701, + 2.00487, + 1.97934, + 1.97313, + 1.95245, + 1.94582, + 1.99571, + 1.98369, + 1.99128, + 1.97404, + 1.96798, + 2.03327, + 1.99452, + 1.9317, + 1.97406, + 1.98336, + 2.04028, + 2.04071, + 2.03543, + 1.96285, + 2.03403, + 1.96632, + 1.99084, + 1.97986, + 1.96514, + 1.9726, + 1.94514, + 1.99318, + 1.99782, + 1.99016, + 1.98098, + 2.04205, + 1.97103, + 2.02323, + 1.94867, + 1.99526, + 2.0218, + 1.98826, + 2.01249, + 2.00605, + 1.9782, + 1.92196, + 2.03419, + 1.95081, + 1.92547, + 1.97216, + 1.98277, + 2.04983, + 1.95157, + 1.99612, + 1.94277, + 1.91894, + 1.98716, + 1.96341, + 1.9547, + 1.93626, + 1.95351, + 1.96746, + 2.00362, + 1.96986, + 2.00854, + 2.03535, + 1.98909, + 2.0071, + 1.98053, + 1.89974, + 1.88706, + 1.99948, + 1.9944, + 2.06122, + 2.03833, + 2.00912, + 1.95391, + 1.96251, + 2.02318, + 1.99228, + 1.98454, + 1.96682, + 1.9963, 
+ 1.93436, + 1.94906, + 2.02444, + 2.04053, + 1.98776, + 1.99624, + 1.96611, + 1.96937, + 1.95541, + 1.99131, + 1.93865, + 2.07497, + 2.03941, + 2.05973, + 1.96334, + 1.97828, + 2.00941, + 2.0231, + 1.96689, + 2.03658, + 1.95218, + 2.03254, + 2.05962, + 1.99608, + 1.90958, + 2.06436, + 2.00983, + 1.97181, + 1.96836, + 1.99543, + 2.02426, + 1.96266, + 1.96595, + 1.96847, + 2.03084, + 1.94589, + 2.00036, + 1.9347, + 1.96128, + 1.98817, + 1.99094, + 2.00073, + 1.96516, + 2.00657, + 2.03516, + 1.9641, + 2.01086, + 2.0202, + 1.97758, + 1.96737, + 1.96066, + 1.99637, + 1.99239, + 1.95635, + 1.93077, + 1.98171, + 1.99667, + 1.93671, + 2.00278, + 2.02386, + 1.97179, + 2.00508, + 1.9927, + 1.94199, + 1.97418, + 1.97833, + 1.98674, + 1.98324, + 1.99701, + 1.97478, + 1.96459, + 1.96923, + 2.01838, + 2.00544, + 1.92812, + 1.93194, + 1.95946, + 1.93229, + 1.98554, + 1.94472, + 1.96006, + 2.06347, + 2.03454, + 2.02813, + 1.99065, + 1.88492, + 1.9695, + 2.02826, + 2.03011, + 1.99475, + 2.02767, + 2.09269, + 1.92003, + 1.93642, + 1.97548, + 1.91734, + 1.98807, + 1.94399, + 1.9875, + 2.03989, + 1.9735, + 2.01372, + 1.98959, + 1.9726, + 1.9682, + 2.00462, + 1.964, + 1.9971, + 2.00619, + 1.94498, + 2.01274, + 2.08062, + 2.01585, + 1.99568, + 2.06212, + 1.97864, + 2.02482, + 2.00044, + 1.93452, + 2.01283, + 1.98868, + 2.00252, + 1.94436, + 1.95456, + 1.98729, + 1.93025, + 2.01188, + 1.95522, + 2.00946, + 1.92741, + 2.0293, + 2.01412, + 1.96944, + 1.85562, + 2.03398, + 1.99448, + 1.98626, + 2.01263, + 2.03701, + 2.02779, + 1.9861, + 1.93431, + 2.05202, + 1.91912, + 1.96914, + 1.96211, + 1.9215, + 2.02252, + 1.9535, + 1.98695, + 1.9481, + 1.9923, + 1.98367, + 1.92088, + 2.02521, + 1.99033, + 1.98421, + 1.97445, + 2.03386, + 2.02991, + 2.03236, + 1.97375, + 1.98152, + 1.94662, + 2.00794, + 1.99559, + 1.99689, + 1.98376, + 1.96719, + 1.93885, + 1.93029, + 1.99269, + 1.97823, + 1.97119, + 2.00468, + 2.02014, + 1.96549, + 1.98446, + 1.99627, + 2.0587, + 1.98754, + 1.95387, + 2.00008, + 1.96028, + 1.97904, + 1.91734, + 1.99355, + 1.9515, + 2.00868, + 1.93325, + 1.97367, + 1.9764, + 1.93601, + 1.95077, + 1.99771, + 1.99598, + 1.93073, + 1.95586, + 1.95627, + 2.00006, + 1.98971, + 1.96715, + 2.02188, + 1.97787, + 1.96229, + 1.9209, + 1.94712, + 1.94313, + 1.9795, + 1.95527, + 1.92708, + 1.91806, + 2.0466, + 2.00079, + 2.00519, + 1.966, + 2.03785, + 1.94921, + 1.97676, + 1.9662, + 2.03085, + 1.93562, + 1.9313, + 2.01941, + 2.02013, + 1.93643, + 1.95894, + 1.95778, + 1.94561, + 1.95845, + 2.0194, + 1.94204, + 1.9897, + 1.97353, + 1.9965, + 1.93067, + 1.97084, + 2.00349, + 1.97769, + 1.96569, + 1.91816, + 1.95467, + 1.92357, + 1.95407, + 1.98378, + 2.00928, + 2.02088, + 1.96533, + 1.98272, + 1.96449, + 1.9888, + 1.9876, + 1.89257, + 1.98443, + 1.93691, + 1.98647, + 1.98377, + 1.96244, + 1.91485, + 2.02801, + 1.99371, + 1.98383, + 1.93932, + 2.03993, + 1.95617, + 1.90354, + 1.94911, + 1.98231, + 1.95849, + 2.01279, + 1.98692, + 1.97703, + 2.03021, + 1.97021, + 1.96368, + 2.0056, + 1.96479, + 2.00998, + 2.03106, + 1.93726, + 2.01484, + 1.95845, + 2.03382, + 1.97781, + 1.96391, + 1.91376, + 2.00831, + 2.05082, + 1.93713, + 1.96367, + 1.95695, + 1.94157, + 1.9053, + 1.98043, + 1.96037, + 2.04364, + 1.98088, + 1.93161, + 2.01679, + 1.96765, + 1.91298, + 1.96849, + 2.03841, + 1.95388, + 1.98285, + 1.99397, + 1.94903, + 1.98552, + 2.01108, + 1.90294, + 1.94041, + 2.02583, + 2.03383, + 2.07532, + 1.96256, + 1.95447, + 1.96777, + 1.95356, + 1.95474, + 1.92051, + 1.97469, + 1.99365, + 1.93624, + 1.92425, + 2.00907, + 2.02582, + 
1.9966, + 1.95483, + 1.91602, + 2.01729, + 1.94688, + 1.9511, + 1.99284, + 1.97352, + 1.95443, + 1.96131, + 2.01319, + 1.9911, + 1.99706, + 1.96574, + 1.94709, + 1.97128, + 2.01347, + 2.00459, + 2.05158, + 2.00237, + 2.00458, + 1.98558, + 2.00432, + 2.01505, + 1.95335, + 2.0139, + 1.98579, + 1.94451, + 2.01946, + 1.96131, + 1.98425, + 1.96505, + 1.87638, + 2.02833, + 1.98527, + 1.93589, + 1.98291, + 2.00207, + 2.00821, + 1.93842, + 2.01899, + 1.96355, + 1.94923, + 1.97149, + 2.01003, + 2.021, + 1.90265, + 1.94123, + 1.99005, + 1.9667, + 1.98316, + 1.99619, + 1.94322, + 1.98903, + 2.02459, + 2.01778, + 1.93959, + 1.9572, + 2.01687, + 2.03342, + 1.98714, + 1.90974, + 1.96413, + 1.93967, + 2.00428, + 1.99324, + 1.93698, + 2.02305, + 2.01771, + 1.99757, + 1.95202, + 1.93205, + 1.95497, + 1.97572, + 1.94547, + 1.94131, + 1.87771, + 2.05968, + 1.92594, + 1.99585, + 1.97679, + 1.96619, + 1.97151, + 1.93183, + 2.02339, + 1.96641, + 1.95669, + 1.95238, + 1.92394, + 2.01263, + 1.98686, + 1.99557, + 1.95669, + 1.97434, + 1.94185, + 2.00366, + 1.96482, + 2.00482, + 1.97337, + 1.93184, + 1.98171, + 2.00013, + 2.00078, + 1.9926, + 2.01497, + 1.91734, + 2.0471, + 1.99045, + 1.97346, + 2.0546, + 1.95712, + 1.91867, + 1.96107, + 1.96687, + 1.98602, + 2.01906, + 1.9422, + 1.92829, + 1.99356, + 2.00052, + 1.92881, + 2.03842, + 1.97915, + 2.00085, + 1.97143, + 1.96326, + 1.93283, + 1.96998, + 1.97348, + 1.91339, + 2.01583, + 1.97175, + 2.05243, + 2.05453, + 1.99339, + 1.98419, + 2.01361, + 1.93532, + 1.96542, + 1.9782, + 1.96069, + 1.98955, + 1.99741, + 1.99438, + 2.00907, + 1.94164, + 1.91727, + 1.97279, + 2.01746, + 1.99268, + 1.94287, + 2.02791, + 1.92978, + 1.9047, + 1.90564, + 1.99784, + 1.99989, + 2.06317, + 1.98358, + 1.9155, + 1.92227, + 2.00725, + 1.95086, + 1.99643, + 1.98353, + 2.02813, + 1.99828, + 2.07523, + 1.9931, + 1.98494, + 1.96496, + 2.02275, + 2.00813, + 1.92473, + 2.00383, + 1.96417, + 2.01452, + 1.99262, + 1.88807, + 1.90506, + 1.93445, + 1.96481, + 2.03627, + 1.94696, + 1.95402, + 1.9825, + 1.97432, + 1.9798, + 1.93927, + 1.98013, + 1.95889, + 1.95168, + 1.98974, + 1.93711, + 1.98389, + 2.00521, + 2.04882, + 1.96911, + 1.94369, + 2.10105, + 1.97562, + 2.01181, + 2.01213, + 2.02869, + 2.00185, + 1.91835, + 2.00355, + 1.96372, + 1.97117, + 1.98286, + 2.03665, + 1.95927, + 1.9663, + 2.00408, + 2.04361, + 1.9962, + 1.94799, + 1.95962, + 1.94746, + 1.97048, + 1.99226, + 2.01224, + 1.93817, + 1.94561, + 1.99782, + 1.94198, + 1.98114, + 1.93666, + 1.9584, + 1.97029, + 1.96347, + 1.96103, + 2.02238, + 1.98185, + 1.97127, + 2.01246, + 2.00018, + 2.00953, + 2.02532, + 2.03519, + 1.97326, + 1.95495, + 1.98598, + 1.96043, + 2.01431, + 2.00126, + 1.96306, + 1.92119, + 1.98395, + 1.91376, + 1.95375, + 1.92882, + 2.01989, + 2.00988, + 2.00782, + 1.98083, + 1.94331, + 1.95664, + 1.9685, + 1.93775, + 1.97353, + 1.95202, + 1.94563, + 1.94753, + 1.9342, + 1.95383, + 2.00884, + 1.95045, + 2.00743, + 2.02391, + 1.99232, + 1.98303, + 2.01668, + 1.98341, + 2.12, + 1.97469, + 1.95465, + 1.95191, + 1.93757, + 1.93613, + 1.95431, + 1.92264, + 1.94794, + 1.99006, + 1.98009, + 2.04625, + 1.98275, + 1.9321, + 1.98278, + 1.96495, + 1.96174, + 2.01025, + 1.99745, + 1.95494, + 1.92365, + 2.00088, + 1.95428, + 2.0119, + 2.03279, + 1.98256, + 1.98426, + 2.00448, + 1.9587, + 1.94967, + 1.98558, + 1.97571, + 2.0167, + 1.97, + 1.99878, + 1.99161, + 1.97537, + 2.00101, + 1.9866, + 1.94771, + 1.92996, + 1.94673, + 2.00313, + 1.97442, + 1.97999, + 1.96232, + 1.95125, + 1.93083, + 1.9764, + 2.0037, + 1.93986, + 1.95912, + 
1.99717, + 1.94977, + 1.97692, + 2.00599, + 1.92449, + 2.01315, + 1.93977, + 1.96668, + 1.96718, + 1.99215, + 1.92846, + 1.9536, + 1.97173, + 1.97247, + 1.9761, + 1.93479, + 1.99013, + 2.02282, + 1.94592, + 2.00971, + 1.9754, + 2.0106, + 2.00716, + 2.02199, + 1.90274, + 1.9667, + 1.96439, + 1.9563, + 2.00954, + 2.01943, + 1.95102, + 2.01505, + 1.97, + 1.9571, + 2.02098, + 1.98598, + 1.93574, + 1.95752, + 1.96123, + 1.97996, + 1.88537, + 1.91621, + 2.00375, + 1.97274, + 1.97126, + 1.9414, + 1.96476, + 1.92179, + 1.99697, + 1.96214, + 2.04319, + 1.92058, + 1.99669, + 1.95231, + 1.99893, + 1.96724, + 2.00434, + 1.96359, + 2.02052, + 1.98201, + 1.98097, + 2.0416, + 1.93833, + 1.94685, + 1.8908, + 1.96725, + 2.00229, + 1.98477, + 1.95004, + 1.97548, + 1.94814, + 1.93435, + 1.98676, + 2.03156, + 1.94819, + 2.03513, + 2.06098, + 1.96503, + 1.94686, + 1.9525, + 1.9792, + 2.0509, + 1.96295, + 1.9403, + 1.94524, + 1.94178, + 1.97712, + 1.88336, + 1.96105, + 1.99633, + 1.98437, + 1.99804, + 1.93821, + 1.99166, + 1.96774, + 1.89773, + 1.92836, + 1.88551, + 1.93865, + 1.93004, + 1.94561, + 1.96234, + 1.95982, + 1.97006, + 2.04929, + 1.98355, + 1.95069, + 1.96282, + 2.02303, + 1.89441, + 1.94946, + 1.96196, + 1.96048, + 1.94227, + 1.9771, + 1.95643, + 1.95222, + 1.96817, + 1.91682, + 1.93093, + 2.00938, + 1.95287, + 1.95115, + 1.99607, + 1.98889, + 2.04047, + 1.9963, + 1.92561, + 1.95427, + 2.00296, + 1.93019, + 1.98702, + 1.97153, + 1.94843, + 2.00609, + 2.00275, + 1.95366, + 1.99981, + 2.0396, + 1.98452, + 1.93443, + 1.93329, + 2.00219, + 1.99894, + 1.97154, + 1.97404, + 1.9506, + 2.03493, + 1.94391, + 1.94493, + 1.9338, + 1.99544, + 2.01323, + 1.90762, + 1.96144, + 2.00523, + 2.02091, + 2.06628, + 1.96535, + 1.94685, + 1.97524, + 1.95928, + 1.95921, + 1.99955, + 1.93487, + 2.02453, + 1.91431, + 2.00856, + 1.94713, + 2.01627, + 2.03416, + 1.94354, + 1.9831, + 1.98563, + 2.01353, + 1.96529, + 1.99574, + 1.94429, + 1.95839, + 1.96998, + 1.9868, + 2.00454, + 1.94127, + 1.95508, + 1.94047, + 1.97924, + 1.98295, + 1.99062, + 1.92712, + 1.93389, + 1.95819, + 1.94414, + 1.8819, + 1.95202, + 1.98718, + 1.99937, + 1.93831, + 1.9618, + 1.92638, + 1.96301, + 1.95276, + 1.94873, + 2.02361, + 1.97588, + 2.01239, + 1.98399, + 2.01884, + 1.96307, + 1.93774, + 1.93475, + 2.0152, + 1.94811, + 1.98276, + 1.98838, + 1.97724, + 1.90091, + 1.87406, + 1.97194, + 1.97741, + 1.95337, + 1.99019, + 1.94909, + 1.92047, + 1.99518, + 1.94543, + 1.97223, + 1.99569, + 1.9499, + 2.02308, + 1.97286, + 1.95651, + 2.0017, + 1.98428, + 1.95679, + 1.98119, + 1.96725, + 2.0006, + 1.96624, + 2.00056, + 1.94665, + 1.97609, + 2.00981, + 1.98482, + 1.90937, + 1.86038, + 1.95381, + 1.97141, + 1.9418, + 1.93867, + 1.96167, + 1.9798, + 1.9777, + 1.94992, + 1.96763, + 1.96742, + 1.97224, + 1.89956, + 1.99476, + 1.91959, + 1.96674, + 2.01863, + 1.95378, + 1.96567, + 1.91762, + 1.97196, + 1.99614, + 1.9843, + 1.93138, + 1.96464, + 1.99066, + 1.99496, + 1.94187, + 2.04153, + 2.00983, + 2.01253, + 1.98862, + 1.98532, + 1.93247, + 1.98124, + 1.98496, + 1.91601, + 2.00015, + 1.95752, + 1.85977, + 1.97536, + 1.91797, + 1.99533, + 1.98154, + 1.99169, + 1.98718, + 1.95177, + 2.00054, + 1.99086, + 1.98527, + 1.98955, + 1.98121, + 1.91877, + 2.03102, + 1.94662, + 1.96952, + 1.97537, + 1.93707, + 1.97287, + 1.98319, + 1.98094, + 1.98584, + 1.94898, + 2.03493, + 1.98483, + 1.95736, + 2.005, + 1.97067, + 1.92753, + 2.0404, + 2.01794, + 1.99445, + 1.96374, + 1.96249, + 1.96126, + 2.01567, + 1.97186, + 1.99377, + 1.96385, + 1.95966, + 1.91722, + 1.94026, + 
2.04341, + 1.97561, + 2.03429, + 1.94834, + 1.95979, + 1.96698, + 1.99466, + 2.032, + 1.98647, + 1.97339, + 1.98541, + 1.99343, + 1.9975, + 2.00459, + 1.92977, + 1.94035, + 1.96027, + 1.96117, + 2.02045, + 1.95554, + 2.00729, + 1.97553, + 1.96472, + 1.90474, + 1.96908, + 1.9176, + 1.93222, + 1.97489, + 2.02916, + 1.95856, + 1.96698, + 1.982, + 1.98051, + 1.97411, + 1.94515, + 1.96233, + 1.96947, + 1.95161, + 1.98839, + 1.95187, + 1.95991, + 1.96441, + 2.02842, + 1.97327, + 1.92108, + 1.99463, + 1.97719, + 1.98958, + 2.00001, + 1.95279, + 1.90101, + 2.01805, + 2.01558, + 1.98936, + 1.99803, + 1.9932, + 1.95486, + 1.9493, + 1.93138, + 1.96692, + 1.964, + 1.99579, + 1.92504, + 2.0367, + 1.96875, + 1.9875, + 1.86965, + 1.93676, + 1.95676, + 1.98201, + 1.98704, + 1.90864, + 1.97297, + 1.95319, + 1.9565, + 1.96676, + 2.00463, + 1.88853, + 1.97872, + 1.95847, + 2.03037, + 1.99604, + 1.94762, + 2.01836, + 1.95253, + 1.98769, + 1.93894, + 1.91301, + 2.024, + 1.97574, + 1.98434, + 1.9472, + 1.95914, + 1.94324, + 1.99734, + 1.94083, + 2.02947, + 2.00302, + 1.97415, + 1.91728, + 2.00511, + 1.93039, + 1.94029, + 1.96278, + 2.03847, + 1.99537, + 1.98783, + 1.98972, + 1.99169, + 2.04112, + 1.94444, + 1.92006, + 2.0123, + 1.96727, + 1.92559, + 1.99542, + 1.97775, + 1.99654, + 1.97345, + 1.97704, + 1.96876, + 1.9428, + 1.92134, + 1.97265, + 1.91729, + 1.9865, + 1.99779, + 1.95909, + 1.97465, + 1.98477, + 1.87031, + 1.92061, + 1.98045, + 1.99703, + 1.96988, + 2.00502, + 1.97002, + 2.01651, + 1.94624, + 1.90909, + 1.96184, + 2.03578, + 1.93211, + 2.00002, + 1.93402, + 1.98671, + 2.003, + 1.99881, + 1.93612, + 1.99127, + 1.89462, + 1.97984, + 1.98552, + 1.95373, + 1.9681, + 1.99415, + 2.03394, + 1.94494, + 1.96831, + 1.92203, + 2.05426, + 1.91021, + 1.91504, + 1.95663, + 1.98115, + 1.96429, + 1.95331, + 2.02275, + 1.94924, + 1.95192, + 1.98223, + 2.00738, + 2.01188, + 1.97933, + 2.0228, + 1.93587, + 1.99367, + 1.92953, + 1.92319, + 1.94797, + 1.96581, + 2.02049, + 1.92735, + 1.94909, + 1.94261, + 1.94637, + 1.93461, + 1.92548, + 1.96693, + 1.93239, + 1.93908, + 1.98171, + 1.93323, + 1.92038, + 1.90329, + 1.95412, + 1.96008, + 2.01787, + 1.91014, + 2.00295, + 1.94809, + 1.95648, + 1.916, + 1.94391, + 2.02286, + 1.92035, + 1.96339, + 1.98396, + 2.02977, + 1.94066, + 1.96189, + 1.96589, + 2.04575, + 1.9781, + 1.96108, + 2.01827, + 1.99769, + 1.93543, + 1.92655, + 1.98173, + 1.97946, + 1.98773, + 1.97598, + 1.96225, + 1.98576, + 1.97442, + 2.01132, + 2.00138, + 1.92463, + 1.94441, + 1.95364, + 1.94326, + 1.96604, + 1.91178, + 1.9505, + 1.97324, + 1.96651, + 1.91171, + 1.93661, + 2.05011, + 1.99516, + 1.93651, + 2.01667, + 2.04204, + 1.96781, + 1.9876, + 1.97798, + 1.99398, + 1.99633, + 1.9366, + 1.9785, + 1.97861, + 1.92202, + 1.99333, + 1.95395, + 1.95112, + 1.97162, + 1.96958, + 2.00216, + 1.9494, + 1.99109, + 2.01035, + 1.9599, + 1.9183, + 2.02702, + 1.94259, + 1.98105, + 1.99736, + 1.89613, + 1.99487, + 1.95124, + 2.00971, + 1.90702, + 1.95452, + 1.95907, + 1.96423, + 1.9766, + 1.99772, + 1.91466, + 1.98375, + 1.93421, + 1.92774, + 1.89509, + 1.95344, + 1.91103, + 2.00796, + 1.94012, + 2.0087, + 1.97784, + 1.8906, + 1.98044, + 1.95602, + 1.94264, + 1.95789, + 1.9387, + 1.96224, + 1.91959, + 1.93368, + 1.94242, + 2.02529, + 1.91847, + 1.96567, + 1.97997, + 1.98145, + 2.02076, + 1.94209, + 1.95255, + 2.04639, + 1.93688, + 2.00651, + 2.04311, + 1.8814, + 1.91513, + 1.95666, + 2.01217, + 1.96515, + 1.95301, + 1.96678, + 1.94906, + 1.95899, + 1.94074, + 2.0126, + 1.90498, + 1.9697, + 1.90526, + 1.96683, + 1.86889, 
+ 1.96433, + 1.94823, + 1.93327, + 1.98054, + 1.95148, + 1.96087, + 1.95912, + 1.98236, + 1.98821, + 1.9516, + 1.95619, + 2.02611, + 1.98394, + 1.9687, + 1.9193, + 1.90065, + 1.97227, + 1.91581, + 1.93159, + 1.88678, + 1.96777, + 1.90822, + 2.00605, + 1.93586, + 1.98872, + 1.91784, + 1.87839, + 1.93603, + 1.90498, + 1.97621, + 1.97116, + 2.01805, + 1.88633, + 1.97953, + 1.9475, + 2.00233, + 1.96353, + 1.92185, + 1.92314, + 1.97937, + 1.99847, + 1.92785, + 2.00258, + 1.96824, + 2.00776, + 2.01612, + 2.01992, + 1.95369, + 1.93914, + 1.99563, + 1.94701, + 1.94031, + 1.94528, + 1.96042, + 1.87634, + 1.97201, + 2.00407, + 1.96966, + 1.91841, + 1.93842, + 1.98374, + 1.91854, + 2.01102, + 1.95802, + 1.93791, + 1.97447, + 1.99389, + 1.90215, + 1.97638, + 2.02795, + 1.96526, + 1.95481, + 2.00662, + 1.98545, + 1.98168, + 1.96571, + 1.9191, + 1.90479, + 1.95063, + 1.92533, + 1.98968, + 1.99873, + 1.9886, + 2.01919, + 1.97103, + 1.93394, + 1.93393, + 1.99938, + 1.96804, + 1.94282, + 1.92131, + 1.95508, + 1.99982, + 1.94905, + 1.94513, + 2.00505, + 1.9914, + 1.99667, + 2.00357, + 1.94806, + 1.98821, + 1.91391, + 1.93545, + 1.90382, + 1.91899, + 1.90691, + 2.01546, + 1.92868, + 1.93954, + 1.95306, + 2.01139, + 1.93674, + 1.95268, + 1.91445, + 1.93099, + 1.96695, + 1.90718, + 1.96559, + 1.97965, + 1.99131, + 1.95215, + 1.98165, + 2.02754, + 1.98242, + 1.92454, + 1.90726, + 1.94256, + 1.98416, + 1.94241, + 1.95835, + 1.87194, + 1.915, + 1.94581, + 1.99088, + 1.95054, + 1.91561, + 1.96686, + 1.95393, + 1.8958, + 1.95457, + 1.97515, + 1.98473, + 1.98008, + 1.93856, + 1.95622, + 1.98293, + 1.90832, + 1.98032, + 1.98412, + 1.98345, + 2.00628, + 1.89234, + 1.93124, + 1.9189, + 1.96897, + 1.94453, + 1.97169, + 1.95243, + 1.98738, + 2.00436, + 1.96597, + 1.93939, + 2.0087, + 1.97986, + 1.93111, + 1.9553, + 1.9246, + 1.9193, + 1.96772, + 2.01156, + 1.96661, + 1.94821, + 1.85657, + 1.96243, + 1.94744, + 1.95039, + 2.00261, + 1.95025, + 1.93616, + 1.95649, + 2.01825, + 1.97371, + 1.91711, + 1.99027, + 1.93702, + 1.96006, + 1.92997, + 1.90419, + 1.97515, + 1.96562, + 1.91522, + 1.97064, + 1.94258, + 1.88581, + 1.95952, + 1.91051, + 1.98515, + 1.95377, + 1.98391, + 1.88486, + 1.98573, + 1.97312, + 2.01208, + 1.88471, + 1.96404, + 1.9231, + 1.92921, + 1.96775, + 1.91707, + 1.96622, + 1.98026, + 2.03567, + 2.02726, + 2.00526, + 1.96308, + 2.02671, + 1.92991, + 1.91613, + 1.9628, + 1.91566, + 1.93534, + 1.9043, + 1.93649, + 1.94982, + 1.90693, + 1.98251, + 1.99359, + 1.9303, + 2.00752, + 1.92463, + 1.94404, + 1.98053, + 1.90621, + 1.94625, + 1.96926, + 2.02117, + 1.95299, + 1.91649, + 1.98401, + 1.99524, + 1.9932, + 1.9009, + 1.96296, + 1.9222, + 1.92972, + 1.9293, + 1.97229, + 1.91057, + 1.98626, + 1.92968, + 1.98331, + 1.95597, + 1.93686, + 1.94116, + 2.00345, + 1.92524, + 2.01039, + 1.91759, + 1.93482, + 1.94821, + 1.95177, + 1.95889, + 1.86935, + 1.99405, + 1.87767, + 1.93979, + 1.96832, + 1.9717, + 1.87379, + 1.91173, + 1.97723, + 2.01459, + 1.91751, + 1.96033, + 1.95646, + 1.91157, + 1.90925, + 1.97586, + 1.94403, + 1.92181, + 1.95549, + 1.89846, + 1.99541, + 1.98837, + 1.92926, + 1.94585, + 2.00821, + 1.94127, + 1.96055, + 1.96686, + 1.9688, + 2.00608, + 2.03618, + 1.93263, + 1.93273, + 1.99351, + 1.97609, + 2.00285, + 1.95328, + 1.96078, + 1.96906, + 1.95953, + 1.93688, + 1.8941, + 1.9357, + 2.00772, + 2.0243, + 1.9744, + 1.99251, + 1.99392, + 1.94725, + 1.98753, + 1.87983, + 1.95964, + 1.97048, + 1.96031, + 2.01829, + 1.90627, + 1.94428, + 1.96609, + 1.97196, + 1.96765, + 1.95375, + 1.9182, + 2.01935, + 1.9988, 
+ 1.98149, + 1.98468, + 1.96982, + 1.94275, + 1.96768, + 1.99241, + 1.91496, + 1.92985, + 1.9192, + 1.93568, + 1.86913, + 1.97695, + 1.90388, + 1.973, + 2.00545, + 1.99202, + 1.93116, + 1.91259, + 1.88296, + 1.94968, + 2.02245, + 1.99053, + 1.94634, + 1.92335, + 1.94601, + 1.91957, + 1.96721, + 1.96155, + 1.95578, + 1.99804, + 1.97308, + 1.97192, + 1.93278, + 1.99586, + 1.98785, + 2.00151, + 1.98252, + 1.9526, + 1.96387, + 1.95307, + 1.97407, + 2.00137, + 1.99633, + 1.90089, + 1.93632, + 1.91766, + 1.93775, + 1.99138, + 1.95878, + 1.93611, + 1.9049, + 2.02674, + 1.99672, + 1.99696, + 1.99015, + 1.94259, + 1.97976, + 1.95753, + 1.96631, + 1.93229, + 1.94634, + 1.93236, + 1.94069, + 1.95688, + 1.92525, + 1.95004, + 1.96046, + 1.95285, + 1.94777, + 1.90407, + 1.9985, + 1.95356, + 1.91561, + 1.93103, + 1.95786, + 1.92762, + 1.96006, + 1.99027, + 1.9632, + 1.90566, + 1.98402, + 1.9625, + 1.91858, + 1.99667, + 2.00571, + 1.93598, + 1.94064, + 1.94169, + 1.9421, + 1.99361, + 1.98744, + 1.90862, + 1.94516, + 1.94857, + 1.98219, + 2.0496, + 2.01876, + 1.91018, + 1.96115, + 1.96214, + 1.94622, + 1.97607, + 1.89081, + 1.87321, + 1.98222, + 1.91435, + 1.95511, + 1.92419, + 1.91298, + 1.92271, + 1.88206, + 1.89561, + 1.9085, + 1.89732, + 1.99886, + 1.97409, + 1.9998, + 1.97167, + 1.97365, + 1.96472, + 2.0676, + 1.93329, + 1.91406, + 1.9499, + 1.94553, + 1.95389, + 1.90821, + 1.93315, + 1.98229, + 1.95678, + 1.96025, + 1.96028, + 1.9595, + 1.90981, + 1.89862, + 1.93178, + 1.95338, + 1.95793, + 1.92827, + 1.90126, + 1.98016, + 1.9693, + 1.97726, + 1.98079, + 1.93067, + 1.98612, + 2.02269, + 1.90535, + 1.90302, + 1.92914, + 1.87339, + 1.87628, + 1.97088, + 1.94866, + 1.9588, + 1.95355, + 1.95014, + 1.94164, + 1.9532, + 2.01957, + 1.92538, + 1.92938, + 1.98502, + 1.93127, + 1.96259, + 1.99424, + 1.98457, + 2.03483, + 1.95072, + 1.98271, + 2.01228, + 1.95502, + 2.02969, + 1.91887, + 2.00915, + 1.94795, + 1.98147, + 1.95175, + 1.8734, + 1.97696, + 1.99315, + 1.97147, + 1.95296, + 1.99764, + 1.93381, + 1.98352, + 1.96392, + 1.90621, + 1.97947, + 1.93631, + 1.97624, + 1.90753, + 1.96359, + 1.94559, + 1.91472, + 1.94847, + 1.97066, + 1.90796, + 1.90755, + 1.93825, + 1.97343, + 1.96213, + 1.93989, + 1.93812, + 2.00195, + 1.93497, + 1.94057, + 1.96496, + 1.94509, + 1.89868, + 1.96128, + 1.98457, + 1.95766, + 1.949, + 2.04589, + 1.96209, + 2.01578, + 1.97483, + 1.9516, + 1.95659, + 1.89522, + 1.91391, + 1.90362, + 1.95917, + 1.98161, + 1.953, + 1.94872, + 1.95364, + 1.92907, + 2.01951, + 1.87976, + 1.97935, + 1.9651, + 1.96125, + 1.98016, + 1.95402, + 1.89667, + 1.98883, + 1.92775, + 1.95007, + 2.01185, + 1.98455, + 1.97737, + 1.97814, + 1.94288, + 2.00561, + 1.932, + 1.97354, + 1.93004, + 1.96157, + 1.95592, + 1.96859, + 1.93378, + 1.92694, + 1.93169, + 1.89272, + 1.97236, + 1.98064, + 1.9593, + 1.96467, + 1.96668, + 1.95205, + 1.93102, + 1.90394, + 1.94362, + 1.93583, + 1.9786, + 2.01416, + 1.98787, + 1.99599, + 2.02246, + 1.98891, + 1.94502, + 1.92891, + 1.92293, + 1.98825, + 1.95673, + 1.92819, + 1.99713, + 1.88248, + 1.95218, + 1.88483, + 1.94384, + 1.95257, + 1.8953, + 1.95737, + 1.95864, + 1.94424, + 2.02371, + 1.95469, + 1.98219, + 1.95691, + 1.94304, + 1.90884, + 1.9809, + 1.96286, + 1.91628, + 1.92269, + 1.8572, + 1.92198, + 1.93977, + 1.97591, + 1.94359, + 1.87961, + 1.95293, + 1.94019, + 1.97773, + 1.96765, + 1.88061, + 1.90556, + 1.9363, + 2.00088, + 1.92137, + 1.90157, + 1.97114, + 1.93604, + 1.94127, + 1.92278, + 1.9119, + 1.95194, + 1.95393, + 1.95208, + 1.93649, + 1.90274, + 1.93547, + 1.96397, + 
1.94352, + 1.96077, + 1.94851, + 1.914, + 1.90888, + 2.01122, + 1.95399, + 1.99894, + 1.92558, + 1.90957, + 1.95812, + 1.92526, + 1.92883, + 1.88316, + 1.92514, + 2.0001, + 1.927, + 1.98376, + 1.94136, + 1.95811, + 1.97758, + 1.9398, + 1.90329, + 1.92893, + 1.92894, + 1.96436, + 1.95364, + 1.88869, + 1.93606, + 2.03627, + 1.89387, + 1.94449, + 1.95805, + 1.9099, + 1.93298, + 1.94024, + 1.97732, + 1.9576, + 1.92632, + 1.88371, + 1.89318, + 1.89805, + 1.98557, + 1.9073, + 1.96748, + 1.98032, + 1.98804, + 1.96027, + 1.97784, + 1.97296, + 1.9718, + 1.90683, + 1.98335, + 1.90942, + 1.89952, + 1.93024, + 1.91363, + 1.95551, + 1.94315, + 1.95338, + 1.95067, + 1.94898, + 1.89859, + 1.89276, + 2.00752, + 1.93466, + 1.98859, + 1.97517, + 1.95262, + 1.89435, + 1.97489, + 1.94462, + 1.9635, + 1.893, + 1.9907, + 1.94562, + 1.9537, + 1.92536, + 1.96477, + 1.94561, + 1.92761, + 1.9499, + 1.88887, + 1.91358, + 1.97172, + 1.94112, + 1.95163, + 1.87646, + 1.98045, + 1.93228, + 2.01146, + 1.95794, + 1.96645, + 1.93619, + 1.98297, + 1.95949, + 1.93283, + 1.95082, + 1.93744, + 1.98659, + 1.95623, + 1.93405, + 1.88713, + 1.98433, + 1.98834, + 1.90188, + 1.97475, + 1.95593, + 2.0059, + 1.89579, + 1.93779, + 1.94937, + 1.95644, + 2.02585, + 1.92467, + 1.93105, + 1.99799, + 1.91276, + 1.9133, + 2.01103, + 1.88012, + 1.92384, + 1.93269, + 1.93081, + 1.99811, + 1.90881, + 2.02541, + 1.94068, + 1.94711, + 1.93834, + 2.01625, + 1.96654, + 1.93828, + 1.96385, + 1.87368, + 1.98738, + 1.93886, + 1.97097, + 1.9817, + 1.93343, + 1.96904, + 1.93027, + 1.95161, + 1.91139, + 1.97701, + 1.96157, + 1.86792, + 1.94032, + 2.00755, + 2.05782, + 1.94078, + 1.99467, + 1.85038, + 1.98023, + 1.9853, + 2.02216, + 1.94999, + 1.99573, + 1.85987, + 1.99583, + 1.94462, + 1.87309, + 1.92445, + 1.91205, + 1.96243, + 1.9411, + 1.89975, + 1.92444, + 1.88337, + 1.97536, + 1.95531, + 1.9076, + 1.91831, + 1.91788, + 1.93464, + 1.93644, + 1.94484, + 1.94335, + 1.94236, + 1.91167, + 1.93304, + 1.89702, + 1.94596, + 1.95084, + 1.95733, + 1.9049, + 1.97366, + 1.93233, + 1.91747, + 1.88526, + 1.89923, + 1.91342, + 1.96428, + 1.89431, + 1.94503, + 1.95557, + 1.97605, + 1.95739, + 1.96395, + 2.01445, + 1.90651, + 1.99186, + 1.95402, + 1.88206, + 1.96211, + 2.01762, + 1.94751, + 1.92439, + 1.96786, + 2.04932, + 1.93576, + 1.95099, + 1.9637, + 1.93624, + 1.97356, + 1.93049, + 1.95252, + 1.93429, + 2.00149, + 1.92206, + 1.86609, + 1.96464, + 1.94563, + 1.97578, + 1.92335, + 1.91393, + 1.87523, + 2.00937, + 2.02892, + 1.92765, + 1.96052, + 1.93188, + 1.94804, + 1.94131, + 1.98614, + 1.94013, + 1.9377, + 1.93531, + 1.92446, + 1.99008, + 1.99141, + 1.93366, + 1.86488, + 1.90012, + 1.92046, + 1.97078, + 1.97527, + 1.95425, + 1.98595, + 1.9951, + 1.95776, + 2.00521, + 1.88496, + 1.94229, + 1.9364, + 1.92311, + 1.92501, + 1.99301, + 1.97788, + 1.97931, + 1.9526, + 1.90609, + 1.94685, + 1.93193, + 1.96921, + 1.9593, + 1.90525, + 1.97211, + 1.93076, + 1.91661, + 1.97243, + 1.86858, + 1.98929, + 1.96717, + 1.89837, + 1.91703, + 1.92658, + 1.91, + 1.94644, + 1.89451, + 1.95362, + 1.99832, + 1.93987, + 1.95487, + 1.9469, + 1.89179, + 1.9629, + 1.99844, + 1.98007, + 2.00662, + 1.93604, + 1.91614, + 1.97981, + 2.0045, + 1.92924, + 1.91744, + 1.95176, + 1.94886, + 1.95319, + 1.99059, + 1.90717, + 1.94924, + 1.92271, + 1.92331, + 2.01754, + 1.90505, + 1.90854, + 1.96666, + 1.93369, + 1.92738, + 1.92062, + 1.96493, + 1.97554, + 1.90828, + 1.92792, + 1.93648, + 1.88707, + 1.92537, + 1.92721, + 1.91238, + 2.01376, + 1.91439, + 1.96637, + 1.92889, + 1.92195, + 1.91907, + 
2.01593, + 1.93592, + 1.94905, + 1.99003, + 1.96197, + 1.96021, + 1.9702, + 1.99491, + 1.92021, + 1.93772, + 1.96716, + 1.9352, + 1.91998, + 1.88934, + 1.92512, + 1.99338, + 1.93728, + 1.949, + 1.9283, + 1.91463, + 1.9475, + 1.97568, + 1.96547, + 1.93983, + 1.93649, + 1.9873, + 1.88795, + 1.93334, + 1.94293, + 2.00343, + 1.98894, + 1.91957, + 1.88014, + 1.97678, + 1.90162, + 1.93596, + 1.99617, + 1.99014, + 1.93497, + 1.96344, + 1.91777, + 1.96309, + 1.92363, + 1.90104, + 1.92677, + 1.9997, + 1.94654, + 1.92444, + 2.01253, + 1.96311, + 1.95971, + 1.94277, + 1.92776, + 1.87647, + 1.92249, + 1.96548, + 1.92133, + 1.93535, + 1.94584, + 1.93531, + 1.91324, + 1.9366, + 1.88221, + 1.88483, + 1.93071, + 2.00023, + 1.94088, + 1.97838, + 1.98492, + 1.93968, + 1.91214, + 1.89872, + 1.96912, + 1.85213, + 1.9297, + 1.93558, + 1.97611, + 1.96551, + 1.90474, + 1.91503, + 1.95007, + 1.96837, + 1.94975, + 1.87677, + 1.9885, + 1.93097, + 1.92723, + 1.97983, + 1.95212, + 1.91381, + 1.98592, + 1.93663, + 1.98856, + 1.95174, + 2.01299, + 1.94571, + 1.94727, + 1.96419, + 1.9201, + 1.93321, + 1.91477, + 1.95637, + 2.02377, + 1.95927, + 1.8771, + 1.87183, + 1.90944, + 1.93754, + 1.98075, + 1.93995, + 1.87665, + 1.93753, + 1.88068, + 1.96816, + 1.9136, + 1.90933, + 2.01274, + 1.88794, + 1.91101, + 1.96665, + 1.93926, + 1.89332, + 1.94242, + 1.96961, + 1.98258, + 1.96354, + 1.92748, + 1.86343, + 1.93653, + 1.87586, + 2.03019, + 1.98314, + 1.9515, + 1.95462, + 2.00723, + 1.92209, + 1.93391, + 1.98734, + 1.9333, + 2.0202, + 1.90935, + 1.95647, + 1.92223, + 1.91674, + 1.93162, + 1.97011, + 1.9947, + 1.90525, + 1.93498, + 1.91135, + 1.94386, + 1.93963, + 1.96744, + 1.93245, + 1.84187, + 1.94812, + 1.92852, + 2.03207, + 1.9635, + 1.89476, + 1.96573, + 1.903, + 1.91526, + 1.9765, + 1.95872, + 1.87991, + 1.90886, + 1.97805, + 1.89535, + 1.95224, + 2.0195, + 1.95127, + 2.00518, + 1.98062, + 1.91637, + 2.02097, + 1.99848, + 1.91051, + 2.02326, + 1.97526, + 1.94271, + 1.94622, + 1.91267, + 1.90826, + 1.93462, + 1.89029, + 1.91615, + 2.01299, + 1.97227, + 1.94929, + 1.98089, + 1.99435, + 1.92795, + 1.9736, + 1.97466, + 1.97275, + 1.91535, + 1.99577, + 1.91189, + 1.95657, + 1.93913, + 1.91695, + 1.99986, + 2.01655, + 1.94452, + 1.88216, + 1.97962, + 1.95274, + 1.91392, + 1.87165, + 1.90779, + 1.94764, + 2.01028, + 1.93804, + 1.96113, + 1.97934, + 1.99488, + 1.90531, + 1.98148, + 1.88815, + 1.94505, + 1.91355, + 1.91978, + 1.90947, + 1.95753, + 1.89437, + 1.93898, + 1.93748, + 1.97043, + 1.9361, + 1.95503, + 1.88965, + 1.97041, + 1.92433, + 1.95668, + 1.90366, + 1.93463, + 1.89196, + 1.96508, + 1.93753, + 1.93789, + 1.93092, + 2.0146, + 1.96468, + 1.96714, + 2.00045, + 1.9461, + 1.96375, + 1.90741, + 1.9439, + 1.89652, + 1.92833, + 1.90919, + 1.94386, + 1.99179, + 1.94412, + 1.914, + 1.95382, + 1.98721, + 1.92139, + 1.97717, + 1.94134, + 1.91244, + 1.974, + 1.88372, + 1.90006, + 1.95555, + 1.92947, + 1.87255, + 1.90677, + 1.97652, + 1.87355, + 1.89553, + 1.94453, + 1.8659, + 1.9831, + 1.96646, + 1.88421, + 1.94225, + 1.92048, + 1.908, + 1.93687, + 1.92356, + 1.99273, + 1.94377, + 1.9456, + 1.96818, + 1.94391, + 1.99896, + 1.91805, + 1.95657, + 1.93507, + 1.96283, + 1.96149, + 1.94757, + 1.93362, + 1.89808, + 1.9368, + 1.9565, + 1.90642, + 1.91944, + 1.98033, + 1.93402, + 1.95258, + 1.89539, + 1.99945, + 1.98927, + 1.91466, + 1.98027, + 1.88732, + 1.97984, + 1.96499, + 1.89582, + 1.95803, + 1.91477, + 1.96466, + 1.93703, + 1.94311, + 1.97689, + 2.01124, + 1.91667, + 1.94846, + 1.93329, + 1.97468, + 1.94056, + 1.90207, + 
1.94662, + 1.9824, + 1.91634, + 1.93589, + 1.95682, + 1.9002, + 1.98457, + 1.96449, + 1.95437, + 1.90606, + 1.93912, + 1.9281, + 1.96403, + 1.92464, + 1.95756, + 1.97512, + 1.91297, + 1.95538, + 1.98789, + 1.95769, + 1.93455, + 1.96164, + 1.93992, + 1.94864, + 1.94232, + 1.94742, + 1.9185, + 1.89294, + 1.92365, + 1.92313, + 1.95503, + 1.9592, + 1.96855, + 1.93349, + 1.95687, + 1.90604, + 1.95352, + 1.98154, + 2.006, + 1.93091, + 1.90366, + 1.92345, + 1.94657, + 1.93484, + 1.94064, + 1.91682, + 1.97535, + 1.95001, + 1.92684, + 1.88777, + 1.92836, + 1.88914, + 1.90737, + 1.89046, + 1.94276, + 1.88489, + 1.95976, + 2.03497, + 1.95263, + 2.00356, + 1.87281, + 1.90231, + 1.92985, + 1.99002, + 1.96141, + 1.93041, + 1.94028, + 1.99391, + 1.94861, + 1.87762, + 1.94614, + 1.8911, + 1.9352, + 1.90566, + 1.95925, + 1.98351, + 1.91002, + 1.9134, + 1.9592, + 1.93115, + 1.92933, + 1.93691, + 1.92782, + 1.95569, + 1.94108, + 1.9698, + 1.98585, + 1.99849, + 1.96921, + 2.00012, + 1.95076, + 1.903, + 2.00482, + 1.93828, + 1.95012, + 1.93521, + 2.00781, + 1.93175, + 1.98927, + 1.92282, + 1.96321, + 1.95517, + 1.96789, + 1.90995, + 1.97649, + 1.93643, + 1.9482, + 1.92981, + 1.97309, + 1.96037, + 1.95105, + 1.875, + 1.95388, + 1.96275, + 1.96213, + 1.91965, + 1.95116, + 1.9491, + 1.91898, + 1.94353, + 1.91322, + 1.94672, + 1.93114, + 1.89621, + 1.89538, + 1.94372, + 1.97922, + 1.90549, + 1.93432, + 1.87826, + 1.93538, + 1.98038, + 1.89026, + 1.99009, + 1.96232, + 1.96852, + 1.97355, + 1.93561, + 1.87636, + 1.95926, + 1.93666, + 1.93869, + 1.96662, + 1.93526, + 1.86318, + 1.91281, + 1.8983, + 1.90035, + 1.90477, + 1.89812, + 1.91537, + 1.91641, + 1.88822, + 1.90328, + 1.90625, + 1.92143, + 1.91721, + 1.95535, + 1.94313, + 1.92128, + 1.97228, + 1.90396, + 2.00064, + 1.9666, + 1.89527, + 1.91201, + 1.98934, + 1.92286, + 1.89175, + 1.99004, + 1.95911, + 1.99489, + 1.92849, + 1.894, + 1.90351, + 1.93141, + 1.95655, + 1.93733, + 1.918, + 2.06592, + 1.89668, + 1.94321, + 1.95438, + 1.94602, + 1.8543, + 1.92957, + 1.98072, + 1.91772, + 1.99615, + 1.91156, + 1.93968, + 1.9189, + 1.92116, + 1.99652, + 2.01539, + 1.87257, + 1.91207, + 2.0026, + 1.92746, + 1.91068, + 1.94758, + 1.92309, + 1.89727, + 1.98905, + 1.92093, + 1.96566, + 1.94626, + 1.93312, + 1.84898, + 1.90351, + 1.91148, + 1.99148, + 2.02208, + 1.93461, + 1.96637, + 1.97948, + 1.89491, + 1.89591, + 2.01071, + 1.88199, + 1.97355, + 1.96392, + 1.94901, + 1.92355, + 1.89521, + 1.92308, + 1.9357, + 1.9034, + 1.95113, + 1.93566, + 1.88386, + 1.90119, + 1.97003, + 2.02876, + 1.96282, + 1.8879, + 1.92494, + 1.95831, + 1.93525, + 1.97474, + 1.96895, + 1.97316, + 1.96702, + 1.93252, + 1.96162, + 1.97605, + 1.91578, + 2.00732, + 1.9362, + 1.95494, + 2.01949, + 1.90673, + 1.91131, + 1.90915, + 1.94754, + 1.92437, + 1.98394, + 1.93066, + 1.89939, + 1.94373, + 1.93231, + 1.96178, + 1.99999, + 1.94704, + 1.89324, + 1.92364, + 1.90946, + 1.93757, + 1.97212, + 1.91481, + 1.96543, + 1.93616, + 1.90184, + 1.95422, + 1.98921, + 1.96063, + 1.9407, + 1.97704, + 1.94855, + 1.90648, + 1.97604, + 1.89047, + 1.90418, + 1.95983, + 1.90942, + 1.8923, + 1.94085, + 1.92592, + 1.9906, + 2.0043, + 1.98122, + 1.91388, + 1.94631, + 1.93839, + 1.92997, + 2.0134, + 1.95169, + 1.86152, + 1.88413, + 1.90576, + 1.97617, + 1.8754, + 1.93057, + 1.97556, + 1.99244, + 1.99539, + 1.8998, + 1.97838, + 1.95793, + 1.94167, + 1.92323, + 1.96734, + 1.91275, + 1.9688, + 1.95592, + 1.96255, + 1.99572, + 1.9273, + 1.95406, + 1.95181, + 1.96869, + 1.91512, + 1.97945, + 1.94075, + 1.9357, + 1.97978, + 1.975, + 
1.95323, + 1.90534, + 1.96648, + 1.9596, + 1.89919, + 1.90911, + 1.96491, + 1.93626, + 1.99923, + 1.92231, + 1.86787, + 1.91517, + 1.91178, + 1.95093, + 2.01344, + 1.91336, + 1.89831, + 1.94353, + 1.90163, + 1.99674, + 1.9911, + 1.9633, + 1.88333, + 1.9181, + 1.94942, + 1.90974, + 1.91119, + 1.91887, + 1.95308, + 1.95797, + 2.05375, + 1.95602, + 1.95142, + 1.95603, + 1.94501, + 1.92126, + 1.93308, + 1.96531, + 1.96945, + 1.93295, + 1.87308, + 1.93856, + 1.97541, + 1.91394, + 1.97091, + 1.99224, + 1.89254, + 1.93019, + 1.92248, + 1.92214, + 1.96309, + 1.90371, + 1.88871, + 1.98354, + 1.94417, + 1.92577, + 1.92228, + 1.88461, + 1.95145, + 1.91099, + 1.92067, + 1.92681, + 1.87553, + 1.8937, + 1.90617, + 1.96364, + 1.97131, + 1.96759, + 1.89627, + 1.96717, + 1.92025, + 1.90727, + 1.93488, + 1.94802, + 1.92526, + 1.96558, + 1.8977, + 1.95853, + 1.93084, + 1.96424, + 1.92764, + 1.88569, + 1.93369, + 1.95445, + 1.94756, + 1.96442, + 1.90859, + 1.92706, + 1.89127, + 1.94097, + 1.93615, + 1.95091, + 1.85966, + 1.94662, + 1.90816, + 1.94305, + 1.94922, + 1.84486, + 1.92356, + 1.93053, + 1.9244, + 1.99663, + 1.97552, + 1.87689, + 1.98795, + 1.87203, + 1.98532, + 1.90226, + 1.97809, + 1.96325, + 1.86965, + 1.94078, + 1.88585, + 1.98079, + 1.89603, + 1.94079, + 1.92063, + 1.96473, + 1.90133, + 1.95843, + 1.84688, + 1.91185, + 1.92476, + 1.88449, + 1.9335, + 1.96336, + 1.85507, + 1.94197, + 1.97346, + 1.9303, + 1.97317, + 2.01781, + 1.97283, + 1.91372, + 1.98612, + 1.90053, + 1.94736, + 1.90981, + 1.96763, + 1.92138, + 1.97403, + 1.9228, + 1.99265, + 1.97898, + 1.82964, + 1.91524, + 1.8658, + 1.93141, + 1.99034, + 1.9504, + 1.95404, + 1.8932, + 2.00271, + 1.91233, + 1.9073, + 1.98407, + 1.9334, + 1.91375, + 1.9574, + 1.95489, + 1.83593, + 1.91688, + 1.9323, + 1.88206, + 1.99888, + 1.97283, + 1.98046, + 1.90552, + 1.95073, + 1.93053, + 1.95528, + 1.90145, + 1.98146, + 1.95205, + 1.91032, + 1.92978, + 1.94742, + 1.95511, + 2.00529, + 2.0051, + 1.94546, + 1.96988, + 1.88514, + 1.92366, + 1.97013, + 1.91784, + 1.95106, + 1.92766, + 1.85697, + 1.96149, + 1.98434, + 1.93621, + 1.9797, + 1.92138, + 1.99607, + 1.96114, + 1.91071, + 1.88029, + 1.94787, + 1.96312, + 1.8933, + 1.93141, + 1.8684, + 1.95842, + 1.89094, + 1.94317, + 1.99095, + 1.95654, + 1.91818, + 1.9345, + 1.99936, + 1.93212, + 1.93381, + 1.93389, + 1.92694, + 1.8728, + 1.88146, + 1.91489, + 1.92196, + 2.0176, + 1.9651, + 1.99691, + 1.89961, + 1.90708, + 2.01109, + 1.93873, + 1.89756, + 1.98576, + 1.85228, + 1.98173, + 1.87245, + 1.91109, + 1.85639, + 1.87661, + 1.95947, + 1.90492, + 1.94597, + 1.95236, + 1.95739, + 1.95027, + 1.94813, + 2.01647, + 1.91149, + 1.91519, + 1.99035, + 1.91517, + 1.93913, + 1.8745, + 1.99158, + 1.95916, + 1.89326, + 1.91891, + 1.85962, + 1.91381, + 1.94621, + 1.91113, + 1.91608, + 1.96515, + 1.92494, + 1.89849, + 2.00669, + 1.9265, + 1.88348, + 1.9634, + 1.97313, + 1.92317, + 1.91308, + 1.9305, + 1.97287, + 1.92902, + 1.90105, + 1.88669, + 1.90178, + 1.97685, + 1.92986, + 1.93228, + 1.91391, + 1.93709, + 1.92177, + 2.02657, + 1.90782, + 1.95636, + 1.90856, + 1.96929, + 1.91203, + 1.89572, + 1.89256, + 1.98135, + 1.894, + 1.9742, + 1.97269, + 1.98494, + 1.93019, + 1.99579, + 1.9121, + 1.85378, + 1.93302, + 1.91763, + 1.95084, + 1.96371, + 1.85813, + 1.92462, + 1.94547, + 1.89458, + 1.94993, + 1.9351, + 1.97645, + 1.91391, + 1.95188, + 1.94693, + 1.89944, + 1.86975, + 1.89799, + 1.97224, + 1.90237, + 1.88304, + 1.94193, + 1.88748, + 1.89714, + 1.93253, + 1.93449, + 1.94736, + 1.92341, + 1.93072, + 1.96139, + 1.90908, + 
1.98775, + 1.91061, + 1.87959, + 1.94657, + 1.9198, + 1.95079, + 1.95697, + 1.92562, + 1.8758, + 1.85324, + 1.95047, + 1.94453, + 1.96974, + 1.93145, + 1.94151, + 1.93702, + 1.92659, + 2.0076, + 1.96606, + 1.92364, + 1.97808, + 1.90009, + 1.98887, + 1.91816, + 1.97041, + 1.90765, + 1.91508, + 1.94429, + 1.96974, + 1.94512, + 1.91053, + 1.91712, + 1.90694, + 1.94986, + 1.95189, + 1.97155, + 1.97552, + 1.97235, + 1.88492, + 1.90277, + 1.93998, + 1.92123, + 1.9002, + 1.89712, + 1.88712, + 1.91605, + 1.98995, + 1.95071, + 1.8788, + 1.9465, + 1.95157, + 1.90013, + 1.94089, + 1.99479, + 1.88615, + 1.90067, + 1.90335, + 1.9231, + 1.91675, + 2.00293, + 1.90564, + 1.95141, + 1.95477, + 1.9472, + 1.92578, + 1.93688, + 1.92193, + 1.93941, + 1.95141, + 1.87374, + 1.95621, + 1.92474, + 2.01996, + 1.99032, + 1.93441, + 1.87026, + 1.90181, + 1.95079, + 1.99378, + 1.91364, + 1.94357, + 1.93555, + 1.87093, + 1.91576, + 1.96486, + 1.9203, + 1.91243, + 1.89862, + 1.9381, + 1.92578, + 1.95138, + 1.91525, + 1.91543, + 1.94057, + 1.93247, + 1.90494, + 1.90845, + 1.92802, + 1.91202, + 1.97704, + 2.00656, + 1.89936, + 1.93632, + 1.96991, + 1.93717, + 1.92877, + 1.928, + 1.90681, + 1.93182, + 1.93997, + 1.96944, + 1.92458, + 1.92341, + 1.9171, + 1.91209, + 1.93336, + 1.96265, + 1.93291, + 1.9396, + 1.89681, + 1.93092, + 1.95367, + 1.93605, + 1.89851, + 1.92295, + 1.91328, + 1.96616, + 1.97962, + 1.94314, + 1.91185, + 1.84906, + 1.97953, + 1.97281, + 1.94936, + 1.91396, + 1.96046, + 1.95028, + 1.90689, + 1.85132, + 1.891, + 1.89664, + 1.93376, + 1.89855, + 1.88083, + 1.92486, + 1.87875, + 1.98045, + 1.93819, + 1.88975, + 1.95794, + 1.88334, + 2.03729, + 1.9212, + 1.99457, + 1.92115, + 1.93022, + 1.94117, + 1.90339, + 1.9471, + 1.9164, + 1.87681, + 1.95712, + 1.93437, + 1.88979, + 2.00388, + 1.96095, + 1.94428, + 2.00144, + 1.88269, + 1.94257, + 1.96826, + 1.9547, + 1.93804, + 1.90893, + 1.91983, + 1.90715, + 1.88256, + 1.96337, + 1.9019, + 1.9183, + 1.92926, + 1.94839, + 1.89927, + 1.97932, + 1.94042, + 1.94826, + 1.95331, + 1.93501, + 1.91075, + 1.87079, + 1.89842, + 1.98023, + 1.95434, + 1.89101, + 1.94485, + 1.95729, + 1.94659, + 1.98922, + 1.89305, + 1.93768, + 2.03823, + 1.9002, + 1.90058, + 1.98997, + 1.95036, + 1.8939, + 1.88367, + 1.96966, + 1.92294, + 1.92133, + 1.957, + 1.91447, + 1.94721, + 1.94339, + 1.95887, + 1.97828, + 2.03433, + 1.99138, + 1.95766, + 1.92421, + 1.94308, + 1.90936, + 1.91372, + 1.94925, + 1.9278, + 1.94809, + 1.86981, + 1.92335, + 1.95342, + 1.99177, + 1.89166, + 1.93616, + 1.92392, + 1.88805, + 1.92043, + 1.98909, + 1.90649, + 1.93995, + 1.9326, + 1.93108, + 1.86819, + 1.89785, + 1.94857, + 1.88327, + 1.92083, + 1.89099, + 1.89509, + 1.93953, + 1.96214, + 1.95004, + 1.94404, + 1.9473, + 1.92725, + 1.97665, + 1.90874, + 1.92251, + 1.94479, + 1.9278, + 1.97109, + 2.0131, + 1.90357, + 1.93168, + 1.89182, + 1.94354, + 1.86664, + 1.92117, + 1.90175, + 1.90004, + 1.94033, + 1.98472, + 1.92857, + 1.93344, + 1.93294, + 1.9457, + 1.91618, + 1.92507, + 1.86762, + 1.85383, + 1.98204, + 1.96305, + 1.96269, + 1.95449, + 1.88368, + 1.94525, + 1.86543, + 1.84214, + 1.98001, + 1.93765, + 1.92506, + 1.93818, + 1.95248, + 1.93261, + 1.95372, + 1.94564, + 1.9586, + 1.89915, + 1.86833, + 1.95888, + 1.93043, + 1.97799, + 1.89341, + 1.96774, + 1.91207, + 1.89564, + 1.89088, + 2.00955, + 1.9295, + 1.88259, + 1.8801, + 1.93134, + 1.91732, + 1.93266, + 1.93361, + 1.96068, + 1.89466, + 1.89746, + 1.90371, + 1.87505, + 1.96021, + 1.9255, + 1.92749, + 1.95017, + 1.89188, + 1.95392, + 1.93579, + 1.93057, + 
1.93619, + 1.90095, + 1.91312, + 1.88474, + 1.92934, + 1.94037, + 1.93436, + 1.96237, + 1.91746, + 1.92026, + 1.89822, + 1.91521, + 1.88677, + 1.8965, + 1.92748, + 1.89479, + 1.89301, + 1.91363, + 1.94357, + 1.99708, + 1.93147, + 2.01746, + 1.93409, + 1.97243, + 1.93466, + 1.88234, + 1.94529, + 1.92877, + 1.87116, + 1.90629, + 1.90843, + 1.86878, + 1.92002, + 1.94538, + 1.92179, + 1.93251, + 1.89491, + 1.94915, + 1.8983, + 1.92034, + 1.93567, + 1.91998, + 1.94853, + 1.90672, + 1.94697, + 1.9406, + 1.91341, + 1.96702, + 1.98351, + 2.01633, + 1.94063, + 1.89402, + 1.98813, + 2.00803, + 1.91278, + 1.97932, + 1.86827, + 1.87298, + 1.90921, + 1.94044, + 1.9663, + 1.98207, + 1.88709, + 1.89548, + 1.90925, + 1.92744, + 1.89719, + 1.90329, + 1.85791, + 1.91167, + 1.88561, + 1.90941, + 1.99058, + 1.94634, + 1.87024, + 1.91587, + 1.91515, + 1.9732, + 1.99627, + 1.89963, + 1.90712, + 1.93562, + 1.87924, + 1.95523, + 1.90203, + 1.93655, + 1.92854, + 1.92726, + 1.95616, + 1.89989, + 1.92624, + 1.92378, + 1.95413, + 1.90168, + 1.92917, + 1.89649, + 1.88507, + 1.9386, + 1.83354, + 1.91551, + 1.96603, + 1.87212, + 1.9828, + 1.841, + 1.94963, + 1.9909, + 1.83439, + 1.9418, + 1.9503, + 1.90072, + 1.96187, + 1.95112, + 1.9421, + 1.93126, + 1.82235, + 1.98274, + 1.96009, + 1.9205, + 1.9323, + 1.95942, + 1.9048, + 1.90134, + 1.8658, + 1.90087, + 1.94376, + 1.93135, + 1.95171, + 1.91493, + 1.90017, + 1.89356, + 1.95393, + 1.93403, + 1.95129, + 1.93375, + 1.93496, + 1.93606, + 1.93275, + 1.92236, + 1.91851, + 1.9482, + 1.901, + 1.9373, + 1.85615, + 1.89029, + 1.89467, + 1.9089, + 1.80752, + 1.88027, + 1.95811, + 1.88734, + 1.87741, + 1.91846, + 1.90337, + 1.95246, + 1.88781, + 1.90954, + 1.95024, + 1.97128, + 1.94518, + 1.91873, + 1.99291, + 1.96599, + 1.92888, + 1.92781, + 1.941, + 1.9037, + 1.96209, + 1.90777, + 1.88407, + 1.96551, + 1.94542, + 1.95148, + 1.92638, + 1.95206, + 1.94091, + 1.93494, + 1.95649, + 1.89838, + 1.9023, + 1.94065, + 1.90243, + 1.97203, + 1.90213, + 1.83122, + 1.93074, + 1.94478, + 1.97367, + 1.99763, + 1.94857, + 1.85538, + 1.95467, + 1.96614, + 1.92499, + 1.90551, + 1.8828, + 1.95785, + 1.88483, + 1.91047, + 1.89883, + 1.89651, + 1.9031, + 1.92835, + 1.90385, + 1.9669, + 1.94811, + 1.91052, + 1.88865, + 1.91011, + 1.94018, + 1.90242, + 1.95544, + 1.91599, + 1.90356, + 1.89646, + 1.92658, + 1.91497, + 1.92842, + 1.90354, + 1.88746, + 1.93965, + 1.89824, + 1.9514, + 1.8846, + 1.85878, + 1.88692, + 1.98268, + 1.88362, + 1.91181, + 1.92974, + 1.90405, + 1.91173, + 1.91951, + 1.87387, + 1.89523, + 1.93829, + 1.9334, + 1.88928, + 1.90371, + 1.928, + 1.95065, + 1.90311, + 1.93618, + 1.92009, + 1.95145, + 1.97647, + 1.93184, + 1.9533, + 1.92028, + 1.91895, + 1.91679, + 1.90866, + 1.82013, + 1.88896, + 1.87111, + 1.82042, + 1.94783, + 1.91639, + 1.94217, + 1.91184, + 1.91743, + 1.96614, + 1.98506, + 1.92023, + 1.99022, + 1.94412, + 1.86952, + 1.9391, + 1.96387, + 1.92632, + 1.90393, + 1.94497, + 1.93814, + 1.92468, + 1.94645, + 1.90292, + 1.96926, + 1.91462, + 1.95781, + 1.92797, + 1.86734, + 1.94308, + 1.90269, + 1.91714, + 1.98561, + 1.94516, + 1.93131, + 1.91614, + 1.93417, + 1.92749, + 1.92042, + 1.82974, + 1.90638, + 1.89558, + 1.99201, + 1.87831, + 1.90629, + 1.87786, + 1.88168, + 1.96509, + 1.83434, + 1.94533, + 1.97436, + 1.90878, + 1.92358, + 2.03989, + 1.92306, + 1.94574, + 1.89335, + 1.94099, + 1.92511, + 1.92386, + 1.88337, + 1.88767, + 1.89724, + 1.87642, + 1.94097, + 1.86382, + 1.94869, + 1.89886, + 1.96416, + 1.93165, + 1.92141, + 1.8695, + 1.91, + 1.94779, + 1.95512, + 1.89899, 
+ 1.91408, + 1.89279, + 1.96907, + 1.96637, + 1.90919, + 1.93851, + 1.93995, + 1.85046, + 1.88659, + 1.95704, + 1.94303, + 1.92861, + 1.94433, + 1.87922, + 1.91254, + 1.91706, + 1.87679, + 1.86158, + 1.97964, + 1.90476, + 1.95219, + 1.99553, + 1.94777, + 1.9136, + 1.89675, + 2.02064, + 1.91305, + 1.80009, + 1.94087, + 1.90029, + 1.97344, + 1.90139, + 1.98023, + 1.95106, + 1.92306, + 2.00754, + 1.93753, + 1.98253, + 1.8953, + 1.92405, + 1.93237, + 1.94267, + 1.88574, + 1.91298, + 1.98481, + 1.91388, + 1.93915, + 1.93301, + 1.92767, + 1.89124, + 1.98884, + 1.98743, + 1.93264, + 1.95109, + 1.89008, + 1.93312, + 1.94136, + 1.93448, + 1.97003, + 1.96267, + 1.86429, + 1.86806, + 1.97285, + 1.93429, + 1.9503, + 1.93223, + 1.94269, + 1.90346, + 1.92027, + 1.98587, + 1.8905, + 1.91779, + 1.90321, + 1.94587, + 1.92735, + 1.90286, + 1.89654, + 1.90572, + 1.90434, + 1.92275, + 1.96465, + 1.89785, + 1.91235, + 1.9283, + 1.93107, + 1.96544, + 1.89627, + 1.97201, + 1.88465, + 1.85036, + 1.88088, + 1.94032, + 1.90919, + 1.92871, + 1.96534, + 1.87743, + 1.98491, + 1.86956, + 1.92453, + 1.88809, + 1.9006, + 1.94708, + 1.93059, + 1.96719, + 1.88414, + 1.91479, + 1.9072, + 1.91835, + 1.89228, + 1.87372, + 1.93908, + 1.92241, + 1.9382, + 1.99628, + 1.83721, + 1.89382, + 1.9229, + 1.90513, + 1.92572, + 1.94147, + 1.99897, + 1.95264, + 1.92509, + 1.92951, + 1.88776, + 1.97743, + 1.976, + 1.95043, + 1.88058, + 1.9175, + 1.88012, + 1.93412, + 1.93562, + 1.95345, + 1.96817, + 1.89767, + 1.95352, + 1.91565, + 1.94449, + 1.95429, + 1.91576, + 1.95433, + 1.93055, + 1.94794, + 1.89391, + 1.93615, + 1.93105, + 1.97406, + 1.9146, + 1.90364, + 1.9173, + 1.93608, + 1.93909, + 1.93227, + 1.97275, + 1.89151, + 1.955, + 1.88676, + 1.88398, + 1.90984, + 1.96293, + 1.89665, + 1.92023, + 1.90597, + 1.96421, + 1.83987, + 1.90699, + 1.89077, + 1.9066, + 1.93624, + 1.94365, + 1.85519, + 1.87682, + 1.87541, + 1.95949, + 1.94008, + 1.89712, + 1.87619, + 1.86937, + 1.95877, + 1.91471, + 1.93952, + 1.90927, + 1.9694, + 1.86038, + 1.97667, + 1.92677, + 1.91572, + 1.93326, + 1.93627, + 1.90675, + 1.94161, + 1.88927, + 1.9205, + 1.9266, + 1.95163, + 1.94173, + 1.95148, + 1.90677, + 1.90823, + 1.93295, + 1.88235, + 1.97318, + 1.92545, + 1.95889, + 2.02819, + 1.9968, + 1.91761, + 1.96572, + 1.93775, + 1.90934, + 1.93105, + 1.90129, + 1.90305, + 1.9445, + 1.95634, + 1.90573, + 1.89767, + 1.90335, + 1.94311, + 1.93132, + 1.92399, + 1.89202, + 1.97969, + 1.90993, + 1.82068, + 1.98303, + 1.97078, + 1.84476, + 1.91222, + 1.96836, + 1.9401, + 1.99719, + 1.96299, + 1.87151, + 1.96045, + 1.9734, + 2.00387, + 1.97065, + 1.9517, + 1.8715, + 1.94841, + 1.92404, + 1.9141, + 1.93419, + 1.88106, + 1.94231, + 1.92597, + 1.89628, + 1.88056, + 1.93939, + 1.87049, + 1.89581, + 1.84846, + 2.01049, + 1.88432, + 1.95819, + 1.95419, + 1.99557, + 1.98864, + 1.90152, + 1.9057, + 1.90546, + 1.92243, + 1.91772, + 1.89925, + 1.90592, + 1.94576, + 1.91816, + 1.96072, + 1.94377, + 1.88582, + 1.91774, + 1.92517, + 1.90864, + 1.96374, + 1.91323, + 1.90556, + 1.93685, + 1.90614, + 1.91029, + 2.0254, + 1.91353, + 1.83083, + 1.91759, + 1.92438, + 1.9801, + 1.92524, + 1.96863, + 1.87682, + 1.92308, + 1.88299, + 1.9158, + 1.83865, + 1.90922, + 1.91258, + 1.95401, + 1.92945, + 1.92789, + 1.90044, + 1.89629, + 1.92802, + 1.89947, + 1.94174, + 1.85641, + 1.98217, + 1.91864, + 1.9616, + 1.95019, + 1.90628, + 1.91301, + 1.93331, + 1.90436, + 1.89387, + 1.94393, + 1.98699, + 1.85996, + 1.91958, + 1.88149, + 1.95801, + 1.85613, + 1.90623, + 1.87876, + 1.94767, + 1.96351, + 1.94779, 
+ 1.93208, + 1.86909, + 1.88812, + 1.90223, + 1.90754, + 1.90454, + 1.90598, + 1.92436, + 1.95191, + 1.96255, + 1.92846, + 1.91378, + 1.89129, + 1.86858, + 1.83996, + 1.93626, + 1.92607, + 1.93479, + 1.9039, + 1.90641, + 1.96081, + 1.88789, + 1.8548, + 1.87547, + 1.90889, + 1.98396, + 1.85486, + 1.91756, + 1.90111, + 1.92005, + 1.88201, + 1.92666, + 1.86944, + 1.86724, + 1.95319, + 1.89914, + 1.93976, + 1.91426, + 1.93552, + 2.00713, + 1.92827, + 1.93423, + 1.84749, + 1.94963, + 1.94501, + 1.9104, + 1.91973, + 1.85337, + 1.90889, + 1.8707, + 1.91429, + 1.90343, + 1.84598, + 1.90526, + 1.89095, + 1.83412, + 1.89617, + 1.90181, + 1.97153, + 1.93579, + 1.94061, + 1.86137, + 1.95447, + 1.99761, + 1.85934, + 1.91523, + 1.93557, + 1.99958, + 1.95443, + 1.90138, + 1.90683, + 1.86319, + 1.86754, + 1.95339, + 1.99761, + 1.94861, + 1.90535, + 1.9182, + 1.89745, + 1.97264, + 1.96077, + 1.8868, + 1.88885, + 1.92178, + 1.93217, + 1.89323, + 1.90882, + 1.91578, + 1.95125, + 1.89341, + 1.93991, + 1.90315, + 1.94857, + 1.8622, + 1.91969, + 1.93377, + 1.93673, + 1.95238, + 1.90151, + 1.92495, + 1.94783, + 1.85339, + 1.97773, + 1.91755, + 1.93809, + 1.89925, + 1.84476, + 1.87337, + 1.87181, + 1.92659, + 1.93462, + 1.92029, + 1.91292, + 1.94186, + 1.90252, + 1.81919, + 1.90986, + 1.93502, + 1.86957, + 1.88505, + 1.92777, + 1.948, + 1.92198, + 1.97078, + 1.94205, + 1.87305, + 1.88505, + 1.8589, + 1.91265, + 1.90656, + 1.88914, + 1.93699, + 1.88655, + 1.96529, + 1.8761, + 1.86992, + 1.92747, + 1.9751, + 1.98622, + 1.91359, + 1.88929, + 1.94068, + 1.81871, + 1.90393, + 1.91165, + 1.94748, + 1.93084, + 1.94526, + 1.89406, + 1.8824, + 1.9062, + 1.92762, + 1.9497, + 1.9306, + 1.9589, + 1.9359, + 1.89096, + 1.88498, + 1.93576, + 1.93231, + 1.92441, + 1.89613, + 1.90214, + 1.90439, + 1.97123, + 1.93374, + 1.89022, + 1.90001, + 1.91272, + 1.93272, + 1.92404, + 1.85881, + 1.94067, + 1.92159, + 1.91583, + 1.86731, + 1.91677, + 1.98315, + 1.91193, + 1.87902, + 1.92793, + 1.91164, + 1.91652, + 1.95318, + 1.88711, + 1.94685, + 1.87212, + 1.90851, + 1.94687, + 1.93567, + 1.97129, + 1.95667, + 1.90704, + 1.96276, + 1.87802, + 1.94489, + 1.9039, + 1.96104, + 1.93642, + 1.89151, + 1.88871, + 1.95774, + 1.93056, + 1.93682, + 1.9083, + 1.93534, + 1.98085, + 1.96111, + 1.85569, + 1.94889, + 1.95587, + 1.90195, + 1.915, + 1.96066, + 1.88146, + 1.97086, + 1.86486, + 1.8985, + 1.9085, + 1.89878, + 1.95942, + 1.96562, + 1.91221, + 1.9092, + 1.88652, + 1.92158, + 1.94048, + 1.93796, + 1.92643, + 1.85953, + 1.9183, + 1.93001, + 1.98451, + 1.91898, + 1.95028, + 1.95311, + 1.94721, + 1.88326, + 1.95348, + 1.93807, + 1.87572, + 1.94912, + 1.91065, + 1.93433, + 1.98243, + 1.86413, + 1.92531, + 1.92826, + 1.978, + 1.9487, + 1.89589, + 1.84685, + 1.93624, + 1.92262, + 1.93201, + 1.96473, + 1.98637, + 1.88871, + 1.89058, + 1.92831, + 1.93523, + 1.88779, + 1.92556, + 1.99757, + 1.91183, + 1.9853, + 1.94168, + 1.89053, + 1.91543, + 1.90491, + 1.98293, + 1.93557, + 1.90037, + 1.9436, + 1.92631, + 1.81038, + 1.94534, + 1.88524, + 1.90349, + 1.91605, + 1.90754, + 1.9236, + 1.93614, + 1.94948, + 1.93355, + 1.94986, + 1.95426, + 1.92526, + 1.97424, + 1.92613, + 1.96668, + 1.91653, + 1.97163, + 1.96485, + 1.91595, + 1.94231, + 1.92101, + 1.91657, + 1.87641, + 1.90554, + 1.92248, + 1.92945, + 1.96735, + 1.91283, + 1.94713, + 1.87912, + 1.95001, + 1.90563, + 1.98847, + 1.88236, + 1.92784, + 1.93252, + 1.92005, + 1.93973, + 1.86425, + 1.8514, + 1.92832, + 1.88543, + 1.9358, + 1.92336, + 1.88702, + 1.82142, + 1.90662, + 1.88931, + 1.93282, + 1.89019, + 
1.88316, + 1.91902, + 1.95134, + 1.94319, + 1.91982, + 1.94131, + 1.87583, + 1.94846, + 1.93097, + 1.94543, + 1.8536, + 1.87662, + 1.94207, + 1.91342, + 1.94546, + 1.87634, + 1.92166, + 1.85897, + 1.82884, + 1.9593, + 1.9641, + 1.90061, + 1.90405, + 1.97221, + 1.83594, + 1.98778, + 1.88017, + 1.90155, + 1.90856, + 1.89585, + 1.90914, + 1.97795, + 1.91585, + 1.94498, + 1.90108, + 1.84538, + 1.93017, + 1.93581, + 1.91264, + 1.91429, + 1.94952, + 1.94106, + 1.95029, + 1.89125, + 1.94328, + 1.93361, + 1.86939, + 1.96494, + 1.90735, + 1.9212, + 1.97439, + 1.97347, + 1.94139, + 1.94746, + 1.93516, + 1.84338, + 1.95018, + 1.99782, + 1.92026, + 1.92854, + 1.95255, + 1.89613, + 1.93882, + 1.93453, + 1.98261, + 2.02049, + 1.88942, + 1.923, + 1.92665, + 1.95453, + 1.89221, + 1.95892, + 1.91435, + 1.9362, + 1.97908, + 1.92447, + 1.89364, + 1.86999, + 1.94464, + 1.96632, + 1.94083, + 1.8537, + 1.89416, + 2.0029, + 1.84889, + 1.94234, + 1.98936, + 1.8771, + 1.95278, + 1.93761, + 1.8573, + 1.91054, + 1.84765, + 1.95621, + 1.83888, + 1.86302, + 1.94138, + 1.93171, + 1.89087, + 1.91, + 1.88917, + 1.89981, + 1.90445, + 1.89645, + 1.90776, + 1.87894, + 1.94529, + 1.8606, + 1.94202, + 1.9418, + 1.9343, + 1.92812, + 1.93082, + 1.88138, + 1.96359, + 1.92591, + 1.90575, + 1.96048, + 1.85506, + 1.88279, + 1.95842, + 1.92874, + 1.8865, + 1.93879, + 1.89811, + 1.9385, + 1.94514, + 1.87891, + 1.91613, + 1.95585, + 1.89282, + 1.94966, + 1.97594, + 1.96846, + 1.87198, + 1.86709, + 1.82777, + 1.91836, + 1.94214, + 1.92153, + 1.87493, + 1.85685, + 1.88129, + 1.99427, + 1.87287, + 1.92532, + 1.92704, + 1.96969, + 1.93876, + 1.92551, + 1.8888, + 1.92515, + 1.94386, + 1.90357, + 1.9278, + 1.92956, + 1.89503, + 1.8714, + 1.89102, + 1.9132, + 1.93782, + 1.93668, + 1.87965, + 1.86944, + 1.95088, + 1.96413, + 1.91793, + 1.91312, + 1.91736, + 1.88803, + 1.96676, + 1.88643, + 1.91421, + 1.89281, + 1.89071, + 1.94956, + 1.88727, + 1.88991, + 1.94454, + 1.93285, + 1.93214, + 1.92247, + 1.81764, + 1.91856, + 1.92249, + 1.85175, + 1.90399, + 1.88896, + 1.89468, + 1.82241, + 1.8988, + 1.89394, + 1.92889, + 1.90881, + 1.86807, + 1.9418, + 1.8649, + 1.90602, + 1.87121, + 1.90921, + 1.9679, + 1.92221, + 1.91462, + 1.92235, + 1.97157, + 1.95764, + 1.91667, + 1.93295, + 1.89008, + 1.8893, + 1.96022, + 1.85937, + 1.90086, + 1.93088, + 1.88524, + 1.87212, + 1.86629, + 1.92055, + 1.96114, + 1.93551, + 1.85796, + 1.9556, + 1.95127, + 1.94179, + 1.93043, + 1.91846, + 1.98531, + 1.89084, + 1.93306, + 1.94695, + 1.90639, + 1.8969, + 1.88359, + 1.97213, + 1.90512, + 1.87663, + 1.89002, + 1.86999, + 1.90648, + 1.92699, + 1.89338, + 1.88947, + 1.97413, + 1.93204, + 1.92249, + 1.91288, + 1.88437, + 1.89161, + 1.86754, + 1.89254, + 1.91047, + 1.90126, + 1.85587, + 1.9509, + 1.94498, + 1.92925, + 1.93233, + 1.92973, + 1.9512, + 1.90803, + 1.87993, + 1.85393, + 1.90327, + 1.93877, + 1.89326, + 1.91159, + 1.93161, + 1.95061, + 1.92195, + 1.97568, + 1.88993, + 1.89828, + 1.85996, + 1.91697, + 1.90879, + 1.83324, + 1.95449, + 1.9689, + 1.9155, + 1.84016, + 1.86721, + 1.79147, + 1.87974, + 1.94363, + 1.98853, + 1.92054, + 1.92772, + 1.87183, + 1.94988, + 1.94968, + 1.89512, + 1.95872, + 1.86821, + 1.85364, + 1.94803, + 1.89038, + 1.94107, + 1.84185, + 1.8594, + 1.96749, + 1.88824, + 1.90037, + 1.95317, + 1.91184, + 1.93369, + 1.89585, + 1.96196, + 1.96523, + 1.87488, + 1.93907, + 1.93786, + 1.91049, + 2.00867, + 1.93451, + 1.88408, + 1.86725, + 1.8915, + 1.89194, + 1.91198, + 1.92819, + 1.90521, + 1.87293, + 1.94436, + 1.89141, + 1.91207, + 1.93088, + 
1.9009, + 1.97551, + 1.89865, + 1.90232, + 1.87169, + 1.9353, + 1.93459, + 1.87844, + 1.93532, + 1.94951, + 1.87139, + 1.83868, + 1.91593, + 1.90148, + 1.92494, + 1.89296, + 1.89462, + 1.8584, + 1.95049, + 1.86487, + 1.92426, + 1.93875, + 1.89198, + 1.90463, + 1.88866, + 1.96898, + 1.91797, + 1.95272, + 1.96082, + 1.91281, + 1.92643, + 1.92419, + 1.87007, + 1.89544, + 1.94805, + 1.84939, + 1.91176, + 1.85722, + 1.96981, + 1.9299, + 1.88535, + 1.89919, + 1.8869, + 1.95847, + 1.9501, + 1.85081, + 1.92908, + 1.92457, + 1.88456, + 1.87512, + 1.90691, + 1.88777, + 1.92923, + 1.9827, + 1.92265, + 1.94924, + 1.91246, + 1.95389, + 1.93171, + 1.90951, + 1.94819, + 1.89016, + 1.90467, + 1.90228, + 1.85986, + 1.93523, + 1.92172, + 1.89695, + 1.92785, + 1.94854, + 1.84389, + 1.94144, + 1.94048, + 1.85197, + 1.98446, + 1.90687, + 1.96096, + 1.83349, + 1.87997, + 1.87136, + 1.87351, + 1.82067, + 1.96834, + 1.97547, + 1.92412, + 1.90922, + 1.95478, + 1.92194, + 1.92639, + 1.91129, + 1.86798, + 1.88427, + 1.89213, + 1.85861, + 1.92222, + 1.90903, + 1.89439, + 1.93018, + 1.8888, + 1.95262, + 1.9377, + 1.93677, + 1.90286, + 1.94078, + 1.84312, + 1.8817, + 1.88877, + 1.9523, + 1.88364, + 1.97502, + 1.94516, + 1.86082, + 1.98664, + 1.94234, + 1.84198, + 1.91281, + 1.97107, + 1.89681, + 1.86954, + 1.87805, + 1.87422, + 2.00645, + 1.91878, + 1.92243, + 1.83154, + 1.87011, + 1.92654, + 1.90705, + 1.96852, + 1.88474, + 1.90012, + 1.92024, + 1.94105, + 1.93482, + 1.87481, + 1.87886, + 1.95903, + 1.94193, + 1.9475, + 1.92588, + 1.91743, + 1.88132, + 1.88784, + 1.87593, + 1.95391, + 1.92341, + 1.81218, + 1.92909, + 1.89429, + 1.90132, + 1.9699, + 1.86859, + 1.92271, + 1.88409, + 1.85159, + 1.93433, + 1.93513, + 1.9601, + 1.95186, + 1.90971, + 1.92572, + 1.93555, + 1.89075, + 1.91385, + 1.94841, + 1.91123, + 1.89936, + 1.90901, + 1.92289, + 1.92424, + 1.88441, + 1.88779, + 1.91002, + 1.91114, + 1.93361, + 1.95551, + 1.95006, + 1.89988, + 1.96804, + 1.95558, + 1.92827, + 1.88672, + 1.92559, + 1.89571, + 1.88174, + 1.91804, + 1.86285, + 1.91011, + 1.92086, + 1.91331, + 1.88731, + 1.93874, + 1.95702, + 1.86976, + 1.91414, + 1.89549, + 1.94012, + 1.9609, + 1.94449, + 1.88616, + 1.90619, + 1.90171, + 1.95495, + 1.88415, + 1.95539, + 1.94533, + 1.91146, + 1.90992, + 1.907, + 1.85545, + 1.95283, + 1.94047, + 1.95706, + 1.94957, + 1.85915, + 1.8745, + 1.97033, + 1.99545, + 1.88829, + 1.94409, + 1.91418, + 1.86465, + 1.94016, + 1.90693, + 1.87203, + 1.89988, + 1.95208, + 1.92028, + 1.91307, + 2.01021, + 1.9271, + 1.8987, + 1.94369, + 1.88138, + 1.86686, + 1.97555, + 1.94943, + 1.92598, + 1.93391, + 1.86151, + 1.91509, + 1.99467, + 1.88326, + 1.88726, + 1.88975, + 1.86546, + 1.86123, + 1.92961, + 1.95244, + 1.95612, + 1.84435, + 1.86686, + 1.89544, + 1.94486, + 1.93069, + 1.92311, + 1.93712, + 1.93309, + 1.8859, + 1.9022, + 1.84949, + 1.90923, + 1.87092, + 1.88934, + 1.83164, + 1.95605, + 1.88705, + 1.92983, + 1.94384, + 1.85565, + 1.96172, + 1.85169, + 1.92676, + 1.87128, + 1.92088, + 1.91364, + 1.91247, + 1.94429, + 1.93462, + 1.96755, + 1.89588, + 1.94141, + 1.96903, + 1.89872, + 1.93896, + 2.00121, + 1.86917, + 1.90139, + 1.91865, + 1.93595, + 1.86648, + 1.87268, + 1.88051, + 1.89009, + 1.85794, + 1.90544, + 1.88405, + 1.91429, + 1.90028, + 1.89066, + 1.94216, + 1.98899, + 1.92389, + 1.82488, + 1.84803, + 1.98334, + 1.90673, + 1.94713, + 1.9192, + 1.92624, + 1.91717, + 1.91817, + 1.94882, + 1.90997, + 1.94473, + 1.93276, + 1.89714, + 1.93114, + 1.89048, + 1.93178, + 1.91891, + 1.94125, + 1.87324, + 1.87242, + 1.90996, + 
1.91507, + 1.93386, + 1.93872, + 1.9041, + 1.88523, + 1.96495, + 1.9513, + 1.8948, + 1.87202, + 1.89115, + 1.94977, + 2.01341, + 1.90988, + 1.99898, + 1.909, + 1.93826, + 1.94539, + 1.93217, + 1.86049, + 1.87217, + 1.89878, + 1.89198, + 1.94106, + 1.94684, + 1.9271, + 1.95768, + 1.9989, + 1.86892, + 1.90808, + 1.89044, + 1.89065, + 1.98894, + 1.91314, + 1.89747, + 1.89802, + 1.94524, + 1.91024, + 1.9598, + 1.936, + 1.94862, + 1.93858, + 1.93679, + 1.90085, + 1.88925, + 1.91091, + 1.88977, + 1.8797, + 1.88541, + 1.87475, + 1.87681, + 1.88708, + 1.92756, + 2.00702, + 1.9545, + 1.91741, + 1.87069, + 1.85443, + 1.92229, + 1.92842, + 1.80193, + 1.86518, + 1.89555, + 1.91374, + 1.94372, + 1.90606, + 1.88833, + 1.90511, + 1.83957, + 1.91194, + 1.95785, + 1.88155, + 1.89665, + 1.89393, + 1.86371, + 1.86706, + 1.96444, + 1.86699, + 1.89033, + 1.89523, + 1.97265, + 1.90867, + 1.91646, + 1.90571, + 1.96069, + 1.95405, + 1.90078, + 1.90857, + 1.91398, + 1.91386, + 1.93509, + 1.88581, + 1.89403, + 1.89226, + 1.85995, + 1.86663, + 1.88968, + 1.96037, + 1.98757, + 1.91499, + 1.87869, + 1.92596, + 1.91781, + 1.89947, + 1.90601, + 1.90036, + 1.90024, + 1.90474, + 1.89433, + 1.90777, + 1.94925, + 1.94041, + 1.89188, + 1.83982, + 1.93134, + 1.84717, + 1.93441, + 1.94629, + 1.9071, + 1.9211, + 1.93776, + 1.93955, + 1.91847, + 1.79408, + 1.99092, + 1.90469, + 1.86877, + 1.9637, + 1.96642, + 1.95072, + 1.95473, + 1.90777, + 1.88362, + 1.93889, + 1.90448, + 1.89116, + 1.9184, + 1.98457, + 1.93922, + 1.8291, + 1.90257, + 1.93626, + 1.96857, + 1.86036, + 1.92042, + 1.90912, + 1.94348, + 1.9657, + 1.96312, + 1.92467, + 1.90862, + 1.89561, + 1.8834, + 1.92688, + 1.89745, + 1.90251, + 1.95188, + 1.84629, + 1.87373, + 1.91895, + 1.91026, + 1.91554, + 1.92764, + 1.93096, + 1.92018, + 1.87516, + 1.86704, + 1.89069, + 1.90745, + 1.89173, + 1.87129, + 1.87234, + 1.93767, + 1.91211, + 2.02745, + 1.95784, + 1.91843, + 1.96069, + 1.91247, + 1.8916, + 1.88483, + 1.91833, + 1.91503, + 1.8709, + 1.93441, + 1.84627, + 1.89737, + 1.92913, + 1.93305, + 1.91726, + 1.92321, + 1.82371, + 1.86448, + 1.88605, + 1.90859, + 1.86578, + 1.90981, + 1.87837, + 1.90053, + 1.94463, + 1.88724, + 1.97309, + 1.96308, + 1.90104, + 1.95781, + 1.91869, + 1.87905, + 1.87807, + 1.90662, + 1.88738, + 1.91886, + 1.94197, + 1.91169, + 1.86747, + 1.9388, + 1.90926, + 1.92888, + 1.93188, + 1.84332, + 1.93333, + 1.84837, + 1.95958, + 1.95456, + 1.90826, + 1.92018, + 1.94273, + 1.95068, + 1.88269, + 1.90586, + 1.95305, + 1.9392, + 1.903, + 1.94829, + 1.91927, + 1.98141, + 1.85118, + 1.92681, + 1.94982, + 1.93264, + 1.89614, + 1.95254, + 1.87918, + 1.94932, + 1.92734, + 1.88766, + 1.90773, + 1.90834, + 1.91493, + 1.90093, + 1.88408, + 1.89604, + 1.93622, + 1.89698, + 1.86012, + 1.90165, + 1.95251, + 1.87085, + 1.86935, + 1.90496, + 1.91094, + 1.92247, + 1.9682, + 1.87208, + 1.96818, + 1.92362, + 1.89818, + 1.95388, + 1.88612, + 1.96245, + 1.88919, + 1.90593, + 1.92343, + 1.92473, + 1.93183, + 1.8816, + 1.90611, + 1.94958, + 1.92784, + 1.90084, + 1.9342, + 1.94704, + 1.88567, + 1.93058, + 1.94168, + 1.85923, + 1.86745, + 1.91224, + 1.87596, + 1.91232, + 1.85541, + 1.89238, + 1.86553, + 1.92008, + 1.9717, + 1.8919, + 1.90528, + 1.92503, + 1.94822, + 1.82775, + 1.87351, + 1.87301, + 1.89434, + 1.91861, + 1.95537, + 1.99002, + 1.94804, + 1.88884, + 1.92329, + 1.93849, + 1.95217, + 1.83058, + 1.97018, + 1.90426, + 1.94702, + 1.92879, + 1.89519, + 1.86178, + 1.95132, + 1.91848, + 1.92129, + 1.89435, + 1.8866, + 1.95164, + 1.95711, + 1.8963, + 1.91726, + 1.90109, + 
1.85152, + 1.94412, + 1.90523, + 1.93546, + 1.88843, + 1.88712, + 1.8666, + 1.94606, + 1.93585, + 1.92239, + 1.89381, + 1.89814, + 1.85074, + 1.81513, + 1.95627, + 1.89675, + 1.92499, + 1.91972, + 1.92959, + 1.91764, + 1.87262, + 1.94673, + 1.85866, + 1.95893, + 1.89169, + 1.90053, + 1.9027, + 1.91496, + 1.91936, + 1.91936, + 1.84974, + 1.96991, + 1.89198, + 1.897, + 1.93511, + 1.85072, + 1.87805, + 1.90793, + 1.92024, + 1.93477, + 1.90126, + 1.91332, + 1.86085, + 1.89997, + 1.95678, + 1.9112, + 1.95388, + 1.93932, + 1.90213, + 1.88809, + 1.90328, + 1.93446, + 1.92292, + 1.85193, + 1.8979, + 1.89242, + 1.9464, + 1.95242, + 1.90669, + 1.92154, + 1.94324, + 1.9411, + 1.94989, + 1.94142, + 1.86209, + 1.92119, + 1.88105, + 1.89427, + 1.86823, + 1.96413, + 1.85534, + 1.95653, + 1.82501, + 1.89821, + 1.94377, + 1.89335, + 1.90368, + 1.92903, + 1.9084, + 1.98078, + 1.93277, + 1.82945, + 1.94855, + 1.84181, + 1.93801, + 1.91062, + 1.90053, + 1.90337, + 1.95322, + 1.90717, + 1.90905, + 1.86396, + 1.92125, + 1.93364, + 1.889, + 1.87918, + 1.89981, + 1.90823, + 1.87888, + 1.9678, + 1.88769, + 1.907, + 1.8804, + 1.88978, + 1.91382, + 1.90217, + 1.87691, + 1.9691, + 1.97763, + 1.86138, + 1.92238, + 1.95277, + 1.88592, + 1.91714, + 1.89184, + 1.8925, + 1.92222, + 1.84047, + 1.83724, + 1.83995, + 1.92514, + 1.92017, + 1.92259, + 1.91711, + 1.83503, + 1.90669, + 1.89425, + 1.87261, + 1.93384, + 1.90074, + 1.85623, + 1.93333, + 1.87113, + 1.85687, + 1.95622, + 1.87921, + 1.98096, + 1.93047, + 1.90115, + 1.87306, + 1.94826, + 1.88986, + 1.91819, + 1.91592, + 1.91697, + 1.89813, + 1.93293, + 1.89999, + 1.87325, + 1.85609, + 1.91779, + 1.86093, + 1.86151, + 1.94337, + 1.9009, + 1.93174, + 1.85084, + 1.93166, + 1.91196, + 1.99994, + 1.89362, + 1.94074, + 1.81413, + 1.89013, + 1.93026, + 1.95717, + 1.90888, + 1.79356, + 1.9427, + 1.912, + 1.92505, + 1.91821, + 1.94834, + 1.95647, + 1.87896, + 1.9324, + 1.8497, + 1.95646, + 1.9219, + 1.89331, + 1.91809, + 1.91975, + 1.90753, + 1.92783, + 1.92949, + 1.94767, + 1.88343, + 1.91725, + 1.88292, + 1.87831, + 1.93308, + 1.94093, + 1.84983, + 1.99494, + 1.95111, + 1.85053, + 1.94202, + 1.88058, + 1.87813, + 1.92712, + 1.90368, + 1.88393, + 1.90206, + 1.91592, + 1.947, + 1.93779, + 1.89352, + 1.88939, + 1.86558, + 1.92518, + 1.92073, + 2.01221, + 1.93862, + 1.92983, + 1.90029, + 1.87514, + 1.91934, + 1.91155, + 1.83163, + 1.90525, + 1.92033, + 1.86115, + 1.89532, + 1.9774, + 1.92514, + 1.83991, + 1.91304, + 1.864, + 1.95481, + 1.83291, + 1.85941, + 1.94623, + 1.94252, + 1.84162, + 1.89438, + 1.94786, + 1.88124, + 1.93927, + 1.90921, + 1.88524, + 1.87148, + 1.88094, + 1.92003, + 1.9175, + 1.90807, + 1.86856, + 1.90959, + 1.90706, + 1.8901, + 1.89895, + 1.90219, + 1.8708, + 1.8676, + 1.94945, + 1.84765, + 1.96701, + 1.95951, + 1.89101, + 1.82687, + 1.96857, + 1.88662, + 1.8417, + 1.86179, + 1.94273, + 1.91387, + 1.92779, + 1.94725, + 1.93562, + 1.93647, + 1.92331, + 1.87937, + 1.89649, + 1.9014, + 1.9009, + 1.84864, + 1.89171, + 1.91525, + 1.93123, + 1.92092, + 1.95457, + 1.865, + 1.88184, + 1.92551, + 1.94116, + 1.85661, + 1.89485, + 1.86615, + 1.87844, + 1.94995, + 1.9472, + 1.88099, + 1.89887, + 1.90874, + 1.94508, + 1.90148, + 1.92045, + 1.88876, + 1.86274, + 1.91966, + 1.89405, + 1.81976, + 1.88538, + 1.89813, + 1.84851, + 1.89373, + 1.92157, + 1.9361, + 1.96239, + 1.9061, + 1.93451, + 1.87335, + 1.90411, + 1.89713, + 1.87754, + 1.92505, + 1.93949, + 1.95683, + 1.87564, + 1.93017, + 1.88748, + 1.91734, + 1.8943, + 1.90121, + 1.87702, + 1.91119, + 1.99068, + 1.84873, + 
1.90968, + 1.84008, + 1.92501, + 1.88215, + 1.86165, + 1.83472, + 1.93535, + 1.83038, + 1.87687, + 1.87947, + 1.868, + 1.9305, + 1.88055, + 1.86326, + 1.84779, + 1.95615, + 1.89223, + 1.91743, + 1.90109, + 1.89156, + 1.95531, + 1.89797, + 1.91833, + 1.89238, + 1.86095, + 1.95222, + 2.00292, + 1.89642, + 1.86344, + 1.93019, + 1.91423, + 1.94333, + 1.92508, + 1.86868, + 1.92105, + 1.9369, + 1.93871, + 1.83597, + 1.81581, + 1.92172, + 1.90453, + 1.90467, + 1.88393, + 1.87411, + 1.87974, + 1.88772, + 1.93826, + 1.95298, + 1.83295, + 1.88548, + 1.89272, + 1.89873, + 1.8992, + 1.93869, + 1.86985, + 1.92996, + 1.92858, + 1.90236, + 1.97189, + 1.86641, + 1.89065, + 1.84123, + 1.93955, + 1.91118, + 1.86707, + 1.96107, + 1.89974, + 1.8701, + 1.91322, + 1.91088, + 1.90301, + 1.85358, + 1.84664, + 1.91812, + 1.84288, + 1.83288, + 1.87466, + 1.89709, + 1.82498, + 1.86155, + 1.8756, + 1.8999, + 1.91252, + 1.95948, + 1.90237, + 1.95671, + 1.81797, + 1.92749, + 1.88567, + 1.90553, + 1.87891, + 1.94909, + 1.9126, + 1.89714, + 1.88499, + 1.94698, + 1.85319, + 1.85645, + 1.87097, + 1.85027, + 1.86751, + 1.90263, + 1.9193, + 1.94909, + 1.91692, + 1.88033, + 1.87837, + 1.88316, + 1.95097, + 1.86339, + 1.87371, + 1.89056, + 1.92129, + 1.94876, + 1.90219, + 1.89103, + 1.91283, + 1.92891, + 1.87829, + 1.85374, + 1.84017, + 1.90724, + 1.91175, + 1.94451, + 1.92106, + 1.98218, + 1.89814, + 1.88245, + 1.8982, + 1.87257, + 1.88418, + 1.85654, + 1.9414, + 1.89919, + 1.88024, + 1.91836, + 1.88946, + 1.88392, + 1.92315, + 1.91853, + 1.87337, + 1.93152, + 1.87209, + 1.93287, + 1.9059, + 1.90559, + 1.93138, + 1.95418, + 1.89373, + 1.88532, + 1.9267, + 1.91591, + 1.8972, + 1.93243, + 1.9273, + 1.91034, + 1.87855, + 1.87658, + 1.90628, + 1.85251, + 1.93004, + 1.96931, + 1.83961, + 1.89049, + 1.90444, + 1.81201, + 1.85224, + 1.94652, + 1.88548, + 1.98069, + 1.95921, + 1.88406, + 1.92122, + 1.89853, + 1.8639, + 1.85833, + 1.8679, + 1.84291, + 1.90414, + 1.89853, + 1.91067, + 1.89156, + 1.88756, + 1.97128, + 1.8454, + 1.97562, + 1.9539, + 1.89481, + 1.94946, + 1.92226, + 1.98704, + 1.9365, + 1.88799, + 1.92376, + 1.92317, + 1.91839, + 1.91388, + 1.91198, + 1.88888, + 1.88499, + 1.88869, + 1.87937, + 1.93176, + 1.9246, + 1.96274, + 1.91646, + 1.91014, + 1.93027, + 1.90069, + 1.93918, + 1.96957, + 1.87496, + 1.90658, + 1.91793, + 1.87122, + 1.87289, + 1.94557, + 1.86041, + 1.96009, + 1.93872, + 1.91626, + 1.85837, + 1.89121, + 1.86614, + 1.85229, + 1.85726, + 1.92826, + 1.98489, + 1.94296, + 1.91414, + 1.93129, + 1.90846, + 1.89334, + 1.87587, + 1.91529, + 1.96049, + 1.90679, + 1.86906, + 1.94594, + 1.92161, + 1.8422, + 1.92224, + 1.8426, + 1.85511, + 1.84221, + 1.85076, + 1.89198, + 1.92349, + 1.88173, + 1.92207, + 1.92661, + 2.00454, + 1.92071, + 1.85754, + 1.94825, + 1.94255, + 1.89022, + 1.86921, + 1.88642, + 1.95832, + 1.88899, + 1.90084, + 1.93382, + 1.91946, + 1.83539, + 1.93374, + 1.93504, + 1.91402, + 1.93458, + 1.87769, + 1.88379, + 1.88181, + 1.91467, + 1.91502, + 1.95188, + 1.88866, + 1.89681, + 1.84433, + 1.87122, + 1.91535, + 1.91722, + 1.97517, + 1.88158, + 1.85847, + 1.93695, + 1.8908, + 1.89423, + 1.8416, + 1.91528, + 1.92174, + 1.89173, + 1.88147, + 1.95144, + 1.94883, + 1.90245, + 1.97829, + 1.83781, + 1.9311, + 1.84968, + 1.93573, + 1.90225, + 1.87028, + 1.97623, + 1.9018, + 1.87328, + 1.88192, + 1.84538, + 1.8741, + 1.8915, + 1.93982, + 2.02884, + 1.89347, + 1.90958, + 1.91429, + 1.91233, + 1.92402, + 1.89165, + 1.8967, + 1.94119, + 1.8987, + 1.88061, + 1.90134, + 1.89399, + 1.91044, + 1.92534, + 1.89951, + 
1.90237, + 1.93234, + 1.92213, + 1.91278, + 1.92844, + 1.97111, + 1.88481, + 1.8492, + 1.87132, + 1.94349, + 1.90489, + 1.82446, + 1.91877, + 1.85686, + 1.84299, + 1.95147, + 1.89941, + 1.91305, + 2.00956, + 1.88445, + 1.96234, + 1.95297, + 1.87819, + 1.87843, + 1.93676, + 1.86222, + 1.91974, + 1.87604, + 1.88549, + 1.91261, + 1.97055, + 1.88517, + 1.92968, + 1.88643, + 1.84512, + 1.8807, + 1.92284, + 1.89046, + 1.85794, + 1.94384, + 1.93897, + 1.88314, + 1.93296, + 1.89242, + 1.92083, + 1.91838, + 1.86341, + 1.87536, + 1.87639, + 1.89657, + 1.90851, + 1.91088, + 1.8814, + 1.92377, + 2.01336, + 1.90862, + 1.87602, + 1.81566, + 1.93134, + 1.97, + 1.87586, + 1.91137, + 1.91695, + 1.91872, + 1.95924, + 1.92802, + 1.89402, + 1.89174, + 1.80352, + 1.82789, + 1.93425, + 1.96918, + 1.84852, + 1.88705, + 1.88775, + 1.83824, + 1.83676, + 1.91337, + 1.844, + 1.89973, + 1.83667, + 1.91701, + 1.82666, + 1.87823, + 1.97091, + 1.93496, + 1.88823, + 1.88559, + 1.91377, + 1.89151, + 1.89035, + 1.90105, + 1.85569, + 1.94203, + 1.87719, + 1.89065, + 1.90371, + 1.88084, + 1.87331, + 1.8688, + 1.90522, + 1.86918, + 1.9694, + 1.85483, + 1.86122, + 1.91788, + 1.91176, + 1.92413, + 1.87041, + 1.85806, + 1.8731, + 1.88539, + 1.91566, + 1.89919, + 1.91097, + 1.96104, + 1.89508, + 1.98339, + 1.80513, + 1.95638, + 1.85669, + 1.89453, + 1.92779, + 1.91355, + 1.93373, + 1.95864, + 1.86706, + 1.92964, + 1.90326, + 1.86789, + 1.94376, + 1.91442, + 1.8579, + 1.88882, + 1.99484, + 1.86896, + 1.95865, + 1.81779, + 1.88087, + 1.86961, + 1.8748, + 1.9451, + 1.92931, + 1.86442, + 1.87312, + 1.93511, + 1.9308, + 1.83393, + 1.89186, + 1.82268, + 1.86841, + 1.93666, + 1.89858, + 1.90007, + 1.86347, + 1.95636, + 1.86894, + 1.83355, + 1.90367, + 1.93889, + 1.88893, + 1.91209, + 1.87138, + 1.92302, + 1.86705, + 1.92834, + 1.89954, + 1.95951, + 1.9608, + 1.96239, + 1.9384, + 1.90386, + 1.88728, + 1.92158, + 1.87991, + 1.92063, + 1.91518, + 1.90097, + 1.90791, + 1.81265, + 1.96855, + 1.91688, + 1.89643, + 1.88704, + 1.92988, + 1.86394, + 1.93382, + 1.87782, + 1.87375, + 1.82157, + 1.92651, + 1.86742, + 1.98795, + 1.90446, + 1.85796, + 1.97362, + 2.0011, + 1.90826, + 1.92485, + 1.88367, + 1.91704, + 1.90442, + 1.82834, + 1.90826, + 1.89689, + 1.84038, + 1.8916, + 1.90616, + 1.90907, + 1.87936, + 1.89695, + 1.89878, + 1.95948, + 1.86516, + 1.93328, + 1.94128, + 1.87707, + 1.8711, + 1.89763, + 1.93972, + 1.97389, + 1.93522, + 1.93064, + 1.89938, + 1.92767, + 1.91503, + 1.91738, + 1.91744, + 1.93042, + 1.85629, + 1.94058, + 1.88623, + 1.98335, + 1.87407, + 1.95695, + 1.90957, + 1.9377, + 1.89805, + 1.9069, + 1.89601, + 1.89502, + 1.90543, + 1.95699, + 1.90084, + 1.92712, + 1.8987, + 1.82098, + 1.88771, + 1.89413, + 1.96447, + 1.86617, + 1.86737, + 1.94538, + 1.89292, + 1.85675, + 1.94584, + 1.87575, + 1.88465, + 1.94316, + 1.85506, + 1.87099, + 1.88731, + 1.94448, + 1.93352, + 1.92977, + 1.95946, + 1.91709, + 1.94619, + 1.91751, + 1.91746, + 1.91118, + 1.95234, + 1.88201, + 1.85777, + 1.92093, + 1.92748, + 1.89977, + 1.85723, + 1.84009, + 1.89894, + 1.86061, + 1.87516, + 1.89148, + 1.91135, + 1.92271, + 1.79798, + 1.93205, + 1.87752, + 1.92293, + 1.89662, + 1.89602, + 1.90306, + 1.91224, + 1.85811, + 1.91647, + 1.86096, + 1.89767, + 1.87871, + 1.92366, + 1.89946, + 1.93193, + 1.83065, + 1.8923, + 1.93887, + 1.89284, + 1.93711, + 1.89709, + 1.89451, + 1.95809, + 1.88105, + 1.86061, + 1.90346, + 1.94777, + 1.93241, + 1.88944, + 1.91681, + 1.89256, + 1.89185, + 1.92332, + 1.88691, + 1.87562, + 1.90006, + 1.95136, + 1.8701, + 1.92814, + 
1.8466, + 1.92897, + 1.88078, + 1.85739, + 1.86902, + 1.93377, + 1.97361, + 1.8194, + 1.92161, + 1.92265, + 1.90185, + 1.88903, + 1.90399, + 1.9202, + 1.90571, + 1.90991, + 1.84729, + 1.90296, + 1.93332, + 1.86185, + 1.93006, + 1.92773, + 1.9134, + 1.90089, + 1.88254, + 1.93349, + 1.84782, + 1.91966, + 1.85123, + 1.88017, + 1.88678, + 1.96179, + 1.96911, + 1.90514, + 1.91314, + 1.90974, + 1.82423, + 1.82535, + 1.85607, + 1.87597, + 1.94739, + 1.85459, + 1.88782, + 1.92344, + 1.95696, + 1.88421, + 1.88526, + 1.88501, + 1.8607, + 1.9309, + 1.87087, + 1.91492, + 1.85231, + 1.9419, + 1.8767, + 1.90953, + 1.92177, + 1.89258, + 1.89515, + 1.92755, + 1.92931, + 1.8743, + 1.88694, + 1.89603, + 1.90079, + 1.94133, + 1.90038, + 1.87593, + 1.95186, + 1.94273, + 1.91541, + 1.81544, + 1.88674, + 1.86013, + 1.81602, + 1.86247, + 1.84502, + 1.91118, + 1.94237, + 1.86405, + 1.91282, + 1.89009, + 1.94248, + 1.89708, + 1.91653, + 1.93199, + 1.8292, + 1.85084, + 1.93445, + 1.90773, + 2.00349, + 1.8557, + 1.86076, + 1.92023, + 1.93303, + 1.88839, + 1.90509, + 1.94477, + 1.95067, + 1.9304, + 1.8897, + 1.90505, + 1.8982, + 1.92995, + 1.92853, + 1.8263, + 1.95808, + 2.00245, + 1.90518, + 1.90879, + 1.88331, + 1.79796, + 1.93757, + 1.94194, + 1.91827, + 1.88548, + 1.90384, + 1.88876, + 1.97322, + 1.8935, + 1.90085, + 1.89472, + 1.96149, + 1.96135, + 1.92016, + 1.85943, + 1.87931, + 1.82677, + 1.91255, + 1.94468, + 1.89498, + 1.89288, + 1.89087, + 1.93944, + 1.90928, + 1.88224, + 1.86194, + 1.89155, + 1.91813, + 1.89934, + 1.89301, + 1.89099, + 1.94297, + 1.89574, + 1.97311, + 1.91574, + 1.89061, + 1.94327, + 1.8543, + 1.85289, + 1.87397, + 1.92724, + 1.89987, + 1.9061, + 1.8473, + 1.8511, + 1.92708, + 1.89427, + 1.93657, + 1.89666, + 1.85442, + 1.97243, + 1.88189, + 1.89221, + 1.90266, + 1.91751, + 1.85089, + 1.90161, + 1.91781, + 1.90503, + 1.94103, + 1.90623, + 1.89949, + 1.86593, + 1.92192, + 1.87517, + 1.90302, + 1.82033, + 1.89596, + 1.89075, + 1.89339, + 1.87827, + 1.89167, + 1.90781, + 1.92155, + 1.87601, + 1.90721, + 1.93222, + 1.8362, + 1.87572, + 1.87687, + 1.86344, + 1.92916, + 1.83857, + 1.88292, + 1.94343, + 1.88509, + 1.92433, + 1.85716, + 1.90937, + 1.86974, + 1.88366, + 1.91592, + 1.93797, + 1.9024, + 1.86413, + 1.99078, + 1.94494, + 1.87519, + 1.84845, + 1.89118, + 1.91975, + 1.87122, + 1.80652, + 1.95788, + 1.95053, + 1.91417, + 1.90344, + 1.94345, + 1.98127, + 1.90647, + 1.8851, + 1.84559, + 1.88694, + 1.91451, + 1.90452, + 1.95527, + 1.9752, + 1.90947, + 1.93896, + 1.91568, + 1.9477, + 1.93282, + 1.82454, + 1.87918, + 1.85753, + 1.87004, + 1.92014, + 1.87878, + 1.86111, + 1.9126, + 1.90152, + 1.85139, + 1.85931, + 1.8265, + 1.89338, + 1.81848, + 1.89513, + 1.8254, + 1.84018, + 1.96416, + 1.88336, + 1.93115, + 1.94685, + 1.90555, + 1.91619, + 1.8464, + 1.87027, + 1.90489, + 1.89347, + 1.8676, + 1.95477, + 1.82259, + 1.9387, + 1.90086, + 1.90641, + 1.86244, + 1.91928, + 1.86466, + 1.8524, + 1.89537, + 1.89803, + 1.86552, + 1.93545, + 1.89996, + 1.98381, + 1.89434, + 2.00183 + ] + }, + "mem-allocated-bytes": { + "start_step": 0, + "end_step": 100000, + "step_interval": 5, + "values": [ + 1117047808.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
+        1117048320.0,
+        1117048320.0,
+        1117048320.0,
[... 1117048320.0 repeated for the remainder of this golden-values array ...]
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 1117048320.0, + 
+ [golden-value data: a long run of repeated per-iteration values 1117048320.0, followed by 1118882816.0 once and then a long run of repeated 1118883328.0]
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 1118883328.0, + 
1118883328.0, + 1118883328.0, + 1118883328.0, + [… the single value 1118883328.0 repeats unchanged for every remaining entry of this golden-values array …] + 1118883328.0, + 1118883328.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 1000, + "step_interval": 5, + 
"values": [ + 0.45353, + 0.23209, + 0.25297, + 0.23205, + 0.2415, + 0.23918, + 0.24626, + 0.2488, + 0.2476, + 0.23596, + 0.2485, + 0.23586, + 0.24061, + 0.23338, + 0.24468, + 0.23241, + 0.23571, + 0.23584, + 0.24489, + 0.23889, + 0.23646, + 0.24278, + 0.25148, + 0.24502, + 0.23865, + 0.2462, + 0.24847, + 0.24321, + 0.24593, + 0.2318, + 0.23928, + 0.23065, + 0.24653, + 0.25709, + 0.24503, + 0.25272, + 0.23876, + 0.23279, + 0.24315, + 0.24757, + 0.23216, + 0.2345, + 0.23488, + 0.23029, + 0.23721, + 0.23297, + 0.23275, + 0.24479, + 0.23101, + 0.23709, + 0.23499, + 0.24015, + 0.22428, + 0.22672, + 0.23275, + 0.23251, + 0.24233, + 0.22902, + 0.23811, + 0.23007, + 0.22896, + 0.22706, + 0.23094, + 0.23004, + 0.2316, + 0.23295, + 0.23045, + 0.23442, + 0.2372, + 0.2457, + 0.24889, + 0.24452, + 0.24207, + 0.23029, + 0.23179, + 0.23908, + 0.23194, + 0.23722, + 0.23168, + 0.22972, + 0.23308, + 0.23595, + 0.23116, + 0.23601, + 0.22899, + 0.22491, + 0.23136, + 0.23255, + 0.23006, + 0.23447, + 0.24359, + 0.23347, + 0.23242, + 0.23813, + 0.23653, + 0.23156, + 0.23175, + 0.22917, + 0.23357, + 0.23801, + 0.23139, + 0.24071, + 0.2432, + 0.23216, + 0.23038, + 0.23623, + 0.23784, + 0.24029, + 0.23416, + 0.2287, + 0.23405, + 0.22745, + 0.23034, + 0.23069, + 0.23327, + 0.23354, + 0.26181, + 0.23973, + 0.24615, + 0.24032, + 0.23533, + 0.23077, + 0.24415, + 0.24273, + 0.22938, + 0.23886, + 0.23963, + 0.23902, + 0.24358, + 0.23909, + 0.23603, + 0.23088, + 0.23813, + 0.23879, + 0.22401, + 0.22639, + 0.22532, + 0.23021, + 0.23264, + 0.23304, + 0.22785, + 0.23129, + 0.2273, + 0.2342, + 0.23183, + 0.24365, + 0.23386, + 0.22935, + 0.22818, + 0.23377, + 0.23758, + 0.23452, + 0.23466, + 0.23651, + 0.22953, + 0.23245, + 0.23621, + 0.23631, + 0.23014, + 0.23192, + 0.2339, + 0.22968, + 0.22665, + 0.22848, + 0.22875, + 0.22621, + 0.23896, + 0.23524, + 0.22545, + 0.22718, + 0.22611, + 0.22976, + 0.22134, + 0.2263, + 0.23067, + 0.23293, + 0.22112, + 0.22919, + 0.2383, + 0.23477, + 0.22381, + 0.2317, + 0.24013, + 0.23142, + 0.22907, + 0.2316, + 0.23856, + 0.22676, + 0.22578, + 0.22978, + 0.23092, + 0.2225, + 0.22875, + 0.22386, + 0.23257, + 0.23442, + 0.22749, + 0.22365, + 0.22888, + 0.22815 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml index 64784c36a..5cc9a2e0d 100644 --- a/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml @@ -3,44 +3,38 @@ ENV_VARS: NVTE_ALLOW_NONDETERMINISTIC_ALGO: '1' NVTE_FLASH_ATTN: '0' NVTE_FUSED_ATTN: '0' - TEST_TYPE: 'release' - MODEL_ARGS: # T5 model args --encoder-num-layers: 12 --decoder-num-layers: 12 --hidden-size: 768 - --num-attention-heads: 12 + --num-attention-heads: 12 --kv-channels: 64 --ffn-hidden-size: 3072 --encoder-seq-length: 512 --decoder-seq-length: 128 - --max-position-embeddings: 512 + --max-position-embeddings: 512 --init-method-std: 0.015 - # Training args - --micro-batch-size: 32 - --global-batch-size: 512 - --train-iters: 100000 - --weight-decay: 1e-2 - --clip-grad: 1.0 + --micro-batch-size: 32 + --global-batch-size: 512 + --train-iters: 100000 + --weight-decay: 1e-2 + --clip-grad: 1.0 --bf16: true --lr: 0.0001 - --lr-decay-style: linear - --min-lr: 1.0e-5 - --lr-warmup-fraction: .01 + --lr-decay-style: linear + --min-lr: 1.0e-5 + --lr-warmup-fraction: .01 --distributed-backend: nccl - # Transformer Engine args --use-mcore-models: true --transformer-impl: 
transformer_engine - # Model parallel --tensor-model-parallel-size: 4 - --pipeline-model-parallel-size: 1 + --pipeline-model-parallel-size: 1 --encoder-pipeline-model-parallel-size: 0 - # Data args --data-path: ${DATA_BLEND} --vocab-file: ${DATA_PATH}/bert-large-cased-vocab.txt @@ -48,11 +42,10 @@ MODEL_ARGS: --split: 99982,9,9 --data-cache-path: ${DATA_CACHE_PATH} --vocab-extra-ids: 100 - # EVAL_AND_LOGGING_ARGS --log-interval: 100 --save-interval: 2000 - --eval-interval: 1000 + --eval-interval: 1000 --save: ${CHECKPOINT_PATH} --load: ${CHECKPOINT_PATH} --eval-iters: 10 @@ -64,4 +57,4 @@ MODEL_ARGS: --log-validation-ppl-to-tensorboard: true --timing-log-level: 2 --wandb-project: megatron-core-release-runs - --wandb-exp-name: ${WANDB_EXPERIMENT} \ No newline at end of file + --wandb-exp-name: ${WANDB_EXPERIMENT} diff --git a/tests/functional_tests/test_scripts/retro/pretrain_retro_distributed_test.sh b/tests/functional_tests/test_scripts/retro/pretrain_retro_distributed_test.sh deleted file mode 100755 index 9501d9d40..000000000 --- a/tests/functional_tests/test_scripts/retro/pretrain_retro_distributed_test.sh +++ /dev/null @@ -1,169 +0,0 @@ -#! /bin/bash - -echo "------ARGUMENTS LIST --------" -for ARGUMENT in "$@" -do - KEY=$(echo $ARGUMENT | cut -f1 -d=) - - KEY_LENGTH=${#KEY} - VALUE="${ARGUMENT:$KEY_LENGTH+1}" - - export "$KEY"="$VALUE" - echo "$KEY=$VALUE" -done -echo "---------------------------------" - -set -exo pipefail -if [[ -z $MBS ]]; then MBS=4; fi - -GPUS_PER_NODE=8 -# Change for multinode config -MASTER_ADDR=localhost -MASTER_PORT=6000 -NODE_RANK=0 -WORLD_SIZE=$(($GPUS_PER_NODE*$NUM_NODES)) - -command="export CUDA_DEVICE_MAX_CONNECTIONS=1;" - -TRANSFORMER_IMPL=local -TRAINING_DTYPE=bf16 - -USE_LEGACY=1 -if [[ $USE_CORE -eq 1 ]]; then - echo "Running using megatron core" - TRANSFORMER_IMPL=local - TRAINING_DTYPE=bf16 - command="$command export NVTE_ALLOW_NONDETERMINISTIC_ALGO=0;" - unset USE_LEGACY - export NVTE_ALLOW_NONDETERMINISTIC_ALGO=0 -fi - -if [[ $USE_TE -eq 1 ]]; then - echo "Running with TransformerEngine ..." - TRANSFORMER_IMPL=transformer_engine - TRAINING_DTYPE=bf16 -else - echo "Running with local transformer implementation ..." -fi - -if [[ $CHECKPOINT_RESUME_TEST -eq 1 ]]; then - echo "Running checkpoint resume test..." 
- __SAVE_INTERVAL=50 - if [[ $MAX_STEPS -ne 100 ]]; then - echo "Overriding MAX_STEPS=100" - MAX_STEPS=100 - fi -else - __SAVE_INTERVAL=10000 # inf -fi -set +x -# Runs the "345M" parameter model -DISTRIBUTED_ARGS="--max-restarts 3 --nproc_per_node $GPUS_PER_NODE --nnodes $NUM_NODES" - -build_args() { - ARGS=" \ - --exit-interval $MAX_STEPS \ - \ - --recompute-activations \ - --use-flash-attn \ - --apply-layernorm-1p \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --no-position-embedding \ - --use-rotary-position-embeddings \ - --rotary-percent 0.5 \ - --swiglu \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --exit-duration-in-mins 220 \ - --tensor-model-parallel-size $TP_SIZE \ - --pipeline-model-parallel-size 1 \ - --num-layers 24 \ - --hidden-size 1024 \ - --num-attention-heads 16 \ - --seq-length 2048 \ - --max-position-embeddings 2048 \ - --micro-batch-size $MBS \ - --global-batch-size 256 \ - --train-samples 100000 \ - --lr-decay-samples 99000 \ - --lr-warmup-samples 1000 \ - --lr 2.5e-5 \ - --min-lr 2.5e-6 \ - --lr-decay-style cosine \ - --log-interval 5 \ - --eval-iters 100 \ - --eval-interval 2000 \ - --tokenizer-type GPT2BPETokenizer \ - --vocab-file /workspace/data/retro_data/vocab/gpt2-vocab.json \ - --merge-file /workspace/data/retro_data/vocab/gpt2-merges.txt \ - --data-path /workspace/data/retro_data/inputs/wiki-200k_text_document \ - --split 98,2,0 \ - --clip-grad 1.0 \ - --weight-decay 0.1 \ - --adam-beta1 0.9 \ - --adam-beta2 0.95 \ - --init-method-std 0.007 \ - --log-params-norm \ - --log-num-zeros-in-grad \ - --log-validation-ppl-to-tensorboard \ - --log-timers-to-tensorboard \ - --tensorboard-dir ${TENSORBOARD_DIR} \ - --save-interval $__SAVE_INTERVAL \ - --save $CHECKPOINT_PATH \ - --load $CHECKPOINT_PATH \ - --bf16 \ - --transformer-impl $TRANSFORMER_IMPL \ - --${TRAINING_DTYPE} \ - ${USE_LEGACY:+--use-legacy-models} \ - ${ADDITIONAL_PARAMS:+$ADDITIONAL_PARAMS} \ - --retro-workdir /workspace/data/retro_data/neighbors - --retro-add-retriever \ - --num-workers 32 \ -" -} - -build_args -torch_run_cmd="torchrun $DISTRIBUTED_ARGS \ - pretrain_retro.py \ - ${ARGS}" - -command="$command $torch_run_cmd" - -if [[ $CHECKPOINT_RESUME_TEST -eq 1 ]]; then - MAX_STEPS=50 - build_args - torch_run_cmd="torchrun $DISTRIBUTED_ARGS \ - pretrain_retro.py \ - ${ARGS}" - command="$command; rm -rf $CHECKPOINT_PATH/iter_0000100; echo 50 > $CHECKPOINT_PATH/latest_checkpointed_iteration.txt; $torch_run_cmd" -fi -echo "-------------------- THE FINAL PRETRAIN SCRIPT COMMAND THAT WILL BE RUN ------------" -echo "$command" -echo "-----------------------------------------------------------------------------" - -pip install h5py -pip install transformers -pip install faiss-gpu - -echo "$command" > $SCRIPTS_DIR/pretrain_retro_distributed_command.sh -eval $command - -echo "Saving test results to $TENSORBOARD_DIR" -PYTHONPATH=$PWD python3 ./tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py \ - --logs-dir $TENSORBOARD_DIR \ - --output-path ${TENSORBOARD_DIR}/results.json - -if [[ $SKIP_PYTEST != 1 ]]; then - echo "-----------------------------------------------------------------------------" - if [[ $CHECKPOINT_RESUME_TEST -eq 1 ]]; then - echo "Running pytest 1st vs 2nd run comparison" - export LOGS_DIR=$TENSORBOARD_DIR - pytest ./tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py - else - echo "Running pytest checks against golden values" - export 
EXPECTED_METRICS_FILE="./tests/functional_tests/test_results/jet/${JOB_NAME}.json" - export LOGS_DIR=$TENSORBOARD_DIR - pytest ./tests/functional_tests/python_test_utils/test_ci_pipeline.py - fi -fi diff --git a/tests/functional_tests/python_test_utils/jet/common.py b/tests/test_utils/python_scripts/common.py similarity index 53% rename from tests/functional_tests/python_test_utils/jet/common.py rename to tests/test_utils/python_scripts/common.py index 5ee31bc23..dd2e2e470 100644 --- a/tests/functional_tests/python_test_utils/jet/common.py +++ b/tests/test_utils/python_scripts/common.py @@ -9,6 +9,18 @@ BASE_PATH = pathlib.Path(__file__).parent.resolve() +def resolve_cluster_config(cluster: str) -> str: + if cluster == "dgxh100_eos": + return "eos" + if cluster == "dgxa100_dracooci": + return "draco-oci-iad" + if cluster == "dgxa100_dracooci-ord": + return "draco-oci-ord" + if cluster == "dgxh100_coreweave": + return "coreweave" + raise ValueError(f"Unknown cluster {cluster} provided.") + + def flatten_products( workload_manifest: jetclient.JETWorkloadManifest, ) -> jetclient.JETWorkloadManifest: @@ -37,6 +49,16 @@ def flatten_workload( return workload_manifests +def set_build_dependency( + workload_manifests: List[jetclient.JETWorkloadManifest], +) -> List[jetclient.JETWorkloadManifest]: + for workload_manifest in workload_manifests: + workload_manifest.spec.build = workload_manifest.spec.build.format( + **dict(workload_manifest.spec) + ) + return workload_manifests + + def load_config(config_path: str) -> jetclient.JETWorkloadManifest: """Loads and parses a yaml file into a JETWorkloadManifest""" with open(config_path) as stream: @@ -48,12 +70,14 @@ def load_config(config_path: str) -> jetclient.JETWorkloadManifest: def load_and_flatten(config_path: str) -> List[jetclient.JETWorkloadManifest]: """Wrapper function for doing all the fun at once.""" - return flatten_workload(flatten_products(load_config(config_path=config_path))) + return set_build_dependency( + flatten_workload(flatten_products(load_config(config_path=config_path))) + ) def filter_by_test_case( workload_manifests: List[jetclient.JETWorkloadManifest], test_case: str -) -> jetclient.JETWorkloadManifest: +) -> Optional[jetclient.JETWorkloadManifest]: """Returns a workload with matching name. 
Raises an error if there no or more than a single workload.""" workload_manifests = list( workload_manifest @@ -62,10 +86,12 @@ def filter_by_test_case( ) if len(workload_manifests) > 1: - raise ValueError("Duplicate test_case found!") + print("Duplicate test_case found!") + return None if len(workload_manifests) == 0: - raise ValueError("No test_case found!") + print("No test_case found!") + return None return workload_manifests[0] @@ -81,7 +107,27 @@ def filter_by_scope( ) if len(workload_manifests) == 0: - raise ValueError("No test_case found!") + print("No test_case found!") + return [] + + return workload_manifests + + +def filter_by_environment( + workload_manifests: List[jetclient.JETWorkloadManifest], environment: str +) -> List[jetclient.JETWorkloadManifest]: + workload_manifests = list( + workload_manifest + for workload_manifest in workload_manifests + if ( + hasattr(workload_manifest.spec, "environment") + and workload_manifest.spec.environment == environment + ) + ) + + if len(workload_manifests) == 0: + print("No test_case found!") + return [] return workload_manifests @@ -97,38 +143,91 @@ def filter_by_model( ) if len(workload_manifests) == 0: - raise ValueError("No test_case found!") + print("No test_case found!") + return [] + + return workload_manifests + + +def filter_by_tag( + workload_manifests: List[jetclient.JETWorkloadManifest], tag: str +) -> List[jetclient.JETWorkloadManifest]: + """Returns all workload with matching tag.""" + workload_manifests = list( + workload_manifest + for workload_manifest in workload_manifests + if hasattr(workload_manifest.spec, "tag") and workload_manifest.spec.tag == tag + ) + + if len(workload_manifests) == 0: + print("No test_case found!") + return [] + + return workload_manifests + + +def filter_by_test_cases( + workload_manifests: List[jetclient.JETWorkloadManifest], test_cases: str +) -> List[jetclient.JETWorkloadManifest]: + """Returns a workload with matching name. Raises an error if there no or more than a single workload.""" + workload_manifests = list( + workload_manifest + for workload_manifest in workload_manifests + for test_case in test_cases.split(",") + if workload_manifest.spec.test_case == test_case + ) + + if len(workload_manifests) == 0: + print("No test_case found!") + return [] return workload_manifests def load_workloads( container_tag: str, + n_repeat: int = 1, + time_limit: int = 1800, + tag: Optional[str] = None, + environment: Optional[str] = None, + test_cases: str = "all", scope: Optional[str] = None, model: Optional[str] = None, test_case: Optional[str] = None, container_image: Optional[str] = None, ) -> List[jetclient.JETWorkloadManifest]: """Return all workloads from disk that match scope and platform.""" - recipes_dir = BASE_PATH / ".." / ".." / "jet_recipes" - local_dir = BASE_PATH / ".." / ".." / "local_recipes" + recipes_dir = BASE_PATH / ".." / "recipes" + local_dir = BASE_PATH / ".." 
/ "local_recipes" workloads: List[jetclient.JETWorkloadManifest] = [] build_workloads: List[jetclient.JETClient] = [] for file in list(recipes_dir.glob("*.yaml")) + list(local_dir.glob("*.yaml")): - workloads += load_and_flatten(config_path=file) + workloads += load_and_flatten(config_path=str(file)) if file.stem.startswith("_build"): - build_workloads.append(load_config(config_path=file)) + build_workloads.append(load_config(config_path=str(file))) if scope: workloads = filter_by_scope(workload_manifests=workloads, scope=scope) - if model: + if workloads and environment: + workloads = filter_by_environment(workload_manifests=workloads, environment=environment) + + if workloads and model: workloads = filter_by_model(workload_manifests=workloads, model=model) - if test_case: + if workloads and tag: + workloads = filter_by_tag(workload_manifests=workloads, tag=tag) + + if workloads and test_cases != "all": + workloads = filter_by_test_cases(workload_manifests=workloads, test_cases=test_cases) + + if workloads and test_case: workloads = [filter_by_test_case(workload_manifests=workloads, test_case=test_case)] + if not workloads: + return [] + for workload in list(workloads): for build_workload in build_workloads: if ( @@ -137,4 +236,6 @@ def load_workloads( container_image = container_image or build_workload.spec.source.image build_workload.spec.source.image = f"{container_image}:{container_tag}" workloads.append(build_workload) + workload.spec.n_repeat = n_repeat + workload.spec.time_limit = time_limit return workloads diff --git a/tests/test_utils/python_scripts/generate_jet_trigger_job.py b/tests/test_utils/python_scripts/generate_jet_trigger_job.py new file mode 100644 index 000000000..0913b19bd --- /dev/null +++ b/tests/test_utils/python_scripts/generate_jet_trigger_job.py @@ -0,0 +1,155 @@ +import pathlib +from typing import Optional + +import click +import yaml + +from tests.test_utils.python_scripts import common + +BASE_PATH = pathlib.Path(__file__).parent.resolve() + + +@click.command() +@click.option("--scope", required=True, type=str, help="Test scope") +@click.option("--environment", required=True, type=str, help="LTS or dev features") +@click.option("--n-repeat", required=False, default=1, type=int) +@click.option("--time-limit", required=False, default=1, type=int) +@click.option( + "--test-cases", required=True, type=str, help="Comma-separated list of test_cases, or 'all'" +) +@click.option("--a100-cluster", required=True, type=str, help="A100 Cluster to run on") +@click.option("--h100-cluster", required=True, type=str, help="H100 Cluster to run on") +@click.option("--output-path", required=True, type=str, help="Path to write GitLab job to") +@click.option("--container-image", required=True, type=str, help="LTS Container image to use") +@click.option("--container-tag", required=True, type=str, help="Container tag to use") +@click.option( + "--dependent-job", + required=True, + type=str, + help="Name of job that created the downstream pipeline", +) +@click.option("--tag", required=False, type=str, help="Tag (only relevant for unit tests)") +@click.option( + "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" +) +@click.option( + "--wandb-experiment", + required=False, + type=str, + help="Wandb experiment (only relevant for release tests)", +) +def main( + scope: str, + environment: str, + n_repeat: int, + time_limit: int, + test_cases: str, + a100_cluster: str, + h100_cluster: str, + output_path: str, + container_image: str, + container_tag: 
str, + dependent_job: str, + tag: Optional[str] = None, + run_name: Optional[str] = None, + wandb_experiment: Optional[str] = None, +): + list_of_test_cases = [ + test_case + for test_case in common.load_workloads( + scope=scope, + container_tag=container_tag, + environment=environment, + test_cases=test_cases, + tag=tag, + ) + if test_case.type != "build" + ] + + tags = [ + "arch/amd64", + "env/prod", + "origin/jet-fleet", + "owner/jet-core", + "purpose/jet-client", + "team/megatron", + ] + + if not list_of_test_cases: + gitlab_pipeline = { + "stages": ["empty-pipeline-placeholder"], + "default": {"interruptible": True}, + "empty-pipeline-placeholder-job": { + "stage": "empty-pipeline-placeholder", + "image": f"{container_image}:{container_tag}", + "tags": tags, + "rules": [ + {"if": '$CI_PIPELINE_SOURCE == "parent_pipeline"'}, + {"if": '$CI_MERGE_REQUEST_ID'}, + ], + "timeout": "7 days", + "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": "functional:configure"}], + "script": ["sleep 1"], + "artifacts": {"paths": ["results/"], "when": "always"}, + }, + } + + else: + gitlab_pipeline = { + "stages": list(set([test_case.spec.model for test_case in list_of_test_cases])), + "default": {"interruptible": True}, + } + + for test_case in list_of_test_cases: + if test_case.spec.platforms == "dgx_a100": + cluster = a100_cluster + elif test_case.spec.platforms == "dgx_h100": + cluster = h100_cluster + else: + raise ValueError(f"Platform {test_case.spec.platforms} unknown") + + job_tags = list(tags) + job_tags.append(f"cluster/{common.resolve_cluster_config(cluster)}") + + script = [ + "export PYTHONPATH=$(pwd); " + "python tests/test_utils/python_scripts/launch_jet_workload.py", + f"--model {test_case.spec.model}", + f"--environment {test_case.spec.environment}", + f"--n-repeat {n_repeat}", + f"--time-limit {time_limit}", + f"--test-case '{test_case.spec.test_case}'", + f"--container-tag {container_tag}", + f"--cluster {cluster}", + ] + + if tag is not None: + script.append(f"--tag {tag}") + + if run_name is not None and wandb_experiment is not None: + script.append(f"--run-name {run_name}") + test_case.spec.model + script.append( + f"--wandb-experiment {wandb_experiment}-{test_case.spec.model}-{test_case.spec.test_case}" + ) + + gitlab_pipeline[test_case.spec.test_case] = { + "stage": f"{test_case.spec.model}", + "image": f"{container_image}:{container_tag}", + "tags": job_tags, + "rules": [ + {"if": '$CI_PIPELINE_SOURCE == "parent_pipeline"'}, + {"if": '$CI_MERGE_REQUEST_ID'}, + ], + "timeout": "7 days", + "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": dependent_job}], + "script": [" ".join(script)], + "artifacts": {"paths": ["results/"], "when": "always"}, + } + + with open(output_path, 'w') as outfile: + yaml.dump(gitlab_pipeline, outfile, default_flow_style=False) + + +if __name__ == "__main__": + main() diff --git a/tests/functional_tests/python_test_utils/jet/generate_local_jobs.py b/tests/test_utils/python_scripts/generate_local_jobs.py similarity index 92% rename from tests/functional_tests/python_test_utils/jet/generate_local_jobs.py rename to tests/test_utils/python_scripts/generate_local_jobs.py index bc9ad2230..175492175 100644 --- a/tests/functional_tests/python_test_utils/jet/generate_local_jobs.py +++ b/tests/test_utils/python_scripts/generate_local_jobs.py @@ -12,13 +12,13 @@ import jetclient import yaml -from tests.functional_tests.python_test_utils.jet import common +from tests.test_utils.python_scripts import common def load_script(config_path: str) -> str: with 
open(config_path) as stream: try: - jetclient.JETWorkloadManifest(**yaml.safe_load(stream)).spec.script + return jetclient.JETWorkloadManifest(**yaml.safe_load(stream)).spec.script except yaml.YAMLError as exc: raise exc diff --git a/tests/test_utils/python_scripts/launch_jet_workload.py b/tests/test_utils/python_scripts/launch_jet_workload.py new file mode 100644 index 000000000..6e0580fcd --- /dev/null +++ b/tests/test_utils/python_scripts/launch_jet_workload.py @@ -0,0 +1,302 @@ +import json +import os +import pathlib +import re +import signal +import sys +import tempfile +import time +from typing import List, Optional + +import click +import jetclient +import requests +import yaml +from jet import workloads +from jetclient.facades.objects import log as jet_log +from jetclient.services.dtos.pipeline import PipelineStatus + +from tests.test_utils.python_scripts import common + +BASE_PATH = pathlib.Path(__file__).parent.resolve() + + +def register_pipeline_terminator(pipeline: jetclient.JETPipeline): + def sigterm_handler(_signo, _stack_frame): + print(f"Trying to terminate pipeline {pipeline.jet_id}") + pipeline.cancel() + print(f"Pipeline {pipeline.jet_id} terminated") + sys.exit(0) + + signal.signal(signal.SIGINT, sigterm_handler) + signal.signal(signal.SIGTERM, sigterm_handler) + + +def launch_and_wait_for_completion( + test_case: str, + environment: str, + n_repeat: int, + time_limit: int, + container_image: Optional[str], + container_tag: str, + cluster: str, + account: str, + tag: Optional[str], + run_name: Optional[str], + wandb_experiment: Optional[str], +) -> jetclient.JETPipeline: + n_submit_errors = 0 + + while n_submit_errors < 3: + pipeline = jetclient.JETClient( + customer='mcore', gitlab_ci_token=os.getenv("RO_API_TOKEN"), env="prod" + ).workloads.submit( + workloads=common.load_workloads( + test_case=test_case, + n_repeat=n_repeat, + time_limit=time_limit, + tag=tag, + container_image=container_image, + container_tag=container_tag, + environment=environment, + ), + config_id=f"mcore/{common.resolve_cluster_config(cluster)}", + custom_config={ + "launchers": {cluster: {"account": account, "ntasks_per_node": 8}}, + "executors": { + "jet-ci": { + "environments": { + cluster: { + "variables": { + "RUN_NAME": run_name or "", + "WANDB_API_KEY": os.getenv("WANDB_API_KEY") or "", + "WANDB_EXPERIMENT": wandb_experiment or "", + } + } + } + } + }, + }, + wait_for_validation=True, + max_wait_time=(60 * 60), + ) + if pipeline.get_status() == PipelineStatus.SUBMISSION_FAILED: + n_submit_errors += 1 + print(f"Failed submitting pipeline. Let's try again ({n_submit_errors}/3)") + continue + break + + register_pipeline_terminator(pipeline=pipeline) + + print( + f"Pipeline triggered; inspect it here: https://gitlab-master.nvidia.com/dl/jet/ci/-/pipelines/{pipeline.jet_id}", + flush=True, + ) + + n_wait_attempts = 0 + while n_wait_attempts < 3: + try: + pipeline.wait(max_wait_time=60 * 60 * 24 * 7, interval=60 * 1) + break + except (requests.exceptions.ConnectionError, json.decoder.JSONDecodeError) as e: + print(e) + time.sleep(60 * 3**n_wait_attempts) + pipeline = workloads.get_pipeline(pipeline.jet_id) + n_wait_attempts += 1 + + print(f"Pipeline terminated; status: {pipeline.get_status()}") + return pipeline + + +def download_job_assets(logs: List[jet_log.JETLog], iteration: int = 0) -> List[str]: + if not logs: + return [""] + + assets_base_path = BASE_PATH / ".." / ".." / ".." / ".." 
/ "results" / f"iteration={iteration}" + + for restart_idx, log in enumerate(logs): + assets = log.get_assets() + assets_path = assets_base_path / f"restart={restart_idx}" + assets_path.mkdir(parents=True, exist_ok=True) + for log_filename in assets.keys(): + with open(assets_path / log_filename, "w") as fh: + assets[log_filename].download(pathlib.Path(fh.name)) + return assets + + +def extract_logs_to_string(logs: List[jet_log.JETLog]) -> List[str]: + if not logs: + return [""] + + assets = logs[0].get_assets() + log_filename = [key for key in assets.keys() if key.endswith(".log")][0] + + with tempfile.NamedTemporaryFile() as tmp_file: + assets[log_filename].download(pathlib.Path(tmp_file.name)) + with open(pathlib.Path(tmp_file.name), "r") as fh: + return fh.readlines() + + +def parse_failed_job(logs: List[str]) -> Optional[bool]: + for log_row in logs[::-1]: + match = re.search(r"Job finished with status 'FAILED'", log_row) + if match is not None: + return True + return False + + +def parse_finished_training(logs: List[str]) -> Optional[bool]: + for log_row in logs[::-1]: + match = re.search(r"after training is done", log_row) + if match is not None: + return True + return False + + +@click.command() +@click.option("--model", required=True, type=str, help="Model") +@click.option("--test-case", required=True, type=str, help="Test case") +@click.option( + "--environment", required=True, type=click.Choice(['dev', 'lts']), help="Pytorch LTS or DEV" +) +@click.option("--n-repeat", required=False, default=1, type=int) +@click.option("--time-limit", required=False, default=1800, type=int) +@click.option( + "--account", + required=False, + type=str, + help="Slurm account to use", + default="coreai_dlalgo_mcore", +) +@click.option("--cluster", required=True, type=str, help="Cluster to run on") +@click.option("--container-tag", required=True, type=str, help="Base image of Mcore image") +@click.option("--container-image", required=False, type=str, help="Base image of Mcore image") +@click.option("--tag", required=False, type=str, help="Tag (only relevant for unit tests)") +@click.option( + "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" +) +@click.option( + "--wandb-experiment", + required=False, + type=str, + help="Wandb experiment (only relevant for release tests)", +) +def main( + model: str, + test_case: str, + environment: str, + n_repeat: int, + time_limit: int, + account: str, + cluster: str, + container_tag: str, + tag: Optional[str] = None, + container_image: Optional[str] = None, + run_name: Optional[str] = None, + wandb_experiment: Optional[str] = None, +): + model_config_path = pathlib.Path( + BASE_PATH + / ".." + / ".." 
+ / "functional_tests" + / "test_cases" + / model + / test_case + / "model_config.yaml" + ) + + if model_config_path.exists(): + with open(model_config_path) as stream: + try: + test_case_dict = yaml.safe_load(stream) + except yaml.YAMLError as exc: + print(exc) + + test_type = test_case_dict['TEST_TYPE'] + else: + test_type = "unit_test" + + if test_type == "release" and (run_name is None or wandb_experiment is None): + print(f"Not all arguments provided ({run_name=}, {wandb_experiment=})") + sys.exit(1) + + n_attempts = 0 + n_nondeterminism_attemps = 0 + n_iteration = 0 + while True and n_attempts < 3 and n_nondeterminism_attemps < 2: + pipeline = launch_and_wait_for_completion( + test_case=test_case, + environment=environment, + n_repeat=n_repeat, + time_limit=time_limit, + container_image=container_image, + container_tag=container_tag, + cluster=cluster, + account=account, + tag=tag, + run_name=run_name, + wandb_experiment=wandb_experiment, + ) + + main_job = [job for job in pipeline.get_jobs() if job.name.startswith("basic")][0] + + n_download_attempt = 0 + while n_download_attempt < 3: + try: + jet_log = main_job.get_logs() + logs = extract_logs_to_string(logs=jet_log) + download_job_assets(logs=jet_log, iteration=n_iteration) + break + except (requests.exceptions.ConnectionError, json.decoder.JSONDecodeError) as e: + print(e) + time.sleep((3**n_download_attempt) * 60) + n_download_attempt += 1 + + concat_logs = "\n".join(logs) + print(f"Logs:\n{concat_logs}") + + success = pipeline.get_status() == PipelineStatus.SUCCESS + + if test_type == "unit_test": + success = success and ( + ( + re.search(r'=.*?\bpassed\b.*?=', concat_logs) + and not re.search(r'=.*?\bfailed\b.*?=', concat_logs) + ) + or "0 selected" in concat_logs + ) + sys.exit(int(not success)) # invert for exit 0 + + if test_type != "release": + if success: + sys.exit(int(not success)) # invert for exit 0 + + if ( + "Some NCCL operations have failed or timed out." 
in concat_logs + or "uncorrectable ECC error encountered" in concat_logs + or "illegal memory access" in concat_logs + or "illegal instruction" in concat_logs + ): + print("Detected NCCL failure, attempt restart.") + n_attempts += 1 + continue + + if "FAILED tests/functional_tests/python_test_utils/test_ci_pipeline.py" in concat_logs: + print("Non-determinism, let's try another node.") + n_nondeterminism_attemps += 1 + continue + + if parse_failed_job(logs=logs): + n_attempts += 1 + continue + + if parse_finished_training(logs=logs): + success = pipeline.get_status() == PipelineStatus.SUCCESS + sys.exit(int(not success)) # invert for exit 0 + n_iteration += 1 + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/tests/functional_tests/jet_recipes/_build-mcore.yaml b/tests/test_utils/recipes/_build-mcore-dev.yaml similarity index 70% rename from tests/functional_tests/jet_recipes/_build-mcore.yaml rename to tests/test_utils/recipes/_build-mcore-dev.yaml index 81b38b69c..123250d74 100644 --- a/tests/functional_tests/jet_recipes/_build-mcore.yaml +++ b/tests/test_utils/recipes/_build-mcore-dev.yaml @@ -2,10 +2,10 @@ type: build format_version: 1 maintainers: [maanug] spec: - name: mcore-pyt + name: mcore-pyt-dev platforms: [linux/amd64] source: # The image tag will be added via `jet-tests.yaml` # Tags are one of {buildcache, $CI_PIPELINE_ID} - image: gitlab-master.nvidia.com/adlr/megatron-lm/mcore_ci + image: gitlab-master.nvidia.com/adlr/megatron-lm/mcore_ci_dev \ No newline at end of file diff --git a/tests/test_utils/recipes/_build-mcore-lts.yaml b/tests/test_utils/recipes/_build-mcore-lts.yaml new file mode 100644 index 000000000..d017b71c1 --- /dev/null +++ b/tests/test_utils/recipes/_build-mcore-lts.yaml @@ -0,0 +1,11 @@ +type: build +format_version: 1 +maintainers: [maanug] +spec: + name: mcore-pyt-lts + platforms: [linux/amd64] + source: + # The image tag will be added via `jet-tests.yaml` + # Tags are one of {buildcache, $CI_PIPELINE_ID} + image: gitlab-master.nvidia.com/adlr/megatron-lm/mcore_ci_lts + \ No newline at end of file diff --git a/tests/functional_tests/jet_recipes/_build-nemo.yaml b/tests/test_utils/recipes/_build-nemo.yaml similarity index 100% rename from tests/functional_tests/jet_recipes/_build-nemo.yaml rename to tests/test_utils/recipes/_build-nemo.yaml diff --git a/tests/functional_tests/jet_recipes/bert.yaml b/tests/test_utils/recipes/bert.yaml similarity index 58% rename from tests/functional_tests/jet_recipes/bert.yaml rename to tests/test_utils/recipes/bert.yaml index 088436e8e..5a4d5a85a 100644 --- a/tests/functional_tests/jet_recipes/bert.yaml +++ b/tests/test_utils/recipes/bert.yaml @@ -5,8 +5,8 @@ loggers: [stdout] spec: name: "{test_case}" model: bert - build: mcore-pyt nodes: 1 + build: mcore-pyt-{environment} gpus: 8 platforms: dgx_a100 artifacts: @@ -22,28 +22,34 @@ spec: "TENSORBOARD_PATH={assets_dir}/tensorboard" "CHECKPOINT_PATH=/workspace/checkpoints" "TRAINING_SCRIPT_PATH=pretrain_bert.py" - "TEST_CASE_PATH=./tests/functional_tests/test_cases/{model}/{test_case}" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" ) bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - scope: [mr] - time_limit: [12000] + - environment: [lts, dev] + scope: [mr] + time_limit: [1800] + n_repeat: [5] test_case: - 
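The retry policy above separates infrastructure failures from test failures: known NCCL and GPU error signatures trigger a plain relaunch, while a failing golden-value comparison (the test_ci_pipeline.py marker) is treated as non-determinism and retried on another node. A condensed sketch of that classification:

NCCL_MARKERS = (
    "Some NCCL operations have failed or timed out.",
    "uncorrectable ECC error encountered",
    "illegal memory access",
    "illegal instruction",
)

def classify_failure(concat_logs: str) -> str:
    if any(marker in concat_logs for marker in NCCL_MARKERS):
        return "restart"             # counts against n_attempts
    if "FAILED tests/functional_tests/python_test_utils/test_ci_pipeline.py" in concat_logs:
        return "retry-other-node"    # counts against the non-determinism budget
    return "give-up"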
bert_mr_mcore_tp2_pp2_dgx_a100_1N8G - # - bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G + - bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G - bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G - bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G - bert_mr_tp1_pp4_vp2_dgx_a100_1N8G - bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G - bert_mr_tp2_pp2_dgx_a100_1N8G - bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G - - scope: [nightly] - time_limit: [12000] + - environment: [lts, dev] + scope: [nightly] + n_repeat: [5] + time_limit: [3600] test_case: - - bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2 - - bert_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2 - - bert_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1 - - bert_345m_nightly_dgx_a100_1N8G_tp1_pp2 - - bert_345m_nightly_dgx_a100_1N8G_tp4_pp1 \ No newline at end of file + - bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2 + - bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2 + - bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1 + - bert_nightly_dgx_a100_1N8G_tp1_pp2 + - bert_nightly_dgx_a100_1N8G_tp4_pp1 diff --git a/tests/test_utils/recipes/gpt-modelopt.yaml b/tests/test_utils/recipes/gpt-modelopt.yaml new file mode 100644 index 000000000..d75b1dbbc --- /dev/null +++ b/tests/test_utils/recipes/gpt-modelopt.yaml @@ -0,0 +1,37 @@ +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] +spec: + name: "{test_case}" + model: gpt + build: mcore-pyt-{environment} + nodes: 1 + gpus: 2 + artifacts: + /workspace/data/gpt3_data: text/the_pile/shard00 + /workspace/checkpoints/teacher: model/gpt_dummy_pyt/ckpt/24.10.0_bf16_teacher + script: |- + ls + cd /opt/megatron-lm + + ARGUMENTS=( + "DATA_PATH=/workspace/data/gpt3_data" + "DATA_CACHE_PATH=/workspace/data/cache" + "OUTPUT_PATH={assets_dir}" + "TENSORBOARD_PATH={assets_dir}/tensorboard" + "CHECKPOINT_PATH=/workspace/checkpoints" + "TRAINING_SCRIPT_PATH=./examples/export/knowledge_distillation/pretrain_gpt_modelopt.py" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + ) + + bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} + +products: + - scope: [nightly] + platforms: [dgx_a100] + time_limit: [1200] + environment: [lts, dev] # Disable dev for now + test_case: + - gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume diff --git a/tests/functional_tests/jet_recipes/gpt-nemo.yaml b/tests/test_utils/recipes/gpt-nemo.yaml similarity index 69% rename from tests/functional_tests/jet_recipes/gpt-nemo.yaml rename to tests/test_utils/recipes/gpt-nemo.yaml index f14d2f0af..01e79b479 100644 --- a/tests/functional_tests/jet_recipes/gpt-nemo.yaml +++ b/tests/test_utils/recipes/gpt-nemo.yaml @@ -9,7 +9,7 @@ spec: nodes: 1 gpus: 8 platforms: dgx_a100 - time_limit: 12000 + time_limit: 1800 scope: null script: |- ls @@ -22,13 +22,17 @@ spec: "TENSORBOARD_PATH={assets_dir}/tensorboard" "CHECKPOINT_PATH=/workspace/checkpoints" "TRAINING_SCRIPT_PATH=/opt/NeMo/examples/nlp/language_modeling/megatron_gpt_pretraining.py" - "TEST_CASE_PATH=/opt/megatron-lm/tests/functional_tests/test_cases/{model}/{test_case}" + "TRAINING_PARAMS_PATH=/opt/megatron-lm/tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=/opt/megatron-lm/tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" ) bash 
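Each products entry in these recipes (for example the bert.yaml block above) is a small matrix; the JET tooling expands it into one workload per combination of environment, scope, n_repeat, and test_case. The expansion itself happens inside JET, but the idea, sketched with example values, is:

from itertools import product

products_entry = {
    "environment": ["lts", "dev"],
    "scope": ["mr"],
    "n_repeat": [5],
    "test_case": [
        "bert_mr_tp2_pp2_dgx_a100_1N8G",
        "bert_mr_tp1_pp4_vp2_dgx_a100_1N8G",
    ],
}
for values in product(*products_entry.values()):
    print(dict(zip(products_entry.keys(), values)))   # one concrete workload each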
/opt/megatron-lm/tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - scope: [mr] + - environment: [dev] + scope: [mr] + n_repeat: [5] test_case: - gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G - gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G diff --git a/tests/functional_tests/jet_recipes/gpt.yaml b/tests/test_utils/recipes/gpt.yaml similarity index 80% rename from tests/functional_tests/jet_recipes/gpt.yaml rename to tests/test_utils/recipes/gpt.yaml index 8c09d0bd1..2e84eb584 100644 --- a/tests/functional_tests/jet_recipes/gpt.yaml +++ b/tests/test_utils/recipes/gpt.yaml @@ -5,7 +5,7 @@ loggers: [stdout] spec: name: "{test_case}" model: gpt - build: mcore-pyt + build: mcore-pyt-{environment} nodes: 1 gpus: 8 artifacts: @@ -21,15 +21,19 @@ spec: "TENSORBOARD_PATH={assets_dir}/tensorboard" "CHECKPOINT_PATH=/workspace/checkpoints" "TRAINING_SCRIPT_PATH=pretrain_gpt.py" - "TEST_CASE_PATH=./tests/functional_tests/test_cases/{model}/{test_case}" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" ) bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - scope: [mr] + - environment: [lts, dev] + scope: [mr] platforms: [dgx_a100] - time_limit: [12000] + time_limit: [1800] + n_repeat: [5] test_case: - gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G @@ -53,8 +57,7 @@ products: - gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G @@ -63,26 +66,20 @@ products: - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dgx_a100_1N8G + # - gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G # torch >= 2.4.0 + - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G - - 
gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G @@ -104,20 +101,32 @@ products: - gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G - gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G - gpt3_mr_tp2_pp2_dgx_a100_1N8G - - scope: [nightly] + - gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G + - gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G # cp and attention + - gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G # cp and attention + - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G # cp and attention + - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G # cp and attention + - gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G # cp and attention with a2a+p2p comm type + - gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G # cp and attention with a2a+p2p comm type + - environment: [lts, dev] + scope: [nightly] platforms: [dgx_a100] - time_limit: [12000] + time_limit: [3600] + n_repeat: [5] test_case: - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather + # - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te # torch >= 2.4.0 - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2 - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4 - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist - # - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel - # - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_te_2experts + # - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts # non-determinism - 
gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1 - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist @@ -129,22 +138,29 @@ products: - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_4experts - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce - gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1 - gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce - gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch - - scope: [weekly] + - environment: [lts] + scope: [nightly] + platforms: [dgx_a100] + time_limit: [3600] + n_repeat: [5] + test_case: + - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel # non-determinism in dev + - environment: [lts, dev] + scope: [weekly] platforms: [dgx_h100] time_limit: [9000] test_case: - gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp + # - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp + # - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp + # - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp + # - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp diff --git a/tests/test_utils/recipes/multimodal-llava.yaml b/tests/test_utils/recipes/multimodal-llava.yaml new file mode 100644 index 000000000..3989ebeef --- /dev/null +++ b/tests/test_utils/recipes/multimodal-llava.yaml @@ -0,0 +1,49 @@ +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] +launchers: + type:slurm: + ntasks_per_node: '{gpus}' +spec: + name: '{test_case}' + model: multimodal-llava + build: mcore-pyt-{environment} + nodes: 1 + gpus: 8 + platforms: dgx_a100 + time_limit: 1800 + scope: null + script: |- + ls + cd /opt/megatron-lm + + ARGUMENTS=( + "DATA_PATH='-'" + "DATA_CACHE_PATH='-'" + "OUTPUT_PATH={assets_dir}" + "TENSORBOARD_PATH={assets_dir}/tensorboard" + "CHECKPOINT_PATH=/workspace/checkpoints" + "TRAINING_SCRIPT_PATH=pretrain_vlm.py" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" + ) + + bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} + +products: + - environment: [lts, dev] + scope: [mr] + n_repeat: [5] + gpus: [8] + test_case: + - multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G + - multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G + - environment: [lts, dev] + scope: [mr] + n_repeat: [5] + gpus: [7] + test_case: + - multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G + - multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G diff --git a/tests/functional_tests/jet_recipes/t5.yaml b/tests/test_utils/recipes/t5.yaml similarity 
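The recipe specs lean on brace placeholders such as {model}, {test_case}, {environment}, and {n_repeat}; the doubled braces in ${{ARGUMENTS[@]}} suggest Python str.format-style substitution, where literal bash braces have to be escaped by doubling. A small illustration under that assumption, with example values:

spec_value_template = (
    "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/"
    "golden_values_{environment}.json"
)
print(spec_value_template.format(
    model="gpt",
    test_case="gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G",
    environment="dev",
))
# Literal bash braces in the recipes are written doubled, e.g. ${{ARGUMENTS[@]}}.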
index 55% rename from tests/functional_tests/jet_recipes/t5.yaml rename to tests/test_utils/recipes/t5.yaml index dbbbc508d..e9583a3ed 100644 --- a/tests/functional_tests/jet_recipes/t5.yaml +++ b/tests/test_utils/recipes/t5.yaml @@ -5,7 +5,7 @@ loggers: [stdout] spec: name: "{test_case}" model: t5 - build: mcore-pyt + build: mcore-pyt-{environment} nodes: 1 gpus: 8 platforms: dgx_a100 @@ -22,29 +22,40 @@ spec: "TENSORBOARD_PATH={assets_dir}/tensorboard" "CHECKPOINT_PATH=/workspace/checkpoints" "TRAINING_SCRIPT_PATH=pretrain_t5.py" - "TEST_CASE_PATH=./tests/functional_tests/test_cases/{model}/{test_case}" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" ) bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - scope: [mr] - time_limit: [12000] + - environment: [lts, dev] + scope: [mr] + time_limit: [1800] + n_repeat: [5] test_case: - t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G - t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G - - t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G - t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G - - t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G - t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G - - t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G - t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G - - scope: [weekly] + - t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G + - t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G + - environment: [lts] + scope: [mr] + time_limit: [1800] + n_repeat: [5] + test_case: + - t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G + - environment: [lts, dev] + scope: [nightly] time_limit: [9000] + n_repeat: [1] test_case: - - t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch - - t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1 - - t5_220m_weekly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel - - t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1 - - t5_220m_weekly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch - - t5_220m_weekly_dgx_a100_1N8G_mcore_tp2_pp1_vp1 \ No newline at end of file + - t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch + - t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1 + - t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel + - t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1 + - t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch + - t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1 diff --git a/tests/test_utils/recipes/unit-tests.yaml b/tests/test_utils/recipes/unit-tests.yaml new file mode 100644 index 000000000..cda58d92e --- /dev/null +++ b/tests/test_utils/recipes/unit-tests.yaml @@ -0,0 +1,80 @@ +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] +spec: + name: '{test_case}' + model: unit-tests + nodes: 1 + build: mcore-pyt-{environment} + gpus: 8 + platforms: dgx_h100 + script: |- + ls + + export TAG={tag} + export ENVIRONMENT={environment} + export BUCKET="{test_case}" + export UNIT_TEST_REPEAT={n_repeat} + export UNIT_TEST_TIMEOUT=10 + + set -euxo pipefail + + if [[ "$TAG" == "latest" ]]; then + TEST_PATH="/opt/megatron-lm" + else + TEST_PATH="/opt/megatron-lm-legacy/" + fi + + cd $TEST_PATH + + MARKER=() + if [[ "$TAG" == "legacy" ]]; then + MARKER+=("not internal") + fi + + if [[ "$ENVIRONMENT" == "lts" ]]; then + MARKER+=("not flaky") + fi + + if [[ "$ENVIRONMENT" == "dev" ]]; then + MARKER+=("not flaky_in_dev") + fi + + 
MARKER_ARG=$(printf "%s" "${{MARKER[0]}}") + for element in "${{MARKER[@]:1}}"; do + MARKER_ARG+=" and $element" + done + + IGNORE_TEST_CASES=$(cat /opt/megatron-lm/tests/test_utils/recipes/unit-tests.yaml | yq eval 'with(.products[].test_case; del(.[] | select(. == env(BUCKET)))) | .products[].test_case[]' | tr " " "\n") + IGNORE_ARGS=() + while IFS= read -r test_case; do + if [[ $test_case == *\** ]]; then + FILES=($(ls $test_case)) + echo ${{FILES[@]}} + for file in "${{FILES[@]}}"; do + IGNORE_ARGS+=("--ignore='$file'") + done + else + IGNORE_ARGS+=("--ignore=$test_case") + fi + done <<< "$IGNORE_TEST_CASES" + + for i in $(seq $UNIT_TEST_REPEAT); do + CMD=$(echo pytest -xvs --cov-report=term --cov-report=html --cov=megatron/core --no-cov-on-fail ${{IGNORE_ARGS[@]}} -m "'${{MARKER_ARG}}'" $BUCKET) + eval "$CMD" + done + +products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] + test_case: + - tests/unit_tests/data/ + - tests/unit_tests/dist_checkpointing/*.py + - tests/unit_tests/dist_checkpointing/models/ + - tests/unit_tests/transformer/*.py + - tests/unit_tests/transformer/moe + - tests/unit_tests diff --git a/tests/test_utils/shell_scripts/notify.sh b/tests/test_utils/shell_scripts/notify.sh new file mode 100644 index 000000000..ff4b40107 --- /dev/null +++ b/tests/test_utils/shell_scripts/notify.sh @@ -0,0 +1,215 @@ +set -euxo pipefail + +collect_jobs() { + DOWNSTREAM_PIPELINE_ID=$1 + PAGE=1 + PER_PAGE=100 + RESULTS="[]" + + while true; do + # Fetch the paginated results + RESPONSE=$( + curl \ + -s \ + --globoff \ + --header "PRIVATE-TOKEN: $RO_API_TOKEN" \ + "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${DOWNSTREAM_PIPELINE_ID}/jobs?page=$PAGE&per_page=$PER_PAGE" + ) + # Combine the results + RESULTS=$(jq -s '.[0] + .[1]' <<<"$RESULTS $RESPONSE") + + # Check if there are more pages + if [[ $(jq 'length' <<<"$RESPONSE") -lt $PER_PAGE ]]; then + break + fi + + # Increment the page number + PAGE=$((PAGE + 1)) + done + + echo "$RESULTS" +} + +CI_PIPELINE_ID=${1:-16595865} +ENVIRONMENT=${2} + +CI_PROJECT_ID=${CI_PROJECT_ID:-19378} + +# Fetch Elastic logs +set +x +PIPELINE_JSON=$( + curl \ + --fail \ + --silent \ + --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ + "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/bridges?per_page=100" +) || ret_code=$? 
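The unit-tests recipe above builds a pytest -m expression by ANDing deselection markers: "not internal" for legacy checkouts, "not flaky" on the LTS environment, "not flaky_in_dev" on the dev environment. The same logic in a few lines of Python, with example inputs:

def build_marker_arg(tag: str, environment: str) -> str:
    markers = []
    if tag == "legacy":
        markers.append("not internal")
    if environment == "lts":
        markers.append("not flaky")
    if environment == "dev":
        markers.append("not flaky_in_dev")
    return " and ".join(markers)

print(build_marker_arg("latest", "dev"))    # not flaky_in_dev
print(build_marker_arg("legacy", "lts"))    # not internal and not flaky
# which ends up as: pytest -m "not internal and not flaky" <bucket>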
+set -x +if [[ ${ret_code:-0} -ne 0 ]]; then + echo CI_PIPELINE_ID=$CI_PIPELINE_ID does not exist + exit 1 +fi + +# Fetch GitLab logs of JET downstream pipeline +DOWNSTREAM_PIPELINE_IDS=$(jq \ + -c --arg environment "$ENVIRONMENT" ' + .[] + | select(.name | startswith($environment)) + | { + id: .downstream_pipeline.id, + name: .name + } + ' <<<"$PIPELINE_JSON") + +PIPELINE_URL=https://${GITLAB_ENDPOINT}/ADLR/megatron-lm/-/pipelines/$CI_PIPELINE_ID +JOB_URL=https://${GITLAB_ENDPOINT}/ADLR/megatron-lm/-/jobs/ + +while IFS= read -r DOWNSTREAM_PIPELINE; do + + if [[ $DOWNSTREAM_PIPELINE == null ]]; then + FAILED_JOBS=$(curl \ + --fail \ + --silent \ + --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ + "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/jobs?per_page=100" | + jq --arg JOB_URL "$JOB_URL" '[.[] | select(.status == "failed") | ("<" + $JOB_URL + (.id | tostring) + "|" + .name + ">")] | join("\n• Job: ")' | tr -d '"') + curl \ + -X POST \ + -H "Content-type: application/json" \ + --data ' + { + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "<'$PIPELINE_URL'|Report of '$DATE' ('$CONTEXT')>:\n" + } + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "\n• Job: '"$FAILED_JOBS"'" + } + }, + ] + + }' \ + $WEBHOOK_URL + + else + DOWNSTREAM_PIPELINE_ID=$(echo $DOWNSTREAM_PIPELINE | jq '.id' | tr -d '"') + DOWNSTREAM_PIPELINE_NAME=$(echo $DOWNSTREAM_PIPELINE | jq '.name' | tr -d '"') + + set +x + JOBS=$(echo "$(collect_jobs $DOWNSTREAM_PIPELINE_ID)" | jq '[.[] | {id, name, status}]') + echo $JOBS + set -x + + FAILED_JOBS=$( + echo "$JOBS" | + jq --arg GITLAB_ENDPOINT "$GITLAB_ENDPOINT" '[ + .[] + | select(.status != "success") + | { + name, + id, + "url": ("https://" + $GITLAB_ENDPOINT + "/adlr/megatron-lm/-/jobs/" + (.id | tostring)), + } + ]' + ) + set -x + + for row in $(echo "${FAILED_JOBS}" | jq -r '.[] | @base64'); do + _jq() { + echo ${row} | base64 --decode | jq -r ${1} + } + JOB_ID=$(_jq '.id') + FULL_LOG=$(curl \ + --location \ + --header "PRIVATE-TOKEN: ${RO_API_TOKEN}" \ + "https://${GITLAB_ENDPOINT}/api/v4/projects/${CI_PROJECT_ID}/jobs/${JOB_ID}/trace") + + if [[ "$FULL_LOG" == *exception* ]]; then + LAST_EXCEPTION_POS=$(echo "$FULL_LOG" | grep -o -b 'exception' | tail -1 | cut -d: -f1) + SHORT_LOG=${FULL_LOG:$LAST_EXCEPTION_POS-500:499} + else + SHORT_LOG=${FULL_LOG: -1000} + fi + + FAILED_JOBS=$(echo "$FAILED_JOBS" | + jq \ + --argjson JOB_ID "$JOB_ID" \ + --arg SLURM_FAILURE "$SHORT_LOG" ' + .[] |= ((select(.id==$JOB_ID) += { + "slurm_failure_reason": $SLURM_FAILURE})) + ') + done + + NUM_FAILED=$(echo "$FAILED_JOBS" | jq 'length') + NUM_TOTAL=$(echo "$JOBS" | jq 'length') + _CONTEXT="$CONTEXT - $DOWNSTREAM_PIPELINE_NAME" + + if [[ $NUM_FAILED -eq 0 ]]; then + BLOCKS='[ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": ":doge3d: <'$PIPELINE_URL'|Report of '$DATE' ('$_CONTEXT')>: All '$NUM_TOTAL' passed" + } + } + ]' + else + BLOCKS=$( + echo "$FAILED_JOBS" | + jq --arg DATE "$DATE" --arg CONTEXT "$_CONTEXT" --arg URL "$PIPELINE_URL" --arg NUM_FAILED "$NUM_FAILED" --arg NUM_TOTAL "$NUM_TOTAL" ' + [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": (":doctorge: <" + $URL + "|Report of " + $DATE + " (" + $CONTEXT + ")>: " + $NUM_FAILED + " of " + $NUM_TOTAL + " failed") + } + } + ] + [ + .[] + | { + "type": "section", + "text": { + "type": "mrkdwn", + "text": ( + "• Job: <" +.url + "|" + .name + ">" + + "\n SLURM failure reason: \n```" + .slurm_failure_reason + 
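notify.sh's collect_jobs pages through the GitLab jobs API with curl and jq, stopping once a page returns fewer than per_page entries. The same pagination pattern, sketched in Python with the requests library (endpoint, project id, and token values are placeholders):

import requests

def collect_jobs(gitlab_endpoint, project_id, pipeline_id, token, per_page=100):
    jobs, page = [], 1
    while True:
        response = requests.get(
            f"https://{gitlab_endpoint}/api/v4/projects/{project_id}"
            f"/pipelines/{pipeline_id}/jobs",
            params={"page": page, "per_page": per_page},
            headers={"PRIVATE-TOKEN": token},
        )
        batch = response.json()
        jobs.extend(batch)
        if len(batch) < per_page:   # last page reached
            break
        page += 1
    return jobs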
"```" + + ) + } + } + ] + [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": ("===============================================") + } + } + ]' + ) + fi + + for row in $(echo "${BLOCKS}" | jq -r '.[] | @base64'); do + _jq() { + echo ${row} | base64 --decode + } + + curl \ + -X POST \ + -H "Content-type: application/json" \ + --data '{"blocks": '["$(_jq)"]'}' \ + $WEBHOOK_URL + done + + fi + +done <<<"$DOWNSTREAM_PIPELINE_IDS" diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 787dd48c7..4833b30e3 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -1,16 +1,35 @@ -import gc -import sys +import os from pathlib import Path -from unittest import mock import pytest import torch +import torch.distributed -from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy +from megatron.core.utils import is_te_min_version from tests.unit_tests.dist_checkpointing import TempNamedDir from tests.unit_tests.test_utilities import Utils +def pytest_sessionfinish(session, exitstatus): + if exitstatus == 5: + session.exitstatus = 0 + + +@pytest.fixture(scope="session", autouse=True) +def cleanup(): + yield + if torch.distributed.is_initialized(): + torch.distributed.barrier() + torch.distributed.destroy_process_group() + + +@pytest.fixture(scope="function", autouse=True) +def set_env(): + if is_te_min_version("1.3"): + os.environ['NVTE_FLASH_ATTN'] = '0' + os.environ['NVTE_FUSED_ATTN'] = '0' + + @pytest.fixture(scope="session") def tmp_path_dist_ckpt(tmp_path_factory) -> Path: """Common directory for saving the checkpoint. diff --git a/tests/unit_tests/data/test_bin_reader.py b/tests/unit_tests/data/test_bin_reader.py index b8b6ec5dd..0485d130e 100644 --- a/tests/unit_tests/data/test_bin_reader.py +++ b/tests/unit_tests/data/test_bin_reader.py @@ -90,6 +90,7 @@ class _LocalClientError(Exception): @pytest.mark.flaky +@pytest.mark.flaky_in_dev def test_bin_reader(): with tempfile.TemporaryDirectory() as temp_dir: # set the default nltk data path diff --git a/tests/unit_tests/data/test_gpt_dataset.py b/tests/unit_tests/data/test_gpt_dataset.py index 817ea227f..42a8532b7 100644 --- a/tests/unit_tests/data/test_gpt_dataset.py +++ b/tests/unit_tests/data/test_gpt_dataset.py @@ -26,7 +26,6 @@ def sample_N(dataset, N, randomize): return samples -@pytest.mark.flaky def test_mock_gpt_dataset(): if torch.distributed.is_available(): Utils.initialize_distributed() diff --git a/tests/unit_tests/data/test_preprocess_data.py b/tests/unit_tests/data/test_preprocess_data.py index 4eca14e58..faf54efa8 100644 --- a/tests/unit_tests/data/test_preprocess_data.py +++ b/tests/unit_tests/data/test_preprocess_data.py @@ -183,7 +183,6 @@ def gpt2_merge(odir): return path -@pytest.mark.flaky def test_preprocess_data_gpt(): with tempfile.TemporaryDirectory() as temp_dir: @@ -215,6 +214,7 @@ def bert_vocab(odir): @pytest.mark.flaky +@pytest.mark.flaky_in_dev def test_preprocess_data_bert(): with tempfile.TemporaryDirectory() as temp_dir: diff --git a/tests/unit_tests/dist_checkpointing/conftest.py b/tests/unit_tests/dist_checkpointing/conftest.py index 83cbc684f..3702ac5ed 100644 --- a/tests/unit_tests/dist_checkpointing/conftest.py +++ b/tests/unit_tests/dist_checkpointing/conftest.py @@ -5,6 +5,11 @@ from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy +def pytest_sessionfinish(session, exitstatus): + if exitstatus == 5: + session.exitstatus = 0 + + @pytest.fixture(scope='session', 
autouse=True) def set_default_dist_ckpt_strategy(): def get_pyt_dist_save_sharded_strategy(): diff --git a/tests/unit_tests/dist_checkpointing/models/common.py b/tests/unit_tests/dist_checkpointing/models/common.py index 4b908ba3f..30097b70b 100644 --- a/tests/unit_tests/dist_checkpointing/models/common.py +++ b/tests/unit_tests/dist_checkpointing/models/common.py @@ -59,15 +59,17 @@ def common_test_parallel_reconfiguration_e2e( use_fpsl, load_order="tp-dp-pp", store_order="tp-dp-pp", + src_tp_pp_kwargs=None, + dst_tp_pp_kwargs=None, ): """Test model saving and loading with different TP/PP""" + Utils.initialize_model_parallel(*src_tp_pp, **(src_tp_pp_kwargs or {}), order=load_order) with TempNamedDir( tmp_path_dist_ckpt / 'test_gpt_model_reconfiguration_model_A' ) as ckpt_dir_A, TempNamedDir( tmp_path_dist_ckpt / 'test_gpt_model_reconfiguration_model_B' ) as ckpt_dir_B: # Save checkpoint A - Utils.initialize_model_parallel(*src_tp_pp, order=load_order) gpt_model_A = initialize_model_fn( 1, src_layer_spec_fn, @@ -87,7 +89,7 @@ def common_test_parallel_reconfiguration_e2e( # Load checkpoint A with different TP/PP and save as checkpoint B # No FPS this time, only FPL - Utils.initialize_model_parallel(*dest_tp_pp, order=store_order) + Utils.initialize_model_parallel(*dest_tp_pp, **(dst_tp_pp_kwargs or {}), order=store_order) gpt_model_B = initialize_model_fn( 2, dst_layer_spec_fn, diff --git a/tests/unit_tests/dist_checkpointing/models/test_mamba.py b/tests/unit_tests/dist_checkpointing/models/test_mamba.py index 6bdcd9b82..94a57984d 100644 --- a/tests/unit_tests/dist_checkpointing/models/test_mamba.py +++ b/tests/unit_tests/dist_checkpointing/models/test_mamba.py @@ -14,12 +14,12 @@ FullyParallelLoadStrategyWrapper, FullyParallelSaveStrategyWrapper, ) -from megatron.core.ssm.mamba_mixer import MambaMixer, MambaMixerSubmodules -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.custom_layers.transformer_engine import ( +from megatron.core.extensions.transformer_engine import ( TELayerNormColumnParallelLinear, TERowParallelLinear, ) +from megatron.core.ssm.mamba_mixer import MambaMixer, MambaMixerSubmodules +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed from megatron.core.transformer.transformer_config import TransformerConfig from tests.unit_tests.dist_checkpointing import TempNamedDir from tests.unit_tests.test_utilities import Utils @@ -74,7 +74,6 @@ class TestMambaReconfiguration: # (False, (1, 1, 4), (8, 1, 1), True), ], ) - @pytest.mark.flaky def test_parallel_reconfiguration_e2e( self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, use_glu, use_fpsl ): diff --git a/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py b/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py index 4a8f153ed..e5e3ac98b 100644 --- a/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py +++ b/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py @@ -2,6 +2,7 @@ import pytest import torch +from transformer_engine.pytorch.fp8 import check_fp8_support, fp8_autocast from megatron.core import parallel_state from megatron.core.dist_checkpointing import load, load_plain_tensors, save @@ -22,8 +23,10 @@ from tests.unit_tests.dist_checkpointing import TempNamedDir from tests.unit_tests.test_utilities import Utils +fp8_available, reason_for_no_fp8 = check_fp8_support() -def initialize_expert_layer(seed, glu=True, expert_type='sequential', **config_kwargs): + +def 
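common_test_parallel_reconfiguration_e2e above gains optional src/dst keyword dictionaries and forwards them with the **(kwargs or {}) idiom, so callers that do not need them (the GPT tests) pass nothing while the T5 tests can inject encoder_pipeline_model_parallel_size. A tiny sketch of the idiom, with init standing in for Utils.initialize_model_parallel:

def init(tp, pp, order="tp-dp-pp", **extra):
    print(tp, pp, order, extra)

src_tp_pp = (2, 4)
src_tp_pp_kwargs = None   # or e.g. {"encoder_pipeline_model_parallel_size": 1} for the T5 tests
init(*src_tp_pp, **(src_tp_pp_kwargs or {}), order="tp-dp-pp")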
initialize_expert_layer(seed, glu=True, expert_type='sequential', fp8=False, **config_kwargs): torch.manual_seed(seed) model_parallel_cuda_manual_seed(seed) @@ -32,7 +35,7 @@ def initialize_expert_layer(seed, glu=True, expert_type='sequential', **config_k num_local_experts = num_moe_experts // parallel_state.get_expert_model_parallel_world_size() default_config_kwargs = dict( num_layers=pp_size, - hidden_size=12, + hidden_size=16, num_attention_heads=4, num_moe_experts=num_moe_experts, use_cpu_initialization=True, @@ -41,7 +44,7 @@ def initialize_expert_layer(seed, glu=True, expert_type='sequential', **config_k default_config_kwargs.update(**config_kwargs) transformer_config = TransformerConfig(**default_config_kwargs) transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( - num_experts=num_moe_experts, moe_grouped_gemm=(expert_type != 'sequential') + num_experts=num_moe_experts, moe_grouped_gemm=(expert_type != 'sequential'), fp8=fp8 ) if expert_type == 'grouped': model = GroupedMLP(num_local_experts, transformer_config) @@ -84,37 +87,63 @@ def teardown_method(self, method): Utils.destroy_model_parallel() @pytest.mark.parametrize( - "use_fpsl,src_tp_pp_exp,dest_tp_pp_exp,use_glu", + "use_fpsl,src_tp_pp_ep_etp,dest_tp_pp_ep_etp,use_glu", [ # changing PP is impossible because the number of layers must be the same - (False, (2, 4, 1), (2, 4, 1), False), - (True, (2, 4, 1), (2, 4, 1), False), - (False, (1, 1, 1), (1, 1, 1), False), - (True, (1, 1, 1), (1, 1, 4), False), - (False, (1, 1, 8), (1, 1, 2), False), - (False, (2, 2, 2), (4, 2, 1), False), - (True, (1, 1, 4), (8, 1, 1), False), - (False, (1, 8, 1), (1, 8, 1), False), - (False, (1, 1, 4), (2, 1, 1), False), - (False, (1, 1, 1), (1, 1, 1), True), - (False, (1, 1, 1), (1, 1, 4), True), - (True, (1, 1, 1), (2, 1, 1), True), - (False, (1, 1, 4), (8, 1, 1), True), + (False, (2, 4, 1, 2), (2, 4, 1, 2), False), + (True, (2, 4, 1, 2), (2, 4, 1, 2), False), + (False, (2, 4, 1, 2), (1, 4, 1, 2), False), + (True, (2, 1, 1, 2), (1, 1, 1, 2), False), + (False, (1, 1, 1, 1), (1, 1, 1, 1), False), + (True, (1, 1, 1, 1), (1, 1, 4, 1), False), + (False, (1, 1, 8, 1), (1, 1, 2, 1), False), + (False, (2, 2, 2, 2), (4, 2, 1, 4), False), + (True, (1, 1, 4, 1), (8, 1, 1, 1), False), + (False, (1, 8, 1, 1), (1, 8, 1, 1), False), + (False, (1, 1, 4, 1), (2, 1, 1, 2), False), + (False, (2, 1, 4, 1), (2, 1, 1, 4), False), + (False, (1, 1, 1, 1), (1, 1, 1, 1), True), + (False, (1, 1, 1, 1), (1, 1, 4, 1), True), + (True, (1, 1, 1, 1), (2, 1, 1, 1), True), + (False, (1, 1, 4, 1), (8, 1, 1, 8), True), ], ) @pytest.mark.parametrize("expert_type", expert_type) + @pytest.mark.parametrize( + "load_order,store_order", + [ + ("tp-ep-dp-pp", "tp-ep-dp-pp"), + # ("tp-ep-dp-pp", "ep-tp-dp-pp"), + # ("ep-tp-dp-pp", "ep-tp-dp-pp"), + # ("ep-tp-dp-pp", "tp-ep-dp-pp"), + ], + ) def test_parallel_reconfiguration_e2e( - self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, use_glu, use_fpsl, expert_type + self, + tmp_path_dist_ckpt, + src_tp_pp_ep_etp, + dest_tp_pp_ep_etp, + use_glu, + use_fpsl, + expert_type, + load_order, + store_order, ): - """Test model saving and loading with different TP/PP/expert parallelism""" - src_tp, src_pp, src_exp = src_tp_pp_exp - dest_tp, dest_pp, dest_exp = dest_tp_pp_exp + """Test model saving and loading with different TP/PP/EP/ETP(expert-tensor-parallel)""" + src_tp, src_pp, src_ep, src_etp = src_tp_pp_ep_etp + dest_tp, dest_pp, dest_ep, dest_etp = dest_tp_pp_ep_etp if expert_type == 'grouped': add_bias_linear = False else: 
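The expert tests above work with eight experts (hence the 8 // ep_size expressions later in the file) and split them evenly across the expert-model-parallel group. A quick illustration of that bookkeeping, assuming eight experts:

import torch

num_moe_experts = 8   # implied by the `8 // ep_size` expressions in the test above
for ep_size in (1, 2, 4, 8):
    num_local_experts = num_moe_experts // ep_size
    tokens_per_expert = torch.tensor([16] * num_local_experts)
    print(ep_size, num_local_experts, int(tokens_per_expert.sum()))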
add_bias_linear = True # Save checkpoint A - Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) + Utils.initialize_model_parallel( + src_tp, + src_pp, + expert_model_parallel_size=src_ep, + expert_tensor_parallel_size=src_etp, + order=store_order, + ) with TempNamedDir( tmp_path_dist_ckpt / 'test_expert_layer_reconfiguration_model_A' ) as ckpt_dir_A, TempNamedDir( @@ -135,9 +164,15 @@ def test_parallel_reconfiguration_e2e( save(sharded_state_dict, ckpt_dir_A, save_strategy) Utils.destroy_model_parallel() - # Load checkpoint A with different TP/PP/expert and save as checkpoint B + # Load checkpoint A with different TP/PP/EP and save as checkpoint B # No FPS this time, only FPL - Utils.initialize_model_parallel(dest_tp, dest_pp, expert_model_parallel_size=dest_exp) + Utils.initialize_model_parallel( + dest_tp, + dest_pp, + expert_model_parallel_size=dest_ep, + expert_tensor_parallel_size=dest_etp, + order=load_order, + ) model_B = initialize_expert_layer( 1, use_glu, expert_type, add_bias_linear=add_bias_linear ) @@ -230,3 +265,113 @@ def test_sequential_grouped_mlp_interchangeable( diffs = diff(state_dict_A, state_dict_B) assert not any(map(bool, diffs)), diffs Utils.destroy_model_parallel() + + @pytest.mark.skipif( + not is_te_min_version("1.11.0"), + reason="FP8 support of TEGroupedMLP is only available in TE 1.11.0 and later.", + ) + @pytest.mark.skipif(not fp8_available, reason=reason_for_no_fp8) + @pytest.mark.parametrize( + "src_module,dst_module,src_tp_pp_exp,dest_tp_pp_exp", + [ + # Changing tp/pp/dp doesn't affect _extra_state + ('sequential', 'te_grouped', (1, 1, 1), (1, 1, 4)), + ('sequential', 'te_grouped', (1, 1, 4), (1, 1, 1)), + ('te_grouped', 'sequential', (1, 1, 1), (1, 1, 4)), + ('te_grouped', 'sequential', (1, 1, 4), (1, 1, 1)), + ], + ) + def test_sequential_grouped_mlp_extra_state( + self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, src_module, dst_module + ): + """Test saving and loading _extra_state""" + src_tp, src_pp, src_exp = src_tp_pp_exp + dest_tp, dest_pp, dest_exp = dest_tp_pp_exp + use_glu = True + Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_grouped_mlp_extra_state_model_A' + ) as ckpt_dir_A, TempNamedDir( + tmp_path_dist_ckpt / 'test_grouped_mlp_extra_state_model_B' + ) as ckpt_dir_B, fp8_autocast(): + tokens_per_expert = torch.tensor([16] * (8 // src_exp)) + input_tensor = torch.randn(tokens_per_expert.sum(), 16, device="cuda") + + # Save checkpoint A + model_A = initialize_expert_layer(1, use_glu, expert_type=src_module, fp8=True) + model_A = model_A.cuda() + # fp8 meta is initialized at the first step + model_A(input_tensor, tokens_per_expert) + sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) + + save_strategy = get_default_save_sharded_strategy() + save(sharded_state_dict, ckpt_dir_A, save_strategy) + Utils.destroy_model_parallel() + + Utils.initialize_model_parallel(dest_tp, dest_pp, expert_model_parallel_size=dest_exp) + load_strategy = None + + # model_A load checkpoint A + model_A = initialize_expert_layer(1, use_glu, expert_type=src_module, fp8=True) + model_A = model_A.cuda() + state_dict = load( + model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()), + ckpt_dir_A, + load_strategy, + ) + model_A.load_state_dict(state_dict) + + # model_B load checkpoint A + model_B = initialize_expert_layer(1, use_glu, expert_type=dst_module, fp8=True) + model_B = model_B.cuda() + state_dict = 
load( + model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), + ckpt_dir_A, + load_strategy, + ) + model_B.load_state_dict(state_dict) + + # Should be bitwise equal + if src_module == "te_grouped": + model_A, model_B = model_B, model_A + torch.testing.assert_close( + torch.cat( + [ + model_A.local_experts[i] + .linear_fc1.fp8_meta["scaling_fwd"] + .amax_history.view(-1, 1) + for i in range(8 // dest_exp) + ], + dim=1, + ).view(1024, -1), + model_B.linear_fc1.fp8_meta["scaling_fwd"].amax_history, + rtol=0, + atol=0, + ) + + Utils.destroy_model_parallel() + + @pytest.mark.skipif( + not is_te_min_version("1.9.0"), + reason="TEGroupedMLP is only supported in TE 1.9.0 and later.", + ) + @pytest.mark.parametrize("ep_size", [1, 2]) + def test_te_grouped_linear_torch_native(self, tmp_path_dist_ckpt, ep_size): + """Test saving and loading torch native checkpoints""" + use_glu = True + Utils.initialize_model_parallel(1, 1, expert_model_parallel_size=ep_size) + with TempNamedDir(tmp_path_dist_ckpt / 'test_te_grouped_linear_torch_native') as ckpt_dir: + tokens_per_expert = torch.tensor([16] * (8 // ep_size)) + input_tensor = torch.randn(tokens_per_expert.sum(), 16, device="cuda") + + # Save checkpoint + model = initialize_expert_layer(1, use_glu, expert_type="te_grouped") + model = model.cuda() + model(input_tensor, tokens_per_expert) + torch.save(model.state_dict(), ckpt_dir / f"model_ep{torch.distributed.get_rank()}.pt") + + # Load checkpoint + state_dict = torch.load(ckpt_dir / f"model_ep{torch.distributed.get_rank()}.pt") + model.load_state_dict(state_dict) + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/dist_checkpointing/models/test_t5_model.py b/tests/unit_tests/dist_checkpointing/models/test_t5_model.py index 07c9f8676..57e1cdb90 100644 --- a/tests/unit_tests/dist_checkpointing/models/test_t5_model.py +++ b/tests/unit_tests/dist_checkpointing/models/test_t5_model.py @@ -4,7 +4,7 @@ import torch from megatron.core import parallel_state as ps -from megatron.core.dist_checkpointing import load, load_plain_tensors, save +from megatron.core.dist_checkpointing import load, save from megatron.core.dist_checkpointing.validation import StrictHandling from megatron.core.models.retro.decoder_spec import ( get_retro_decoder_layer_local_spec, @@ -27,13 +27,39 @@ from megatron.core.transformer.transformer_block import TransformerBlockSubmodules from megatron.core.transformer.transformer_config import TransformerConfig from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.dist_checkpointing.models.common import ( + common_test_parallel_reconfiguration_e2e, +) from tests.unit_tests.test_utilities import Utils -def initialize_t5_model(seed, encoder_spec_fn, decoder_spec_fn, num_layers=2, **config_kwargs): +def initialize_t5_model(seed, encoder_decoder_spec_fn, num_layers=8, **config_kwargs): + encoder_spec_fn, decoder_spec_fn = encoder_decoder_spec_fn torch.manual_seed(seed) model_parallel_cuda_manual_seed(seed) + if ps.get_pipeline_model_parallel_decoder_start() is None: + encoder_layers_per_pipeline = num_layers // ps.get_pipeline_model_parallel_world_size() + decoder_layers_per_pipeline = num_layers // ps.get_pipeline_model_parallel_world_size() + pre_process = ps.is_pipeline_first_stage() + post_process = ps.is_pipeline_last_stage() + add_encoder = None + add_decoder = None + else: + encoder_layers_per_pipeline = num_layers // ps.get_pipeline_model_parallel_decoder_start() + decoder_layers_per_pipeline = num_layers // ( + 
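The FP8 extra-state test above demands bitwise equality of the amax histories, which it gets by calling torch.testing.assert_close with both tolerances set to zero. A minimal illustration of that idiom:

import torch

a = torch.tensor([1.0, 2.0, 3.0])
b = a.clone()
torch.testing.assert_close(a, b, rtol=0, atol=0)   # exact, bitwise comparison
# Any real difference would raise, e.g.:
# torch.testing.assert_close(a, a + 1e-3, rtol=0, atol=0)  -> AssertionError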
ps.get_pipeline_model_parallel_world_size() + - ps.get_pipeline_model_parallel_decoder_start() + ) + + rank = ps.get_pipeline_model_parallel_rank() + first_decoder_rank = ps.get_pipeline_model_parallel_decoder_start() + world_size = ps.get_pipeline_model_parallel_world_size() + pre_process = rank == 0 or rank == first_decoder_rank + post_process = (rank == (first_decoder_rank - 1)) or (rank == (world_size - 1)) + add_encoder = ps.is_inside_encoder() + add_decoder = ps.is_inside_decoder() + default_config_kwargs = dict( num_layers=num_layers, hidden_size=16, @@ -45,20 +71,20 @@ def initialize_t5_model(seed, encoder_spec_fn, decoder_spec_fn, num_layers=2, ** ) default_config_kwargs.update(**config_kwargs) transformer_config = TransformerConfig(**default_config_kwargs) - pre_process = ps.is_pipeline_first_stage() - post_process = ps.is_pipeline_last_stage() - en_block_spec = TransformerBlockSubmodules([encoder_spec_fn()] * num_layers) - de_block_spec = TransformerBlockSubmodules([decoder_spec_fn()] * num_layers) + en_block_spec = TransformerBlockSubmodules([encoder_spec_fn()] * encoder_layers_per_pipeline) + de_block_spec = TransformerBlockSubmodules([decoder_spec_fn()] * decoder_layers_per_pipeline) model = T5Model( encoder_config=transformer_config, config=transformer_config, transformer_encoder_layer_spec=en_block_spec, transformer_decoder_layer_spec=de_block_spec, - pre_process=False, - post_process=False, vocab_size=29184, max_sequence_length=4, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, ) with torch.no_grad(): @@ -90,18 +116,18 @@ def test_sharded_state_dict_save_load( 'retro': (get_retro_encoder_layer_local_spec, get_retro_decoder_layer_local_spec), }, } - src_encoder_spec_fn, src_decoder_spec_fn = enc_dec_spec_fn[src_spec_type][model_type] - dst_encoder_spec_fn, dst_decoder_spec_fn = enc_dec_spec_fn[dst_spec_type][model_type] + src_encoder_decoder_spec_fn = enc_dec_spec_fn[src_spec_type][model_type] + dst_encoder_decoder_spec_fn = enc_dec_spec_fn[dst_spec_type][model_type] Utils.initialize_model_parallel(1, 1) - gpt_model = initialize_t5_model(1, src_encoder_spec_fn, src_decoder_spec_fn) + gpt_model = initialize_t5_model(1, src_encoder_decoder_spec_fn) with TempNamedDir(tmp_path_dist_ckpt / 'test_gpt_model') as ckpt_dir: # Save sharded_state_dict = gpt_model.sharded_state_dict() save(sharded_state_dict, ckpt_dir) # Load - gpt_model = initialize_t5_model(2, dst_encoder_spec_fn, dst_decoder_spec_fn) + gpt_model = initialize_t5_model(2, dst_encoder_decoder_spec_fn) sharded_state_dict = gpt_model.sharded_state_dict() state_dict, missing_keys, unexpected_keys = load( @@ -113,3 +139,84 @@ def test_sharded_state_dict_save_load( gpt_model.load_state_dict(state_dict) Utils.destroy_model_parallel() + + +class TestT5ModelReconfiguration: + + # def teardown_method(self, method): + # Utils.destroy_model_parallel() + + @pytest.mark.parametrize('src_spec_type', ['local']) # ['te', 'local']) + @pytest.mark.parametrize('dst_spec_type', ['local']) # ['te', 'local']) + @pytest.mark.parametrize('model_type', ['t5']) + @pytest.mark.parametrize( + ('use_fpsl', 'src_tp_pp_encpp', 'dest_tp_pp_encpp'), + [ + (False, (1, 1, None), (1, 1, None)), + (False, (1, 1, 1), (1, 1, 1)), + (False, (2, 1, 1), (2, 1, 1)), + (False, (2, 2, 2), (2, 2, 2)), + (True, (2, 2, 2), (2, 2, 2)), + (True, (2, 1, 1), (1, 2, 2)), + ], + ) + def test_parallel_reconfiguration_e2e( + self, + tmp_path_dist_ckpt, + src_tp_pp_encpp, + dest_tp_pp_encpp, + use_fpsl, + 
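When an encoder pipeline stage is configured, initialize_t5_model above splits the layer budget at pipeline_model_parallel_decoder_start: ranks before that boundary hold encoder layers, ranks after it hold decoder layers, and pre/post processing is enabled on the first and last rank of each half. A worked example with made-up sizes:

num_layers = 8
world_size = 4        # pipeline_model_parallel_world_size
decoder_start = 2     # pipeline_model_parallel_decoder_start: ranks 0-1 encoder, 2-3 decoder

encoder_layers_per_pipeline = num_layers // decoder_start                  # 4
decoder_layers_per_pipeline = num_layers // (world_size - decoder_start)   # 4
for rank in range(world_size):
    pre_process = rank == 0 or rank == decoder_start
    post_process = rank == decoder_start - 1 or rank == world_size - 1
    print(rank, pre_process, post_process)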
src_spec_type, + dst_spec_type, + model_type, + ): + """Test model saving and loading with different TP/PP""" + + *src_tp_pp, src_encpp = src_tp_pp_encpp + *dest_tp_pp, dst_encpp = dest_tp_pp_encpp + + enc_dec_spec_fn = { + 'te': { + 't5': (t5_encoder_te_spec, t5_decoder_te_spec), + 'retro': (get_retro_encoder_layer_te_spec, get_retro_decoder_layer_te_spec), + }, + 'local': { + 't5': (t5_encoder_local_spec, t5_decoder_local_spec), + 'retro': (get_retro_encoder_layer_local_spec, get_retro_decoder_layer_local_spec), + }, + } + + common_test_parallel_reconfiguration_e2e( + initialize_t5_model, + tmp_path_dist_ckpt, + src_tp_pp, + dest_tp_pp, + enc_dec_spec_fn[src_spec_type][model_type], + enc_dec_spec_fn[dst_spec_type][model_type], + use_fpsl, + src_tp_pp_kwargs=dict(encoder_pipeline_model_parallel_size=src_encpp), + dst_tp_pp_kwargs=dict(encoder_pipeline_model_parallel_size=dst_encpp), + ) + + def test_pipeline_parallel_setup(self): + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + encoder_pipeline_model_parallel_size=1, + ) + assert ps.get_pipeline_model_parallel_world_size() == 2 + assert ps.get_pipeline_model_parallel_rank() == Utils.rank // 4 + + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + encoder_pipeline_model_parallel_size=3, + ) + assert ps.get_pipeline_model_parallel_world_size() == 4 + assert ps.get_pipeline_model_parallel_rank() == Utils.rank // 2 + + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, pipeline_model_parallel_size=2 + ) + assert ps.get_pipeline_model_parallel_world_size() == 2 + assert ps.get_pipeline_model_parallel_rank() == Utils.rank // 4 diff --git a/tests/unit_tests/dist_checkpointing/test_fp8.py b/tests/unit_tests/dist_checkpointing/test_fp8.py index d2dcb367c..a93f263d5 100644 --- a/tests/unit_tests/dist_checkpointing/test_fp8.py +++ b/tests/unit_tests/dist_checkpointing/test_fp8.py @@ -51,7 +51,6 @@ def get_ten(dtype: str = 'fp8'): (False, (2, 4), (2, 4), None), ], ) - @pytest.mark.flaky def test_fp8_save_load( self, tmp_path_dist_ckpt, use_fpsl, src_tp_pp, dest_tp_pp, load_exchange_algo ): diff --git a/tests/unit_tests/dist_checkpointing/test_fully_parallel.py b/tests/unit_tests/dist_checkpointing/test_fully_parallel.py index 623e37d6b..a383bd3ef 100644 --- a/tests/unit_tests/dist_checkpointing/test_fully_parallel.py +++ b/tests/unit_tests/dist_checkpointing/test_fully_parallel.py @@ -282,6 +282,7 @@ def test_load_distribution(self, parallelization_along_dp, tmp_path_dist_ckpt): @pytest.mark.parametrize('state_dict_device', ['cpu', 'cuda']) @pytest.mark.flaky + @pytest.mark.flaky_in_dev def test_memory_usage(self, state_dict_device, tmp_path_dist_ckpt): Utils.initialize_model_parallel(2, 1) diff --git a/tests/unit_tests/dist_checkpointing/test_local.py b/tests/unit_tests/dist_checkpointing/test_local.py index e4dfc6f8e..69919feda 100644 --- a/tests/unit_tests/dist_checkpointing/test_local.py +++ b/tests/unit_tests/dist_checkpointing/test_local.py @@ -61,7 +61,8 @@ def teardown_method(self, method): Utils.destroy_model_parallel() @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) - def test_sharded_tensors(self, tp, pp): + @pytest.mark.parametrize(('use_torch_fsdp2'), [True, False]) + def test_sharded_tensors(self, tp, pp, use_torch_fsdp2): Utils.initialize_model_parallel(tp, pp) num_floating_point_operations_so_far = 0 model, optimizer = setup_model_and_optimizer(1, tp, pp) @@ -73,6 +74,7 @@ def test_sharded_tensors(self, tp, pp): 
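The asserts in test_pipeline_parallel_setup above are consistent with a total pipeline world size of encoder_pipeline_model_parallel_size + pipeline_model_parallel_size, with the (assumed) eight test ranks mapped to stages in contiguous blocks. A sketch of that arithmetic:

world_size = 8   # assumed 8-rank test world (1N8G)
for encoder_pp, pp in [(1, 1), (3, 1), (0, 2)]:
    pipeline_world_size = encoder_pp + pp
    ranks_per_stage = world_size // pipeline_world_size
    stage_of_rank = [rank // ranks_per_stage for rank in range(world_size)]
    print(pipeline_world_size, stage_of_rank)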
mock_args = SimpleNamespace() mock_args.no_save_optim = False mock_args.no_save_rng = True + mock_args.use_torch_fsdp2 = use_torch_fsdp2 # Test save_local state_dict = generate_state_dict( mock_args, diff --git a/tests/unit_tests/dist_checkpointing/test_nonpersistent.py b/tests/unit_tests/dist_checkpointing/test_nonpersistent.py index 346751e26..89e609af7 100644 --- a/tests/unit_tests/dist_checkpointing/test_nonpersistent.py +++ b/tests/unit_tests/dist_checkpointing/test_nonpersistent.py @@ -29,7 +29,6 @@ def teardown_method(self, method): Utils.destroy_model_parallel() @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) - @pytest.mark.flaky def test_basic_save_load_scenarios(self, tmp_path_dist_ckpt, tp, pp): Utils.initialize_model_parallel(tp, pp) num_floating_point_operations_so_far = 0 @@ -118,7 +117,6 @@ def test_basic_save_load_scenarios(self, tmp_path_dist_ckpt, tp, pp): class TestLegacySaveAndLoad: @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) - @pytest.mark.flaky def test_basic_save_load_scenario(self, tmp_path_dist_ckpt, tp, pp): Utils.initialize_model_parallel(tp, pp) num_floating_point_operations_so_far = 0 diff --git a/tests/unit_tests/dist_checkpointing/test_optimizer.py b/tests/unit_tests/dist_checkpointing/test_optimizer.py index 19d1ee9e8..ab43cc4f4 100644 --- a/tests/unit_tests/dist_checkpointing/test_optimizer.py +++ b/tests/unit_tests/dist_checkpointing/test_optimizer.py @@ -178,7 +178,6 @@ def teardown_method(self, method): # ((2, 1), 2, 2), ], ) - @pytest.mark.flaky def test_dp_sharding(self, tmp_path_dist_ckpt, tp_pp, src_dp, dest_dp, use_fpsl, initialize_fn): src_world_size = tp_pp[0] * tp_pp[1] * src_dp dest_world_size = tp_pp[0] * tp_pp[1] * dest_dp @@ -256,7 +255,6 @@ def test_dp_sharding(self, tmp_path_dist_ckpt, tp_pp, src_dp, dest_dp, use_fpsl, ('src_tp_pp', 'dest_tp_pp', 'use_glu'), [((2, 2), (2, 4), False), ((1, 8), (4, 1), True), ((2, 4), (4, 2), False)], ) - @pytest.mark.flaky def test_finetune_doesnt_load_optimizer( self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp, use_glu ): @@ -281,6 +279,8 @@ def test_finetune_doesnt_load_optimizer( Utils.destroy_model_parallel() Utils.initialize_model_parallel(*dest_tp_pp) + mock_args.tensor_model_parallel_size = dest_tp_pp[0] + mock_args.pipeline_model_parallel_size = dest_tp_pp[1] model, optimizer = setup_model_and_optimizer( seed=3, tp=dest_tp_pp[0], @@ -293,7 +293,10 @@ def test_finetune_doesnt_load_optimizer( # Load with different TPxPP should raise DistributeOptimizer error with pytest.raises(RuntimeError) as exc_info: load_checkpoint_no_arg_checks(model, optimizer, None) - assert "(TP, PP) mismatch" in str(exc_info.value) + # "(TP, PP) mismatch" check is for backwards compatibility tests + assert "(TP, PP) mismatch" in str( + exc_info.value + ) or "(TP, PP, encoder TP, encoder PP) mismatch" in str(exc_info.value) # Check that the state didn't change assert not any(diff(model[0].state_dict(), model_unloaded_state_dict)) @@ -329,7 +332,6 @@ def test_finetune_doesnt_load_optimizer( assert not diffs[0] and not diffs[1] and diffs[2] assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) - @pytest.mark.flaky def test_can_load_deprecated_bucket_space_format(self, tmp_path_dist_ckpt): # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
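Several checkpointing tests above stub out parsed Megatron arguments with a SimpleNamespace, setting only the attributes the code under test reads; this change threads a use_torch_fsdp2 flag through the same stub. For example:

from types import SimpleNamespace

mock_args = SimpleNamespace()
mock_args.no_save_optim = False
mock_args.no_save_rng = True
mock_args.use_torch_fsdp2 = False   # newly parametrized by the test above
print(vars(mock_args))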
tp = 4 @@ -398,9 +400,18 @@ def teardown_method(self, method): @pytest.mark.parametrize( ('src_tp_pp', 'dest_tp_pp'), [((2, 4), (2, 4)), ((2, 4), (4, 2)), ((8, 1), (1, 2))] ) - @pytest.mark.flaky def test_fp32_optimizer_resharding(self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp): # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + + def preprocess_fn(optim_common_dict): + import copy + + preprocessed_optimizer_common_dict = copy.deepcopy(optim_common_dict) + param_groups = preprocessed_optimizer_common_dict['optimizer']['param_groups'] + for dict_item in param_groups: + del dict_item['wd_mult'] + return preprocessed_optimizer_common_dict + Utils.initialize_model_parallel(*src_tp_pp) with TempNamedDir( tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=True ) @@ -417,7 +428,11 @@ def test_fp32_optimizer_resharding(self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_ bf16=False, ) - save(optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), ckpt_dir_A) + save( + optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), + ckpt_dir_A, + preprocess_common_before_consistancy_check=preprocess_fn, + ) Utils.destroy_model_parallel() # Load checkpoint A with different TP/PP and save as checkpoint B @@ -465,7 +480,6 @@ def teardown_method(self, method): ('src_tp_pp', 'dest_tp_pp'), [((2, 4), (2, 4)), ((2, 4), (2, 2)), ((2, 4), (4, 2)), ((8, 1), (1, 2))], ) - @pytest.mark.flaky def test_optimizer_resharding( self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp, use_dist_opt, bf16 ): diff --git a/tests/unit_tests/dist_checkpointing/test_serialization.py b/tests/unit_tests/dist_checkpointing/test_serialization.py index 19e99de55..e59896c92 100644 --- a/tests/unit_tests/dist_checkpointing/test_serialization.py +++ b/tests/unit_tests/dist_checkpointing/test_serialization.py @@ -2,14 +2,24 @@ import io import logging +import os import numpy as np import pytest import torch from torch.distributed.checkpoint import CheckpointException as PyTCheckpointingException +from torch.distributed.checkpoint import FileSystemReader + +try: + from torch.distributed import DeviceMesh + from torch.distributed._tensor import DTensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False from megatron.core import parallel_state -from megatron.core.dist_checkpointing import ShardedTensor, load, save +from megatron.core.dist_checkpointing import ShardedTensor, load, remove_sharded_tensors, save from megatron.core.dist_checkpointing.core import CheckpointingException, maybe_load_config from megatron.core.dist_checkpointing.dict_utils import diff from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedTensorFactory @@ -18,7 +28,9 @@ load_tensors_metadata, ) from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy +from megatron.core.dist_checkpointing.strategies.torch import TorchDistSaveShardedStrategy from megatron.core.dist_checkpointing.validation import StrictHandling +from megatron.core.utils import is_torch_min_version from tests.unit_tests.dist_checkpointing import TempNamedDir from tests.unit_tests.test_utilities import Utils @@ -42,6 +54,16 @@ def test_single_process_save_load(self, tmp_path_dist_ckpt): ), } + if HAVE_DTENSOR: + mesh = DeviceMesh.from_group( + parallel_state.get_data_parallel_group(with_context_parallel=True), "cuda" + ) + sharded_state_dict['sd_keyD'] = ShardedTensor.from_rank_offsets( + 'keyD', + DTensor.from_local(torch.ones(3, 5, 7), mesh)._local_tensor, + replica_id=Utils.rank, + ) + #
sync=True to make sure other ranks wait for rank 0 to finish creating directory. with TempNamedDir( tmp_path_dist_ckpt / 'test_single_process_save_load', sync=True @@ -56,6 +78,9 @@ def test_single_process_save_load(self, tmp_path_dist_ckpt): assert not (ckpt_dir / 'keyC').exists() assert not (ckpt_dir / 'sd_keyA').is_dir() + if HAVE_DTENSOR: + assert (ckpt_dir / 'keyD').is_dir() + load_ssd = { 'load_sd_keyA': ShardedTensor.from_rank_offsets( 'keyA', torch.ones(2, 4), replica_id=Utils.rank @@ -79,11 +104,22 @@ def test_multi_process_save(self, tmp_path_dist_ckpt): 'sd_keyB': ShardedTensor.from_rank_offsets( 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) ), + 'lr': 0.01, + 'rank': torch.distributed.get_rank(), } + def preprocess_fn(x): + del x['rank'] + return x + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. with TempNamedDir(tmp_path_dist_ckpt / 'test_multi_process_save', sync=True) as ckpt_dir: - save(state_dict, ckpt_dir) + save( + state_dict, + ckpt_dir, + validate_access_integrity=True, + preprocess_common_before_consistancy_check=preprocess_fn, + ) saved_config = maybe_load_config(ckpt_dir) if saved_config.sharded_backend == 'zarr': @@ -94,6 +130,42 @@ def test_multi_process_save(self, tmp_path_dist_ckpt): Utils.destroy_model_parallel() + def test_multi_process_save_log_difference(self, tmp_path_dist_ckpt, caplog): + Utils.initialize_model_parallel(2, 4) + + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) + ), + 'rank': torch.distributed.get_rank(), + } + + def preprocess_fn(x): + return x + + with caplog.at_level(logging.WARNING): + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir( + tmp_path_dist_ckpt / 'test_multi_process_save', sync=True + ) as ckpt_dir: + save( + state_dict, + ckpt_dir, + validate_access_integrity=True, + preprocess_common_before_consistancy_check=preprocess_fn, + ) + # pylint: disable=line-too-long + if torch.distributed.get_rank() == 0: + assert ( + "There is difference in the common state dict in different ranks. 
The differences are {1: ([], [], [(('rank',), , )]), 2: ([], [], [(('rank',), , )]), 3: ([], [], [(('rank',), , )]), 4: ([], [], [(('rank',), , )]), 5: ([], [], [(('rank',), , )]), 6: ([], [], [(('rank',), , )]), 7: ([], [], [(('rank',), , )])}" + in caplog.text + ) + + Utils.destroy_model_parallel() + def test_partition_change_save_load(self, tmp_path_dist_ckpt, strategy=None): Utils.initialize_model_parallel(2, 4) @@ -443,6 +515,59 @@ def test_tensor_shape_mismatch(self, tmp_path_dist_ckpt): Utils.destroy_model_parallel() + @pytest.mark.skipif( + not is_torch_min_version("2.3.0"), + reason="remove_sharded_tensors relies on Torch APIs introduced in v2.3.0", + ) + def test_remove_sharded_tensors(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + # Global tensor is just a range(32) repeated twice over the first dimension + global_tensor = torch.arange(4).unsqueeze(0).expand(2, 4) + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) + ), + 'sd_prefix_key_to_remove': ShardedTensor.from_rank_offsets( + 'prefix_key_to_remove', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) + ), + } + + prefix_name = "prefix" ## we will drop all tensors whose keys begin with "prefix" + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir( + tmp_path_dist_ckpt / 'test_remove_sharded_tensor_prefix', sync=True + ) as ckpt_dir: + save_strategy = TorchDistSaveShardedStrategy( + "torch_dist", 1, separation_hint=prefix_name + ) + save(state_dict, ckpt_dir, save_strategy) + + files = os.listdir(ckpt_dir) + prefix_files = [f for f in files if f.startswith(prefix_name)] + assert len(prefix_files) == torch.distributed.get_world_size() + + fs_reader = FileSystemReader(ckpt_dir) + original_metadata = fs_reader.read_metadata() + assert set(original_metadata.state_dict_metadata.keys()) == { + 'keyA', + 'prefix_key_to_remove', + } + + if torch.distributed.get_rank() == 0: + remove_sharded_tensors(ckpt_dir, key_prefix=prefix_name) + torch.distributed.barrier() + + files = os.listdir(ckpt_dir) + prefix_files = [f for f in files if f.startswith(prefix_name)] + assert len(prefix_files) == 0 + + new_metadata = fs_reader.read_metadata() + assert set(new_metadata.state_dict_metadata.keys()) == {'keyA'} + + Utils.destroy_model_parallel() + class TestNonStrictLoad: def setup_method(self, method): diff --git a/tests/unit_tests/dist_checkpointing/utils.py b/tests/unit_tests/dist_checkpointing/utils.py index 5dcf60b47..50677f095 100644 --- a/tests/unit_tests/dist_checkpointing/utils.py +++ b/tests/unit_tests/dist_checkpointing/utils.py @@ -113,7 +113,10 @@ def init_basic_mock_args(args, tp, pp, bf16=True): args.ddp_average_in_collective = False args.tensor_model_parallel_size = tp args.pipeline_model_parallel_size = pp + args.encoder_tensor_model_parallel_size = 0 + args.encoder_pipeline_model_parallel_size = 0 args.enable_ft_package = False + args.use_torch_fsdp2 = False return args diff --git a/tests/unit_tests/distributed/test_grad_reduce_for_replicated_embedder.py b/tests/unit_tests/distributed/test_grad_reduce_for_replicated_embedder.py new file mode 100644 index 000000000..8028c041c --- /dev/null +++ b/tests/unit_tests/distributed/test_grad_reduce_for_replicated_embedder.py @@ -0,0 +1,47 @@ +import pytest +import torch + +from megatron.core import ModelParallelConfig, parallel_state +from megatron.core.distributed.finalize_model_grads import _allreduce_conditional_embedding_grads 
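+# _allreduce_conditional_embedding_grads sums main_grad across pipeline-parallel ranks (and model
+# chunks) for parameters tagged with "pipeline_parallel"; untagged parameters keep their local grads.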
+from tests.unit_tests.test_utilities import Utils + +rank = Utils.rank + + +def test_allreduce_conditional_embedding_grads(): + + Utils.initialize_model_parallel(tensor_model_parallel_size=1, pipeline_model_parallel_size=4) + + # For virtual pipeline parallelism. + model = [torch.nn.Linear(10, 10, bias=True).cuda() for _ in range(2)] + # Here we only reduce weights, not bias to compare the results. + for chunk in model: + setattr(chunk.weight, "pipeline_parallel", True) + + config = ModelParallelConfig( + pipeline_model_parallel_size=4, sequence_parallel=False, pipeline_dtype=torch.float + ) + config.has_cond_embedder = True + + pp_rank = parallel_state.get_pipeline_model_parallel_rank() + pp_world_size = parallel_state.get_pipeline_model_parallel_world_size() + + # Init different grads for each model chunk and rank. + for i, chunk in enumerate(model): + for param in chunk.parameters(): + param.main_grad = torch.ones_like(param) * (pp_rank * 10.0 + i) + + _allreduce_conditional_embedding_grads(model, config) + + expect_value = 0 + for i in range(len(model)): + for j in range(pp_world_size): + expect_value += j * 10.0 + i + expect_weight_grad = torch.ones([10, 10]).cuda() * expect_value + + for i, chunk in enumerate(model): + expect_bias_grad = torch.ones([10]).cuda() * (pp_rank * 10.0 + i) + assert torch.equal(chunk.weight.main_grad, expect_weight_grad) + assert torch.equal(chunk.bias.main_grad, expect_bias_grad) + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/distributed/test_param_and_grad_buffer.py b/tests/unit_tests/distributed/test_param_and_grad_buffer.py index 9174665ee..5ff2a682a 100644 --- a/tests/unit_tests/distributed/test_param_and_grad_buffer.py +++ b/tests/unit_tests/distributed/test_param_and_grad_buffer.py @@ -6,8 +6,9 @@ import torch from megatron.core import parallel_state -from megatron.core.distributed import DistributedDataParallelConfig -from megatron.core.distributed.param_and_grad_buffer import _ParamAndGradBuffer, partition_buckets +from megatron.core.distributed import DistributedDataParallel, DistributedDataParallelConfig +from megatron.core.distributed.param_and_grad_buffer import partition_buckets +from megatron.core.transformer import TransformerConfig from tests.unit_tests.test_utilities import TestModel, Utils @@ -25,6 +26,7 @@ def get_model_and_buffers( grad_reduce_in_fp32=True, use_distributed_optimizer=use_distributed_optimizer, overlap_grad_reduce=overlap_grad_reduce, + bucket_size=bucket_size, ) model = TestModel( input_dim=input_dim, @@ -32,24 +34,16 @@ def get_model_and_buffers( num_layers=num_layers, bias=bias, shared_embedding=shared_embedding, + ).bfloat16() + + # Wrap with DistributedDataParallel, and get underlying buffer. + # Use dummy TransformerConfig with mostly default values. Avoid divide-by-zero + # errors for num_attention_heads and num_layers. 
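+    # DistributedDataParallel now builds the param-and-grad buffers itself, so the test takes
+    # model.buffers[0] instead of constructing _ParamAndGradBuffer by hand.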
+ model = DistributedDataParallel( + TransformerConfig(num_attention_heads=1, num_layers=1), ddp_config=ddp_config, module=model ) - params = list(model.parameters()) - param_to_name = {} - for name, param in model.named_parameters(): - param_to_name[param] = name - param_indices = list(range(len(params))) - - param_and_grad_buffer = _ParamAndGradBuffer( - ddp_config, - param_dtype=torch.bfloat16, - grad_dtype=torch.float32, - params=params, - data_parallel_group=parallel_state.get_data_parallel_group(), - bucket_size=bucket_size, - param_to_name=param_to_name, - gradient_scaling_factor=1.0, - param_indices=param_indices, - ) + assert len(model.buffers) == 1 + param_and_grad_buffer = model.buffers[0] return model, param_and_grad_buffer @@ -58,7 +52,6 @@ def get_model_and_buffers( @pytest.mark.parametrize("use_distributed_optimizer", [False, True]) @pytest.mark.parametrize("bias", [False, True]) @pytest.mark.parametrize("shared_embedding", [False, True]) -@pytest.mark.flaky def test_bucket_sizes( bucket_size: Optional[int], use_distributed_optimizer: bool, bias: bool, shared_embedding: bool ): @@ -79,7 +72,7 @@ def test_bucket_sizes( shared_embedding=shared_embedding, bucket_size=bucket_size, use_distributed_optimizer=use_distributed_optimizer, - overlap_grad_reduce=False, + overlap_grad_reduce=True, ) actual_numel_in_each_bucket = [ @@ -163,6 +156,7 @@ def _pad_param_if_needed(numel_unpadded): @pytest.mark.parametrize("use_distributed_optimizer", [False, True]) @pytest.mark.parametrize("overlap_grad_reduce", [False, True]) +@pytest.mark.flaky def test_grad_sync(use_distributed_optimizer: bool, overlap_grad_reduce: bool): Utils.initialize_model_parallel() @@ -190,6 +184,8 @@ def test_grad_sync(use_distributed_optimizer: bool, overlap_grad_reduce: bool): expected_grad_data_value_after_collective = 1 if torch.distributed.get_rank() == 0 or not use_distributed_optimizer: expected_grad_data_value_after_collective = parallel_state.get_data_parallel_world_size() + # Default scaling behavior in DDP involves dividing by the data-parallel size. + expected_grad_data_value_after_collective /= parallel_state.get_data_parallel_world_size() params = list(model.parameters()) for i, param in enumerate(params): @@ -214,7 +210,7 @@ def test_grad_sync(use_distributed_optimizer: bool, overlap_grad_reduce: bool): expected_grad_data_value = expected_grad_data_value_after_collective if overlap_grad_reduce and i < (len(params) - 1): expected_grad_data_value = 1 - assert int(param_and_grad_buffer.grad_data[0]) == expected_grad_data_value + assert param_and_grad_buffer.grad_data[0] == expected_grad_data_value if not overlap_grad_reduce: # Reset grad_data for subsequent collectives. 
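The revised expectation in test_grad_sync above encodes the default gradient-averaging behavior of Megatron's DDP wrapper: the collective sums the per-rank values and the buffer is then scaled by 1/data_parallel_size. A minimal arithmetic sketch of the all-reduce (or shard-owning rank) case, assuming a data-parallel size of 8 purely for illustration:

    dp_size = 8                         # assumed data-parallel world size
    per_rank_grad = 1.0                 # each rank fills its grad buffer with ones
    summed = per_rank_grad * dp_size    # value after the all-reduce / reduce-scatter
    expected = summed / dp_size         # default scaling divides by the data-parallel size
    assert expected == 1.0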
diff --git a/tests/unit_tests/export/trtllm/test_distributed_fp8.py b/tests/unit_tests/export/trtllm/test_distributed_fp8.py new file mode 100644 index 000000000..3e5c2217c --- /dev/null +++ b/tests/unit_tests/export/trtllm/test_distributed_fp8.py @@ -0,0 +1,271 @@ +from functools import partial + +import pytest +import torch +from pytest_mock import mocker +from torch.optim import Adam +from torch.utils.data import DataLoader + +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.gpt_dataset import GPTDatasetConfig, MockGPTDataset +from megatron.core.datasets.utils import compile_helpers +from megatron.core.export.data_type import DataType +from megatron.core.export.export_config import ExportConfig +from megatron.core.export.model_type import ModelType +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.pipeline_parallel.schedules import get_forward_backward_func +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.training.tokenizer.tokenizer import _NullTokenizer +from tests.unit_tests.test_utilities import Utils + +VOCAB_SIZE = 256 +SEQUENCE_LENGTH = 64 +NUM_LAYERS = 2 +DEVICE = torch.device("cuda") +DTYPE = torch.bfloat16 + + +def _model_provider(): + """Build the model.""" + + transformer_config = TransformerConfig( + num_layers=2, + hidden_size=512, + num_attention_heads=16, + use_cpu_initialization=True, + num_query_groups=2, + fp8='hybrid', + fp8_margin=0, + fp8_interval=1, + fp8_amax_history_len=1024, + fp8_amax_compute_algo="max", + tensor_model_parallel_size=2, + ) + + gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(), + vocab_size=VOCAB_SIZE, + max_sequence_length=SEQUENCE_LENGTH, + ) + + return gpt_model + + +def _get_train_data_iterator(): + if torch.distributed.is_available() and torch.distributed.is_initialized(): + if torch.distributed.get_rank() == 0: + compile_helpers() + torch.distributed.barrier() + else: + compile_helpers() + + config = GPTDatasetConfig( + random_seed=0, + sequence_length=SEQUENCE_LENGTH, + reset_position_ids=False, + reset_attention_mask=False, + eod_mask_loss=False, + tokenizer=_NullTokenizer(vocab_size=50), + ) + + datasets = BlendedMegatronDatasetBuilder( + MockGPTDataset, [1000, None, None], lambda: True, config + ).build() + + train_dataloader = DataLoader(datasets[0], batch_size=8, shuffle=True) + + train_iterator = iter(train_dataloader) + + return train_iterator + + +def _forward_step_func(data_iterator, model): + + def loss_func(loss_mask: torch.Tensor, output_tensor: torch.Tensor): + + losses = output_tensor.float() + loss_mask = loss_mask.view(-1).float() + loss = torch.sum(losses.view(-1) * loss_mask) / loss_mask.sum() + # If you have data parallel reduce loss across data parallel groups. + # If pipeline parallel, loss computation is done only in last stage. 
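+        # The loss above is averaged over unmasked tokens: sum(losses * loss_mask) / loss_mask.sum().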
+ + return loss, {'lm loss': loss} + + data = next(data_iterator) + tokens = torch.ones_like(data['tokens']).to(DEVICE) + attention_mask = data['attention_mask'].to(DEVICE) + position_ids = data['position_ids'].to(DEVICE) + labels = data['labels'].to(DEVICE) + loss_mask = data['loss_mask'].to(DEVICE) + output_tensor = model(tokens, position_ids, attention_mask, labels=labels) + + return output_tensor, partial(loss_func, loss_mask) + + +class TestTRTLLMSingleDeviceConverterFP8: + QUANTIZED_LAYERS = [ + 'transformer.layers.*.attention.dense.weight', + 'transformer.layers.*.attention.qkv.weight', + 'transformer.layers.*.mlp.fc.weight', + 'transformer.layers.*.mlp.proj.weight', + ] + NON_QUANTIZED_LAYERS = [ + 'transformer.layers.*.attention.dense.bias', + 'transformer.layers.*.input_layernorm.weight', + 'transformer.layers.*.input_layernorm.bias', + 'transformer.layers.*.attention.qkv.bias', + 'transformer.layers.*.post_layernorm.weight', + 'transformer.layers.*.post_layernorm.bias', + 'transformer.layers.*.mlp.fc.bias', + 'transformer.layers.*.mlp.proj.bias', + 'transformer.vocab_embedding.weight', + 'transformer.position_embedding.weight', + 'lm_head.weight', + 'transformer.ln_f.weight', + 'transformer.ln_f.bias', + ] + SCALING_FACTORS = [ + 'transformer.layers.*.attention.dense.activation_scaling_factor', + 'transformer.layers.*.attention.dense.weights_scaling_factor', + 'transformer.layers.*.attention.qkv.activation_scaling_factor', + 'transformer.layers.*.attention.qkv.weights_scaling_factor', + 'transformer.layers.*.mlp.fc.activation_scaling_factor', + 'transformer.layers.*.mlp.fc.weights_scaling_factor', + 'transformer.layers.*.mlp.proj.activation_scaling_factor', + 'transformer.layers.*.mlp.proj.weights_scaling_factor', + ] + KV_SCALING_FACTORS = ['transformer.layers.*.attention.kv_cache_scaling_factor'] + + def _assert_has_scales(self, state_dict, quantized): + for layer in range(NUM_LAYERS): + for key in self.SCALING_FACTORS: + k = key.replace('*', str(layer)) + + if quantized: + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == torch.float32 + ), 'Scaling factor dtype is expected to be torch.float32' + else: + assert k not in state_dict, f'Did not expect {k} in the converted model' + + def _assert_has_kv_scales(self, state_dict, kv_quantized): + for layer in range(NUM_LAYERS): + for key in self.KV_SCALING_FACTORS: + k = key.replace('*', str(layer)) + + if kv_quantized: + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == torch.float32 + ), 'Scaling factor dtype is expected to be torch.float32' + else: + assert k not in state_dict, f'Did not expect {k} in the converted model' + + def _assert_quantizable_layers(self, state_dict, quantized): + expected_dtype = torch.float8_e4m3fn if quantized else DTYPE + + for layer in range(NUM_LAYERS): + for key in self.QUANTIZED_LAYERS: + k = key.replace('*', str(layer)) + + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == expected_dtype + ), f'Expected {k} to have the dtype == {str(expected_dtype)}' + + def _assert_non_quantizable_layers(self, state_dict): + expected_dtype = torch.bfloat16 + + for layer in range(NUM_LAYERS): + for key in self.NON_QUANTIZED_LAYERS: + k = key.replace('*', str(layer)) + + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == expected_dtype + ), f'Expected {k} to have the dtype == {str(expected_dtype)}' + + def 
setup_method(self, method): + Utils.initialize_model_parallel(2, 1) + gpt_model = _model_provider() + gpt_model.to(DEVICE) + optim = Adam(gpt_model.parameters()) + train_iterator = _get_train_data_iterator() + forward_backward_func = get_forward_backward_func() + + # Mock training to initialize constants + for _ in range(2): + optim.zero_grad() + forward_backward_func( + forward_step_func=_forward_step_func, + data_iterator=train_iterator, + model=gpt_model, + num_microbatches=1, + seq_length=SEQUENCE_LENGTH, + micro_batch_size=8, + decoder_seq_length=SEQUENCE_LENGTH, + forward_only=False, + ) + optim.step() + + self.gpt_model = gpt_model + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_get_model_weights_converter(self, mocker): + pytest.importorskip('tensorrt_llm') + mocker.patch( + "megatron.core.export.trtllm.trtllm_weights_converter.distributed_trtllm_model_weights_converter.str_dtype_to_torch", + return_value=DTYPE, + ) + + from megatron.core.export.trtllm.trtllm_helper import TRTLLMHelper + + gpt_model = self.gpt_model + seq_len_interpolation_factor = None + if hasattr(gpt_model, "rotary_pos_emb"): + seq_len_interpolation_factor = gpt_model.rotary_pos_emb.seq_len_interpolation_factor + trtllm_helper = TRTLLMHelper( + transformer_config=gpt_model.config, + model_type=ModelType.gpt, + position_embedding_type=gpt_model.position_embedding_type, + max_position_embeddings=gpt_model.max_position_embeddings, + rotary_percentage=gpt_model.rotary_percent, + rotary_base=gpt_model.rotary_base, + moe_tp_mode=2, + multi_query_mode=False, + activation="gelu", + seq_len_interpolation_factor=seq_len_interpolation_factor, + share_embeddings_and_output_weights=gpt_model.share_embeddings_and_output_weights, + ) + + for fp8_quantized in [True, False]: + for fp8_kvcache in [True, False]: + weight_list, config_list = ( + trtllm_helper.get_trtllm_pretrained_config_and_model_weights( + model_state_dict=gpt_model.state_dict(), + dtype=DataType.bfloat16, + on_device_distributed_conversion=True, + vocab_size=VOCAB_SIZE, + gpus_per_node=2, + fp8_quantized=fp8_quantized, + fp8_kvcache=fp8_kvcache, + ) + ) + + expected_quant = 'FP8' if fp8_quantized else None + expected_kv_quant = 'FP8' if fp8_kvcache else None + assert ( + config_list[0].quantization.quant_algo == expected_quant + ), 'Wrong quantization settings' + assert ( + config_list[0].quantization.kv_cache_quant_algo == expected_kv_quant + ), 'Wrong KV-cache quantization settings' + self._assert_has_scales(weight_list[0], fp8_quantized) + self._assert_has_kv_scales(weight_list[0], fp8_kvcache) + self._assert_quantizable_layers(weight_list[0], fp8_quantized) + self._assert_non_quantizable_layers(weight_list[0]) diff --git a/tests/unit_tests/export/trtllm/test_single_device_fp8.py b/tests/unit_tests/export/trtllm/test_single_device_fp8.py new file mode 100644 index 000000000..02aa1e3a9 --- /dev/null +++ b/tests/unit_tests/export/trtllm/test_single_device_fp8.py @@ -0,0 +1,268 @@ +from functools import partial + +import pytest +import torch +from pytest_mock import mocker +from torch.optim import Adam +from torch.utils.data import DataLoader + +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.gpt_dataset import GPTDatasetConfig, MockGPTDataset +from megatron.core.datasets.utils import compile_helpers +from megatron.core.export.data_type import DataType +from megatron.core.export.export_config import ExportConfig +from 
megatron.core.export.model_type import ModelType +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.pipeline_parallel.schedules import get_forward_backward_func +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.training.tokenizer.tokenizer import _NullTokenizer +from tests.unit_tests.test_utilities import Utils + +SEQUENCE_LENGTH = 64 +NUM_LAYERS = 2 +DEVICE = torch.device("cuda") + + +def _model_provider(): + """Build the model.""" + + transformer_config = TransformerConfig( + num_layers=NUM_LAYERS, + hidden_size=64, + num_attention_heads=2, + use_cpu_initialization=True, + pipeline_dtype=torch.float32, + fp8='hybrid', + fp8_margin=0, + fp8_interval=1, + fp8_amax_history_len=1024, + fp8_amax_compute_algo="max", + ) + + gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(), + vocab_size=100, + max_sequence_length=SEQUENCE_LENGTH, + ) + + return gpt_model + + +def _get_train_data_iterator(): + if torch.distributed.is_available() and torch.distributed.is_initialized(): + if torch.distributed.get_rank() == 0: + compile_helpers() + torch.distributed.barrier() + else: + compile_helpers() + + config = GPTDatasetConfig( + random_seed=0, + sequence_length=SEQUENCE_LENGTH, + reset_position_ids=False, + reset_attention_mask=False, + eod_mask_loss=False, + tokenizer=_NullTokenizer(vocab_size=50), + ) + + datasets = BlendedMegatronDatasetBuilder( + MockGPTDataset, [1000, None, None], lambda: True, config + ).build() + + train_dataloader = DataLoader(datasets[0], batch_size=8, shuffle=True) + + train_iterator = iter(train_dataloader) + + return train_iterator + + +def _forward_step_func(data_iterator, model): + + def _loss_func(loss_mask: torch.Tensor, output_tensor: torch.Tensor): + + losses = output_tensor.float() + loss_mask = loss_mask.view(-1).float() + loss = torch.sum(losses.view(-1) * loss_mask) / loss_mask.sum() + # If you have data parallel reduce loss across data parallel groups. + # If pipeline parallel, loss computation is done only in last stage. 
+ + return loss, {'lm loss': loss} + + data = next(data_iterator) + tokens = torch.ones_like(data['tokens']).to(DEVICE) + attention_mask = data['attention_mask'].to(DEVICE) + position_ids = data['position_ids'].to(DEVICE) + labels = data['labels'].to(DEVICE) + loss_mask = data['loss_mask'].to(DEVICE) + output_tensor = model(tokens, position_ids, attention_mask, labels=labels) + + return output_tensor, partial(_loss_func, loss_mask) + + +class TestTRTLLMSingleDeviceConverterFP8: + QUANTIZED_LAYERS = [ + 'transformer.layers.*.attention.dense.weight', + 'transformer.layers.*.attention.qkv.weight', + 'transformer.layers.*.mlp.fc.weight', + 'transformer.layers.*.mlp.proj.weight', + ] + NON_QUANTIZED_LAYERS = [ + 'transformer.layers.*.attention.dense.bias', + 'transformer.layers.*.input_layernorm.weight', + 'transformer.layers.*.input_layernorm.bias', + 'transformer.layers.*.attention.qkv.bias', + 'transformer.layers.*.post_layernorm.weight', + 'transformer.layers.*.post_layernorm.bias', + 'transformer.layers.*.mlp.fc.bias', + 'transformer.layers.*.mlp.proj.bias', + 'transformer.vocab_embedding.weight', + 'transformer.position_embedding.weight', + 'lm_head.weight', + 'transformer.ln_f.weight', + 'transformer.ln_f.bias', + ] + SCALING_FACTORS = [ + 'transformer.layers.*.attention.dense.activation_scaling_factor', + 'transformer.layers.*.attention.dense.weights_scaling_factor', + 'transformer.layers.*.attention.qkv.activation_scaling_factor', + 'transformer.layers.*.attention.qkv.weights_scaling_factor', + 'transformer.layers.*.mlp.fc.activation_scaling_factor', + 'transformer.layers.*.mlp.fc.weights_scaling_factor', + 'transformer.layers.*.mlp.proj.activation_scaling_factor', + 'transformer.layers.*.mlp.proj.weights_scaling_factor', + ] + KV_SCALING_FACTORS = ['transformer.layers.*.attention.kv_cache_scaling_factor'] + + def _assert_has_scales(self, state_dict, quantized): + for layer in range(NUM_LAYERS): + for key in self.SCALING_FACTORS: + k = key.replace('*', str(layer)) + + if quantized: + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == torch.float32 + ), 'Scaling factor dtype is expected to be torch.float32' + else: + assert k not in state_dict, f'Did not expect {k} in the converted model' + + def _assert_has_kv_scales(self, state_dict, kv_quantized): + for layer in range(NUM_LAYERS): + for key in self.KV_SCALING_FACTORS: + k = key.replace('*', str(layer)) + + if kv_quantized: + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == torch.float32 + ), 'Scaling factor dtype is expected to be torch.float32' + else: + assert k not in state_dict, f'Did not expect {k} in the converted model' + + def _assert_quantizable_layers(self, state_dict, quantized): + expected_dtype = torch.float8_e4m3fn if quantized else torch.bfloat16 + + for layer in range(NUM_LAYERS): + for key in self.QUANTIZED_LAYERS: + k = key.replace('*', str(layer)) + + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == expected_dtype + ), f'Expected {k} to have the dtype == {str(expected_dtype)}' + + def _assert_non_quantizable_layers(self, state_dict): + expected_dtype = torch.bfloat16 + + for layer in range(NUM_LAYERS): + for key in self.NON_QUANTIZED_LAYERS: + k = key.replace('*', str(layer)) + + assert k in state_dict, f'Expected {k} in the converted model' + assert ( + state_dict[k].dtype == expected_dtype + ), f'Expected {k} to have the dtype == {str(expected_dtype)}' + + def 
setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + gpt_model = _model_provider() + gpt_model.to(DEVICE) + optim = Adam(gpt_model.parameters()) + train_iterator = _get_train_data_iterator() + forward_backward_func = get_forward_backward_func() + + # Mock training to initialize constants + for _ in range(2): + optim.zero_grad() + forward_backward_func( + forward_step_func=_forward_step_func, + data_iterator=train_iterator, + model=gpt_model, + num_microbatches=1, + seq_length=SEQUENCE_LENGTH, + micro_batch_size=8, + decoder_seq_length=SEQUENCE_LENGTH, + forward_only=False, + ) + optim.step() + + self.gpt_model = gpt_model + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_get_model_weights_converter(self, mocker): + pytest.importorskip('tensorrt_llm') + mocker.patch( + "megatron.core.export.trtllm.trtllm_weights_converter.distributed_trtllm_model_weights_converter.str_dtype_to_torch", + return_value=torch.float32, + ) + + from megatron.core.export.trtllm.trtllm_helper import TRTLLMHelper + + gpt_model = self.gpt_model + export_config = ExportConfig(inference_tp_size=2) + + seq_len_interpolation_factor = None + if hasattr(gpt_model, "rotary_pos_emb"): + seq_len_interpolation_factor = gpt_model.rotary_pos_emb.seq_len_interpolation_factor + trtllm_helper = TRTLLMHelper( + transformer_config=gpt_model.config, + model_type=ModelType.gpt, + position_embedding_type=gpt_model.position_embedding_type, + max_position_embeddings=gpt_model.max_position_embeddings, + rotary_percentage=gpt_model.rotary_percent, + rotary_base=gpt_model.rotary_base, + moe_tp_mode=2, + multi_query_mode=False, + activation="gelu", + seq_len_interpolation_factor=seq_len_interpolation_factor, + share_embeddings_and_output_weights=gpt_model.share_embeddings_and_output_weights, + ) + + for fp8_quantized in [True, False]: + for fp8_kvcache in [True, False]: + weight_list, config_list = ( + trtllm_helper.get_trtllm_pretrained_config_and_model_weights( + model_state_dict=gpt_model.state_dict(), + dtype=DataType.bfloat16, + export_config=export_config, + fp8_quantized=fp8_quantized, + fp8_kvcache=fp8_kvcache, + ) + ) + + expected_quant = 'FP8' if fp8_quantized else None + expected_kv_quant = 'FP8' if fp8_kvcache else None + assert ( + config_list[0].quantization.quant_algo == expected_quant + ), 'Wrong quantization settings' + assert ( + config_list[0].quantization.kv_cache_quant_algo == expected_kv_quant + ), 'Wrong KV-cache quantization settings' + self._assert_has_scales(weight_list[0], fp8_quantized) + self._assert_has_kv_scales(weight_list[0], fp8_kvcache) + self._assert_quantizable_layers(weight_list[0], fp8_quantized) + self._assert_non_quantizable_layers(weight_list[0]) diff --git a/tests/unit_tests/export/trtllm/test_trtllm_distributed_gpu_converter.py b/tests/unit_tests/export/trtllm/test_trtllm_distributed_gpu_converter.py index 5a0aa0e9c..6a5ccb04a 100644 --- a/tests/unit_tests/export/trtllm/test_trtllm_distributed_gpu_converter.py +++ b/tests/unit_tests/export/trtllm/test_trtllm_distributed_gpu_converter.py @@ -1,9 +1,12 @@ -import pytest import torch from pytest_mock import mocker from megatron.core.export.data_type import DataType -from megatron.core.export.trtllm.model_to_trllm_mapping.gpt_model import GPT_DICT +from megatron.core.export.trtllm.model_to_trllm_mapping.default_conversion_dict import ( + DEFAULT_CONVERSION_DICT, +) + +# pylint: disable=line-too-long from megatron.core.export.trtllm.trtllm_weights_converter.distributed_trtllm_model_weights_converter 
import ( DistributedTRTLLMModelWeightsConverter, ) @@ -18,8 +21,14 @@ class TestTRTLLMDistributedGPUConverter: + """ + Test the distributed TRT-LLM GPU weights converter. + """ def setup_method(self, method): + """ + Set up model parallelism and the GPT model under test. + """ Utils.initialize_model_parallel(2, 1) model_parallel_cuda_manual_seed(123) @@ -40,9 +49,15 @@ def setup_method(self, method): ) def teardown_method(self, method): + """ + Tear down model parallel state. + """ Utils.destroy_model_parallel() def test_get_model_weights_converter(self, mocker): + """ + Test the model weights converter. + """ device = torch.device("cuda") self.gpt_model.to(device) @@ -66,7 +81,7 @@ def test_get_model_weights_converter(self, mocker): distributed_converter.convert( model_state_dict=model_state_dict, - trtllm_conversion_dict=GPT_DICT, + trtllm_conversion_dict=DEFAULT_CONVERSION_DICT, tokenizer_vocab_size=_VOCAB_SIZE, ) diff --git a/tests/unit_tests/export/trtllm/test_trtllm_helper.py b/tests/unit_tests/export/trtllm/test_trtllm_helper.py index 53c0a5ffe..d9764dc8f 100644 --- a/tests/unit_tests/export/trtllm/test_trtllm_helper.py +++ b/tests/unit_tests/export/trtllm/test_trtllm_helper.py @@ -32,7 +32,6 @@ def test_exceptions(self, mocker): model_state_dict=None, dtype=None, on_device_distributed_conversion=True, - ModelType=ModelType.falcon, vocab_size=100, gpus_per_node=2, ) diff --git a/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py b/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py index b9ece5c39..2aabdebeb 100644 --- a/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py +++ b/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py @@ -76,7 +76,7 @@ def setup_model(self, tensor_parallel_size, pipeline_parallel_size): inference_wrapper_config = InferenceWrapperConfig( hidden_size=hidden_size, - inference_batch_times_seqlen_threshold=20, + inference_batch_times_seqlen_threshold=-1, fp32_residual_connection=False, params_dtype=torch.float, padded_vocab_size=self.vocab_size, diff --git a/tests/unit_tests/inference/test_flash_decode.py b/tests/unit_tests/inference/test_flash_decode.py new file mode 100644 index 000000000..77ac08c06 --- /dev/null +++ b/tests/unit_tests/inference/test_flash_decode.py @@ -0,0 +1,31 @@ +import torch + +from megatron.core.models.common.embeddings.rope_utils import apply_rotary_pos_emb_with_cos_sin +from megatron.core.models.common.embeddings.rotary_pos_embedding import RotaryEmbedding + + +class TestRotaryEmbeddingWithPrecomputedCosSin: + + def setup_method(self): + self.batch_size = 3 + self.seq_len = 4 + self.d_rot = 6 + self.rotary_embedding = RotaryEmbedding(kv_channels=4, rotary_percent=1.0) + + def test_output_shapes_match(self): + + # Create input tensors + t = torch.randn(self.seq_len, self.batch_size, 2, self.d_rot * 2, device="cuda") + rotary_pos_cos, rotary_pos_sin = self.rotary_embedding.get_cos_sin(self.seq_len) + + # Test using Flash Decoding optimized kernel which requires precomputed cos & sin tensors + expected_shape = torch.Size( + [self.seq_len, self.batch_size, self.seq_len // 2, self.seq_len * self.batch_size] + ) + output_flash_rotary = apply_rotary_pos_emb_with_cos_sin( + t, rotary_pos_cos, rotary_pos_sin, rotary_interleaved=True + ) + + assert ( + output_flash_rotary.shape == expected_shape + ), f"Outputs do not match: {output_flash_rotary.shape} != {expected_shape}" diff --git a/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py
b/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py index 14c9a8885..977f355d7 100644 --- a/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py +++ b/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py @@ -84,7 +84,7 @@ def setup_method(self, method): inference_wrapper_config = InferenceWrapperConfig( hidden_size=hidden_size, - inference_batch_times_seqlen_threshold=20, + inference_batch_times_seqlen_threshold=-1, fp32_residual_connection=False, params_dtype=torch.float, padded_vocab_size=self.vocab_size, diff --git a/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py b/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py index df7109e02..e61df5137 100644 --- a/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py +++ b/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py @@ -54,7 +54,7 @@ def setup_method(self, method): inference_wrapper_config = InferenceWrapperConfig( hidden_size=self.hidden_size, - inference_batch_times_seqlen_threshold=20, + inference_batch_times_seqlen_threshold=-1, fp32_residual_connection=False, params_dtype=torch.float, padded_vocab_size=self.vocab_size, diff --git a/tests/unit_tests/models/test_bert_model.py b/tests/unit_tests/models/test_bert_model.py index 186ce5c34..b03a3e596 100644 --- a/tests/unit_tests/models/test_bert_model.py +++ b/tests/unit_tests/models/test_bert_model.py @@ -160,6 +160,7 @@ def test_transformer_engine_version_1_7_to_1_10_flash_attn(self, mocker): ), f"Expected b11s for attn_mask_dimensions but got {attn_mask_dimensions}" @pytest.mark.internal + @pytest.mark.flaky_in_dev def test_transformer_engine_version_1_7_to_1_10_rng_error(self, mocker): os.environ['NVTE_FLASH_ATTN'] = '0' os.environ['NVTE_FUSED_ATTN'] = '0' diff --git a/tests/unit_tests/models/test_llava_model.py b/tests/unit_tests/models/test_llava_model.py index b3142fb80..d0672885a 100644 --- a/tests/unit_tests/models/test_llava_model.py +++ b/tests/unit_tests/models/test_llava_model.py @@ -1,14 +1,21 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. from copy import deepcopy +from types import SimpleNamespace import pytest import torch from megatron.core import InferenceParams +from megatron.core import parallel_state as ps from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec from megatron.core.models.multimodal.llava_model import LLaVAModel +from megatron.core.models.vision.vit_layer_specs import get_vit_layer_with_transformer_engine_spec +from megatron.core.packed_seq_params import PackedSeqParams from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.enums import AttnMaskType from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import is_te_min_version +from megatron.training.global_vars import set_args from tests.unit_tests.test_utilities import Utils @@ -84,13 +91,12 @@ def test_preprocess_data(self): # 3 images with 1 tile and 2 image with 2 tiles = 7 tiles. 
image_embeddings = ( - 1e-5 - * torch.arange(577 * 7 * hidden_size, dtype=torch.float) + torch.arange(577 * 7 * hidden_size, dtype=torch.float) .reshape(577, 7, hidden_size) .cuda() ) - image_token_index = -200 + image_token_index = self.model.image_token_index input_ids = torch.arange(1024).expand(5, 1024).cuda() input_ids[0, 0] = image_token_index # image before text input_ids[1, 100] = image_token_index # image in between @@ -99,19 +105,19 @@ def test_preprocess_data(self): input_ids[4, 50] = image_token_index # two images in between input_ids[4, 150] = image_token_index - # Offset by 1000 to distinguish from image embeddings. + # Using negative sign to distinguish from image embeddings. language_embeddings = ( - 1000.0 - + 1e-5 - * torch.arange(5 * 1024 * hidden_size, dtype=torch.float) + -torch.arange(5 * 1024 * hidden_size, dtype=torch.float) .reshape(5, 1024, hidden_size) .cuda() ) # Labels are input_ids shifted to left by one. labels = torch.arange(1, 1025, dtype=torch.int).expand(5, 1024).cuda() + # labels[0] - image token got dropped by shift to left by one. labels[1, 99] = image_token_index labels[2, -2] = image_token_index + # labels[3] - no image. labels[4, 49] = image_token_index labels[4, 149] = image_token_index @@ -124,6 +130,8 @@ def test_preprocess_data(self): num_image_tiles = torch.tensor([1, 2, 1, 2, 1], dtype=torch.int).cuda() use_inference_kv_cache = False + inference_params = None + image_token_mask = None embeddings, labels, loss_mask = self.model._preprocess_data( image_embeddings, @@ -132,8 +140,10 @@ def test_preprocess_data(self): loss_mask, labels, use_inference_kv_cache, + inference_params, image_token_index, num_image_tiles, + image_token_mask, ) img_seq_len = 577 @@ -270,7 +280,7 @@ def test_forward(self): # 3 images with 1 tile and 2 images with 2 tiles. img = torch.randn((7, 3, 336, 336)).cuda() - image_token_index = -200 + image_token_index = self.model.image_token_index input_ids = torch.randint(0, 2048, (5, 1024)).cuda() input_ids[0, 0] = image_token_index # image before text input_ids[1, 100] = image_token_index # image in between @@ -307,6 +317,28 @@ def test_forward(self): max_seq_len = img_seq_len * 3 - 2 + 1024 assert loss.shape == new_loss_mask.shape == torch.Size((5, max_seq_len)) + # Try with labels and PackedSeqParams. Only micro batch size 1 is supported in this mode. + packed_seq_params = PackedSeqParams( + qkv_format="thd", + cu_seqlens_q=[0, 512, 1024, 1600], # Just example values. + cu_seqlens_kv=[0, 512, 1024, 1600], + max_seqlen_q=[1600], + max_seqlen_kv=[1600], + ) + + loss, new_loss_mask = self.model.forward( + img[:1], + input_ids[:1], + position_ids[:1], + attention_mask, + labels[:1], + loss_mask[:1], + num_image_tiles=num_image_tiles[:1], + ) + + # 1600 = 577 (img_seq_len) + 1024 (text tokens in the first sample) - 1 (image token). + assert loss.shape == new_loss_mask.shape == torch.Size((1, 1600)) + # Try text-only input. loss, new_loss_mask = self.model.forward( torch.tensor([], dtype=torch.float).cuda(), @@ -321,7 +353,7 @@ def test_forward(self): assert loss.shape == new_loss_mask.shape == torch.Size((5, 1024)) # Try without labels and without inference params. - logits = self.model.forward( + logits, _ = self.model.forward( img, input_ids, position_ids, @@ -334,7 +366,7 @@ def test_forward(self): # Try without labels and with inference params. 
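        # InferenceParams(max_batch_size, max_sequence_length) carries the KV-cache state reused across decode steps.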
inference_params = InferenceParams(5, max_seq_len) - logits = self.model.forward( + logits, _ = self.model.forward( img, input_ids, position_ids, @@ -437,3 +469,429 @@ def test_set_input_tensor(self): input_tensor = torch.zeros(expected_shape) self.model.set_input_tensor(input_tensor) assert self.model.vision_model.decoder.input_tensor.shape == expected_shape + + +def create_test_args(cp_size, sequence_parallel): + # Set dummy values for the args. + args = SimpleNamespace() + args.context_parallel_size = cp_size + args.sequence_parallel = sequence_parallel + + return args + + +class TestLLaVAModelTokenParallel: + + def init_llava_model(self): + self.language_hidden_size = 64 + self.language_num_attention_heads = 16 + + language_config = TransformerConfig( + num_layers=3, + hidden_size=self.language_hidden_size, + num_attention_heads=self.language_num_attention_heads, + use_cpu_initialization=False, + tensor_model_parallel_size=self.tp_size, + sequence_parallel=self.sequence_parallel, + context_parallel_size=1, # Init with CP=1 until CI catches up to TEv1.10 + # context_parallel_size=self.cp_size, + ) + # SP and CP are not yet supported for the Vision Backbone + vision_config = TransformerConfig( + num_layers=2, + hidden_size=16, + num_attention_heads=8, + use_cpu_initialization=False, + tensor_model_parallel_size=self.tp_size, + sequence_parallel=False, + context_parallel_size=1, + ) + vision_projection_config = TransformerConfig( + num_layers=2, + hidden_size=self.language_hidden_size, + ffn_hidden_size=1024, + num_attention_heads=8, + use_cpu_initialization=False, + tensor_model_parallel_size=self.tp_size, + sequence_parallel=False, + context_parallel_size=1, + ) + + language_layer_spec = get_gpt_layer_with_transformer_engine_spec() + # SP/CP either requires user to ensure token lengths do not require padding OR change mask type to padding + if ( + language_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') + == AttnMaskType.causal + ): + language_layer_spec.submodules.self_attention.params['attn_mask_type'] = ( + AttnMaskType.padding_causal + ) + elif ( + language_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') + == AttnMaskType.no_mask + ): + language_layer_spec.submodules.self_attention.params['attn_mask_type'] = ( + AttnMaskType.padding + ) + + vision_layer_spec = deepcopy(language_layer_spec) + vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) + + vision_config.vision_model_type = "clip" + self.model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_layer_spec, + language_vocab_size=8192, + language_max_sequence_length=4096, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_layer_spec, + drop_vision_class_token=False, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_spec, + img_h=336, + img_w=336, + patch_dim=14, + ) + + @pytest.mark.internal # The model is under active development and its methods may change. 
+ def setup_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + @pytest.mark.parametrize( + "cp_size,tp_size,sequence_parallel", [(1, 8, True), (2, 4, False), (2, 4, True)] + ) + def test_process_embedding_token_parallel(self, cp_size, tp_size, sequence_parallel): + self.cp_size = cp_size + self.tp_size = tp_size + self.sequence_parallel = sequence_parallel + Utils.initialize_model_parallel( + tensor_model_parallel_size=self.tp_size, context_parallel_size=self.cp_size + ) + model_parallel_cuda_manual_seed(123) + + self.init_llava_model() + self.model.cuda() + # Setting CP size for LLM here as model init is done with CP=1 to + # avoid TE version check until CI catches up to TEv1.10 + if self.cp_size > 1: + self.model.context_parallel_lm = self.cp_size + + args = create_test_args(self.cp_size, self.sequence_parallel) + set_args(args) + + batch_size = 2 + combined_valid_seqlen = 2049 + combined_padded_seqlen = 2056 + if self.cp_size > 1: + combined_embeddings = torch.ones( + [batch_size, combined_padded_seqlen, 4096], device='cuda', dtype=torch.bfloat16 + ) # [B, S, H] + else: + combined_embeddings = torch.ones( + [combined_padded_seqlen, batch_size, 4096], device='cuda', dtype=torch.bfloat16 + ) # [S, B, H] + new_labels = torch.ones( + [batch_size, combined_padded_seqlen], device='cuda', dtype=torch.bfloat16 + ) # [B, S] + new_loss_mask = torch.ones( + [batch_size, combined_padded_seqlen], device='cuda', dtype=torch.bfloat16 + ) # [B, S] + + cu_seqlens = torch.arange( + 0, + (batch_size + 1) * (combined_valid_seqlen), + step=(combined_valid_seqlen), + dtype=torch.int32, + device=combined_embeddings.device, + ) + cu_seqlens_padded = torch.arange( + 0, + (batch_size + 1) * (combined_padded_seqlen), + step=(combined_padded_seqlen), + dtype=torch.int32, + device=combined_embeddings.device, + ) + + packed_seq_params = PackedSeqParams( + cu_seqlens_q=cu_seqlens, + cu_seqlens_kv=cu_seqlens, + cu_seqlens_q_padded=cu_seqlens_padded, + cu_seqlens_kv_padded=cu_seqlens_padded, + max_seqlen_q=combined_padded_seqlen, + max_seqlen_kv=combined_padded_seqlen, + qkv_format='thd', + ) + + combined_embeddings, new_labels, new_loss_mask, packed_seq_params = ( + self.model._process_embedding_token_parallel( + combined_embeddings, new_labels, new_loss_mask, packed_seq_params + ) + ) + + # Calculate the expected padded seq length + if self.cp_size > 1 and self.sequence_parallel: + padding_factor = self.tp_size * self.cp_size * 2 + elif self.cp_size > 1: + padding_factor = self.cp_size * 2 + elif self.sequence_parallel: + padding_factor = self.tp_size + + padded_seq_len = int( + (combined_padded_seqlen + (padding_factor - 1)) // padding_factor * padding_factor + ) + + # Check if output shape is as expected + if self.cp_size > 1 and self.sequence_parallel: + # THD format + assert combined_embeddings.shape[0] == batch_size * ( + padded_seq_len / (self.tp_size * self.cp_size) + ) + assert combined_embeddings.shape[1] == 1 + elif self.cp_size > 1: + # THD format + assert combined_embeddings.shape[0] == batch_size * (padded_seq_len / self.cp_size) + assert combined_embeddings.shape[1] == 1 + else: + # SBHD format + assert combined_embeddings.shape[0] == padded_seq_len / self.tp_size + assert combined_embeddings.shape[1] == batch_size + + +def count_parameters(model): + return sum(p.numel() for p in model.parameters()) + + +@pytest.mark.internal # The model is under active development and its 
methods may change. +@pytest.mark.parametrize( + 'dtp, dpp, etp, epp', [(1, 1, 1, 0), (1, 1, 1, 1), (2, 1, 2, 0), (2, 3, 2, 1), (2, 4, 2, 0)] +) +def test_llava_model_parallelism(dtp, dpp, etp, epp): + """ + The purpose of this test is to check that vit, vision projection and lm layer + counts across tensor and pipeline parallel ranks match the counts in the + non-model-parallel case, i.e. tp==1, pp==1, etp==1, epp==0 + """ + + language_hidden_size = 64 + language_num_attention_heads = 4 + + # First initialize a single GPU model to get baseline parameter and layer counts + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + encoder_tensor_model_parallel_size=1, + encoder_pipeline_model_parallel_size=0, + ) + model_parallel_cuda_manual_seed(123) + + language_config = TransformerConfig( + num_layers=8, + hidden_size=language_hidden_size, + num_attention_heads=language_num_attention_heads, + use_cpu_initialization=False, + ) + language_config.tensor_model_parallel_size = dtp + language_config.pipeline_model_parallel_size = dpp + + vision_config = TransformerConfig( + num_layers=4, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False + ) + vision_config.tensor_model_parallel_size = etp + vision_config.pipeline_model_parallel_size = 1 + + vision_projection_config = TransformerConfig( + num_layers=2, + hidden_size=language_hidden_size, + ffn_hidden_size=32, + num_attention_heads=1, + use_cpu_initialization=False, + ) + vision_projection_config.tensor_model_parallel_size = etp + vision_projection_config.pipeline_model_parallel_size = 1 + + language_layer_spec = get_gpt_layer_with_transformer_engine_spec() + vision_layer_spec = get_vit_layer_with_transformer_engine_spec() + vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) + + vision_config.vision_model_type = "clip" + non_parallel_model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_layer_spec, + language_vocab_size=8192, + language_max_sequence_length=4096, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_layer_spec, + drop_vision_class_token=False, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_spec, + img_h=336, + img_w=336, + patch_dim=14, + ) + + base_vit_params = sum(p.numel() for p in non_parallel_model.vision_model.parameters()) + base_proj_params = sum(p.numel() for p in non_parallel_model.vision_projection.parameters()) + + base_vit_layers = len(non_parallel_model.vision_model.decoder.layers) + + Utils.destroy_model_parallel() + + # Next initialize a model parallel version to get test parameter and layer counts + Utils.initialize_model_parallel( + tensor_model_parallel_size=dtp, + pipeline_model_parallel_size=dpp, + encoder_tensor_model_parallel_size=etp, + encoder_pipeline_model_parallel_size=epp, + ) + model_parallel_cuda_manual_seed(123) + + pp_rank = ps.get_pipeline_model_parallel_rank() + pp_world_size = ps.get_pipeline_model_parallel_world_size() + tp_world_size = ps.get_tensor_model_parallel_world_size() + + pre_process = True if (pp_rank == 0 or (pp_rank == 1 and epp == 1)) else False + post_process = ( + True if ((pp_rank == 0 and epp == 1) or (pp_rank == pp_world_size - 1)) else False + ) + add_encoder = True if pp_rank == 0 else False + add_decoder = False if (pp_rank == 0 and epp == 1) else True + + language_config = TransformerConfig( + num_layers=8, + hidden_size=language_hidden_size, + 
num_attention_heads=language_num_attention_heads, + use_cpu_initialization=False, + ) + language_config.tensor_model_parallel_size = dtp + language_config.pipeline_model_parallel_size = dpp + + vision_config = TransformerConfig( + num_layers=4, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False + ) + vision_config.tensor_model_parallel_size = etp + vision_config.pipeline_model_parallel_size = 1 + + vision_projection_config = TransformerConfig( + num_layers=2, + hidden_size=language_hidden_size, + ffn_hidden_size=32, + num_attention_heads=1, + use_cpu_initialization=False, + ) + vision_projection_config.tensor_model_parallel_size = etp + vision_projection_config.pipeline_model_parallel_size = 1 + + language_layer_spec = get_gpt_layer_with_transformer_engine_spec() + vision_layer_spec = get_vit_layer_with_transformer_engine_spec() + vision_projection_spec = deepcopy(vision_layer_spec.submodules.mlp.submodules) + + vision_config.vision_model_type = "clip" + model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_layer_spec, + language_vocab_size=8192, + language_max_sequence_length=4096, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_layer_spec, + drop_vision_class_token=False, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_spec, + img_h=336, + img_w=336, + patch_dim=14, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, + ) + + if epp == 1: + if pp_rank == 0: + # should be in a etp sized tp group + assert tp_world_size == etp + # there should only be a single pipeline rank + assert pp_world_size == epp + dpp + # should not be inside decoder + assert not ps.is_inside_decoder() + # should be inside encoder + assert ps.is_inside_encoder() + elif pp_rank != 0: + # non-encoder ranks should be in a dtp sized tp group + assert tp_world_size == dtp + # check we're inside the decoder + assert ps.is_inside_decoder() + # check we're not inside the encoder + assert not ps.is_inside_encoder() + elif epp == 0: + if pp_rank == 0: + # check we're inside the encoder and decoder + assert ps.is_inside_encoder() + assert ps.is_inside_decoder() + elif pp_rank != 0: + # check we're inside the decoder only and there's no vision_model + assert not ps.is_inside_encoder() + assert ps.is_inside_decoder() + assert model.vision_model is None + assert model.vision_projection is None + + if ps.is_inside_encoder(): + # Check num vit layers - epp > 1 not supported + test_vit_layers = len([p for p in model.vision_model.decoder.layers]) + assert test_vit_layers == base_vit_layers + + # Check all vit params are present + test_vit_tp_params = sum( + [ + p.numel() + for p in model.vision_model.parameters() + if hasattr(p, 'tensor_model_parallel') + ] + ) + test_vit_non_tp_params = sum( + [ + p.numel() + for p in model.vision_model.parameters() + if not hasattr(p, 'tensor_model_parallel') + ] + ) + group = ps.get_tensor_model_parallel_group() + test_vit_params_tensor = torch.tensor([test_vit_tp_params], dtype=torch.int32).cuda() + torch.distributed.all_reduce( + test_vit_params_tensor, op=torch.distributed.ReduceOp.SUM, group=group + ) + total_test_vit_tp_params = test_vit_params_tensor.item() + assert total_test_vit_tp_params + test_vit_non_tp_params == base_vit_params + + # Check all vision projection params are present + test_proj_tp_params = sum( + [ + p.numel() + for p in model.vision_projection.parameters() 
+ if hasattr(p, 'tensor_model_parallel') + ] + ) + test_proj_non_tp_params = sum( + [ + p.numel() + for p in model.vision_projection.parameters() + if not hasattr(p, 'tensor_model_parallel') + ] + ) + test_proj_params_tensor = torch.tensor([test_proj_tp_params], dtype=torch.int32).cuda() + torch.distributed.all_reduce( + test_proj_params_tensor, op=torch.distributed.ReduceOp.SUM, group=group + ) + total_test_proj_tp_params = test_proj_params_tensor.item() + assert total_test_proj_tp_params + test_proj_non_tp_params == base_proj_params + else: + # check ranks that aren't inside encoder have no vit + assert model.vision_model is None + assert model.vision_projection is None + + Utils.destroy_model_parallel() + torch.cuda.empty_cache() diff --git a/tests/unit_tests/models/test_mamba_model.py b/tests/unit_tests/models/test_mamba_model.py index 913adb538..f800e420d 100644 --- a/tests/unit_tests/models/test_mamba_model.py +++ b/tests/unit_tests/models/test_mamba_model.py @@ -121,3 +121,12 @@ def test_save_load(self, tmp_path): torch.save(self.model.state_dict(), path) self.model.load_state_dict(torch.load(path)) + + def test_layer_numbers(self): + """ + The layer numbers should start at one (for the embedding # layer) and go up + incrementally from there. This is required for PEFT to work. + """ + model = self.model + for expected, layer in enumerate(model.decoder.layers, start=1): + assert expected == layer.layer_number, "layer numbers are incorrect" diff --git a/tests/unit_tests/models/test_t5_model.py b/tests/unit_tests/models/test_t5_model.py index efe12b78f..6c1faf971 100644 --- a/tests/unit_tests/models/test_t5_model.py +++ b/tests/unit_tests/models/test_t5_model.py @@ -1,11 +1,15 @@ # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. +import os from copy import deepcopy import pytest import torch +from packaging.version import Version as PkgVersion +from pytest_mock import mocker import megatron.core.parallel_state as ps +from megatron.core.datasets.t5_dataset import T5MaskedWordPieceDataset from megatron.core.models.T5.t5_model import T5Model from megatron.core.models.T5.t5_spec import ( get_t5_decoder_with_local_block_spec, @@ -243,3 +247,116 @@ def test_state_dict_for_save_checkpoint(self): def test_load_state_dict(self): pass + + +class TestT5ModelAttentionDimensions: + + def teardown_method(self, method): + os.environ.pop('NVTE_FUSED_ATTN', None) + os.environ.pop('NVTE_FLASH_ATTN', None) + os.environ.pop('NVTE_UNFUSED_ATTN', None) + + def setup_method(self, method): + self.bs = 4 + self.seq_len = 512 + self.seq_len_dec = 128 + self.encoder_tokens = torch.ones([self.bs, self.seq_len]) + self.decoder_tokens = torch.ones([self.bs, self.seq_len_dec]) + self.encoder_mask = torch.ones([self.bs, self.seq_len]) < 0.5 + self.decoder_mask = torch.ones([self.bs, self.seq_len_dec]) < 0.5 + + @pytest.mark.internal + def test_local_spec(self): + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=True, + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, self.seq_len, self.seq_len] + assert list(decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len_dec] + assert list(encoder_decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_1_10(self): + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( 
+ self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.10", + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, 1, self.seq_len] + assert decoder_mask is None + assert list(encoder_decoder_mask[0].shape) == [self.bs, 1, 1, self.seq_len_dec] + assert list(encoder_decoder_mask[1].shape) == [self.bs, 1, 1, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_1_7_to_1_10_flashfused_attn(self): + os.environ['NVTE_FLASH_ATTN'] = '1' + os.environ['NVTE_FUSED_ATTN'] = '1' + + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.8", + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, 1, self.seq_len] + assert decoder_mask is None + assert list(encoder_decoder_mask[0].shape) == [self.bs, 1, 1, self.seq_len_dec] + assert list(encoder_decoder_mask[1].shape) == [self.bs, 1, 1, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_1_7_to_1_10_unfused_attention(self): + os.environ['NVTE_FLASH_ATTN'] = '0' + os.environ['NVTE_FUSED_ATTN'] = '0' + + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.8", + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, self.seq_len, self.seq_len] + assert decoder_mask is None + assert list(encoder_decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_less_than_1_7(self): + os.environ['NVTE_FLASH_ATTN'] = '1' + with pytest.raises(Exception) as exc_info: + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.5", + ) + ) + + assert str(exc_info.value) == ( + "Flash and fused attention is not supported with transformer " + "engine version < 1.7. 
Set NVTE_FLASH_ATTN=0 and NVTE_FUSED_ATTN=0" + "or upgrade transformer engine >= 1.7" + ) diff --git a/tests/unit_tests/pipeline_parallel/test_helpers.py b/tests/unit_tests/pipeline_parallel/test_helpers.py new file mode 100644 index 000000000..a20c3a540 --- /dev/null +++ b/tests/unit_tests/pipeline_parallel/test_helpers.py @@ -0,0 +1,124 @@ +def compare_helpers(pipeline_parallel_size, num_microbatches, num_model_chunks): + total_num_microbatches = num_microbatches * num_model_chunks + + # Baseline helpers + def baseline_get_model_chunk_id(microbatch_id, forward): + """Helper method to get the model chunk ID given the iteration number.""" + microbatch_id_in_group = microbatch_id % (pipeline_parallel_size * num_model_chunks) + model_chunk_id = microbatch_id_in_group // pipeline_parallel_size + if not forward: + model_chunk_id = num_model_chunks - model_chunk_id - 1 + return model_chunk_id + + def baseline_get_microbatch_id_in_model_chunk(iteration_id, forward): + """Helper method to get the microbatch_id within model chunk given the iteration number.""" + assert forward + iteration_group_id = iteration_id // (pipeline_parallel_size * num_model_chunks) + microbatch_id_in_model_chunk = (iteration_group_id * pipeline_parallel_size) + ( + iteration_id % pipeline_parallel_size + ) + return microbatch_id_in_model_chunk + + def baseline_is_first_microbatch_for_model_chunk(microbatch_id: int) -> bool: + """Check if an iteration is the first for a model chunk.""" + microbatch_group_size = pipeline_parallel_size * num_model_chunks + microbatch_group_id = microbatch_id // microbatch_group_size + microbatch_id_in_group = microbatch_id % microbatch_group_size + if microbatch_group_id == 0: + return microbatch_id_in_group % pipeline_parallel_size == 0 + else: + return False + + def baseline_is_last_microbatch_for_model_chunk(microbatch_id: int) -> bool: + """Check if an iteration is the last for a model chunk.""" + microbatch_group_size = pipeline_parallel_size * num_model_chunks + num_microbatch_groups = total_num_microbatches // microbatch_group_size + microbatch_group_id = microbatch_id // microbatch_group_size + microbatch_id_in_group = microbatch_id % microbatch_group_size + if microbatch_group_id == num_microbatch_groups - 1: + return microbatch_id_in_group % pipeline_parallel_size == pipeline_parallel_size - 1 + else: + return False + + # Create schedule table prior to new helper methods + schedule_table = [] + for min_microbatch_id_in_group in range(0, num_microbatches, pipeline_parallel_size): + if min_microbatch_id_in_group + pipeline_parallel_size >= num_microbatches: + # Construct schedule for the last microbatch group + schedule_table.extend( + [ + (microbatch_id, model_chunk_id) + for model_chunk_id in range(num_model_chunks) + for microbatch_id in range(min_microbatch_id_in_group, num_microbatches) + ] + ) + else: + # Construct schedule for other microbatch groups + schedule_table.extend( + [ + (microbatch_id, model_chunk_id) + for model_chunk_id in range(num_model_chunks) + for microbatch_id in range( + min_microbatch_id_in_group, + min_microbatch_id_in_group + pipeline_parallel_size, + ) + ] + ) + + microbatch_id_table, model_chunk_id_table = zip(*schedule_table) + + # New helper methods that indexes schedule table + def new_get_model_chunk_id(virtual_microbatch_id, forward): + """Helper method to get the model chunk ID given the iteration number.""" + model_chunk_id = model_chunk_id_table[virtual_microbatch_id % total_num_microbatches] + if not forward: + model_chunk_id = 
num_model_chunks - model_chunk_id - 1 + return model_chunk_id + + def new_get_microbatch_id_in_model_chunk(iteration_id, forward): + """Helper method to get the microbatch_id within model chunk given the iteration number.""" + assert forward + microbatch_id_in_model_chunk = microbatch_id_table[iteration_id] + return microbatch_id_in_model_chunk + + def new_is_first_microbatch_for_model_chunk(virtual_microbatch_id: int) -> bool: + """Check if an iteration is the first for a model chunk.""" + if virtual_microbatch_id < total_num_microbatches: + return microbatch_id_table[virtual_microbatch_id] == 0 + else: + return False + + def new_is_last_microbatch_for_model_chunk(virtual_microbatch_id: int) -> bool: + """Check if an iteration is the last for a model chunk.""" + if virtual_microbatch_id < total_num_microbatches: + return microbatch_id_table[virtual_microbatch_id] == num_microbatches - 1 + else: + return False + + for i in range(total_num_microbatches): + # Test both forward and backward + assert baseline_get_model_chunk_id(i, forward=False) == new_get_model_chunk_id( + i, forward=False + ) + assert baseline_get_model_chunk_id(i, forward=True) == new_get_model_chunk_id( + i, forward=True + ) + + # Only used in forward + assert baseline_get_microbatch_id_in_model_chunk( + i, forward=True + ) == new_get_microbatch_id_in_model_chunk(i, forward=True) + + assert baseline_is_first_microbatch_for_model_chunk( + i + ) == new_is_first_microbatch_for_model_chunk(i) + assert baseline_is_last_microbatch_for_model_chunk( + i + ) == new_is_last_microbatch_for_model_chunk(i) + + +def test_helpers(): + for pp in [2, 4, 8]: + for m in [pp, 2 * pp, 4 * pp, 8 * pp]: + for vp in range(2, 13): + compare_helpers(pipeline_parallel_size=pp, num_microbatches=m, num_model_chunks=vp) diff --git a/tests/unit_tests/ssm/test_mamba_block.py b/tests/unit_tests/ssm/test_mamba_block.py new file mode 100644 index 000000000..82ed40bdb --- /dev/null +++ b/tests/unit_tests/ssm/test_mamba_block.py @@ -0,0 +1,80 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import pytest +import torch + +from megatron.core.models.mamba.mamba_layer_specs import mamba_stack_spec +from megatron.core.ssm.mamba_block import MambaStack +from megatron.core.ssm.mamba_hybrid_layer_allocation import Symbols +from megatron.core.ssm.mamba_layer import MambaLayer +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.attention import SelfAttention +from megatron.core.transformer.mlp import MLP +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import TransformerLayer +from tests.unit_tests.test_utilities import Utils + + +class TestMambaBlock: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + + def get_mamba_block(self, hybrid_override_pattern): + transformer_config = TransformerConfig( + hidden_size=256, # The Mamba layer places several constraints on this + # Need to specify num_attention_heads and num_layers or TransformerConfig + # will generate errors. 
+ num_layers=len(hybrid_override_pattern), + num_attention_heads=4, + use_cpu_initialization=True, + ) + modules = mamba_stack_spec.submodules + return MambaStack( + transformer_config, modules, hybrid_override_pattern=hybrid_override_pattern + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_gpu_forward(self): + hybrid_override_pattern = Symbols.MAMBA + Symbols.ATTENTION + Symbols.MLP + block = self.get_mamba_block(hybrid_override_pattern) + block.cuda() + micro_batch_size = 2 + sequence_length = 32 + hidden_states = torch.ones((sequence_length, micro_batch_size, block.config.hidden_size)) + hidden_states = hidden_states.cuda() + attention_mask = torch.ones( + (micro_batch_size, 1, sequence_length, sequence_length), dtype=bool + ) + attention_mask = attention_mask.cuda() + output = block(hidden_states, attention_mask=attention_mask) + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == block.config.hidden_size + assert output.dtype == torch.float32 + + def test_layer_types(self): + """ + Make sure that the layer types specified with hybrid_override_pattern + were honored. + """ + hybrid_override_pattern = Symbols.MAMBA + Symbols.ATTENTION + Symbols.MLP + block = self.get_mamba_block(hybrid_override_pattern) + layers = block.layers + # Note that this matches the order specified by hybrid_override_pattern in setup_method + assert type(layers[0]) == MambaLayer + assert type(layers[1]) == TransformerLayer + assert type(layers[1].self_attention) == SelfAttention + assert type(layers[2]) == TransformerLayer + assert type(layers[2].mlp) == MLP + + def test_invalid_layer_types_cause_failure(self): + invalid_symbol = '+' + assert invalid_symbol not in Symbols.VALID # sanity check. + hybrid_override_pattern = Symbols.MAMBA + Symbols.ATTENTION + Symbols.MLP + invalid_symbol + # _allocate_override() in mamba_hybrid_layer_allocation.py throws a ValueError. + with pytest.raises(ValueError): + block = self.get_mamba_block(hybrid_override_pattern) diff --git a/tests/unit_tests/ssm/test_mamba_hybrid_layer_allocation.py b/tests/unit_tests/ssm/test_mamba_hybrid_layer_allocation.py new file mode 100644 index 000000000..706fada5b --- /dev/null +++ b/tests/unit_tests/ssm/test_mamba_hybrid_layer_allocation.py @@ -0,0 +1,76 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import math +import re + +import pytest +import torch + +from megatron.core.ssm.mamba_hybrid_layer_allocation import Symbols, allocate_layers + + +class TestMambaHybridLayerAllocation: + + def test_hybrid_layer_allocation(self): + # The format for the test cases is: + # (layers_count, attention_ratio, mlp_ratio, override_pattern). + test_cases = [ + (9, 0.0, 0.0, "M*-M*-M*-"), + (9, 0.0, 0.0, "MMMMMMMMM"), + (30, 0.0, 0.0, None), + (8, 0.25, 0.25, "MM*-MM*-"), + (8, 0.5, 0.25, "M**-M**-"), + (48, 0.5, 0.2, None), + ] + for test in test_cases: + (layers_count, attention_ratio, mlp_ratio, override_pattern) = test + + layer_types = allocate_layers(*test) + + # Check that return value is in the right format. + assert isinstance(layer_types, list) + assert layers_count == len(layer_types) + + # Make sure all the layers are valid. + for layer_type in layer_types: + assert layer_type in Symbols.VALID + + # Make sure each layer is as requested by override_pattern. 
+ if override_pattern is not None: + assert len(override_pattern) == len(layer_types) + for index, layer_type in enumerate(layer_types): + assert override_pattern[index] == layer_types[index] + else: + # Make sure the count of each type of layer is correct. + counts = {layer_type: 0 for layer_type in Symbols.VALID} # Initialize all to zero. + for layer_type in layer_types: + assert layer_type in counts + counts[layer_type] += 1 + # Check the ratios. + remainder = 1.0 - attention_ratio - mlp_ratio + assert remainder >= 0 + assert int(attention_ratio * layers_count + 0.5) == counts[Symbols.ATTENTION] + assert int(mlp_ratio * layers_count + 0.5) == counts[Symbols.MLP] + assert int(remainder * layers_count + 0.5) == counts[Symbols.MAMBA] + + # Make sure the ratios are as requested. + # This code is not working yet because capsys seems broken in Megatron. + # captured = capsys.readouterr() # Remove this output from the capture buffer. + # out = captured.out # Get stdout. + # if attention_ratio != 0 or mlp_ratio != 0: + # assert ( + # match := re.search(r'Actual attention ratio: (1\.0|0\.[0-9]+)\.', out) + # ) and math.isclose(match.group(1), attention_ratio) + # assert ( + # match := re.search(r'Actual mlp ratio: (1\.0|0\.[0-9]+)\.', out) + # ) and math.isclose(match.group(1), mlp_ratio) + + @pytest.mark.xfail(raises=ValueError) + def test_wrong_length_override_pattern(self): + # This override_pattern is too short. + layer_types = allocate_layers(9, 0.0, 0.0, "M*-M*-") + + @pytest.mark.xfail(raises=ValueError) + def test_wrong_number_of_layer_types_in_override_pattern(self): + # This override_pattern has too many mlps and not enough attention + layer_types = allocate_layers(8, 0.5, 0.25, "M*--M**-") diff --git a/tests/unit_tests/ssm/test_mamba_layer.py b/tests/unit_tests/ssm/test_mamba_layer.py new file mode 100644 index 000000000..ea29a49c6 --- /dev/null +++ b/tests/unit_tests/ssm/test_mamba_layer.py @@ -0,0 +1,47 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import pytest +import torch + +from megatron.core.models.mamba.mamba_layer_specs import mamba_stack_spec +from megatron.core.ssm.mamba_layer import MambaLayer +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestMambaLayer: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + hidden_size=256, # The Mamba layer places several constraints on this + # Need to specify num_attention_heads and num_layers or TransformerConfig + # will generate errors. 
+ num_layers=1, + num_attention_heads=1, + use_cpu_initialization=True, + ) + modules = mamba_stack_spec.submodules.mamba_layer.submodules + self.layer = MambaLayer(transformer_config, modules) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_gpu_forward(self): + layer = self.layer + layer.cuda() + micro_batch_size = 2 + sequence_length = 32 + hidden_states = torch.ones((sequence_length, micro_batch_size, layer.config.hidden_size)) + hidden_states = hidden_states.cuda() + attention_mask = torch.ones( + (micro_batch_size, 1, sequence_length, sequence_length), dtype=bool + ) + attention_mask = attention_mask.cuda() + output = layer(hidden_states, attention_mask=attention_mask) + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == layer.config.hidden_size + assert output.dtype == torch.float32 diff --git a/tests/unit_tests/ssm/test_mamba_mixer.py b/tests/unit_tests/ssm/test_mamba_mixer.py new file mode 100644 index 000000000..4ea730a80 --- /dev/null +++ b/tests/unit_tests/ssm/test_mamba_mixer.py @@ -0,0 +1,50 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import pytest +import torch + +from megatron.core.models.mamba.mamba_layer_specs import mamba_stack_spec +from megatron.core.ssm.mamba_mixer import MambaMixer +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestMambaMixer: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + hidden_size=256, # The Mamba layer places several constraints on this + # Need to specify num_attention_heads and num_layers or TransformerConfig + # will generate errors. 
+ num_layers=1, + num_attention_heads=1, + use_cpu_initialization=True, + ) + modules = mamba_stack_spec.submodules.mamba_layer.submodules.mixer.submodules + self.mixer = MambaMixer(transformer_config, modules, transformer_config.hidden_size) + self.mixer_no_mem_eff_path = MambaMixer( + transformer_config, modules, transformer_config.hidden_size, use_mem_eff_path=False + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize("use_mem_eff_path", [True, False]) + def test_gpu_forward(self, use_mem_eff_path): + if use_mem_eff_path: + mixer = self.mixer + else: + mixer = self.mixer_no_mem_eff_path + mixer.cuda() + micro_batch_size = 2 + sequence_length = 32 + hidden_states = torch.ones((sequence_length, micro_batch_size, mixer.config.hidden_size)) + hidden_states = hidden_states.cuda() + output, bias = mixer(hidden_states) + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == mixer.config.hidden_size + assert output.dtype == torch.float32 diff --git a/tests/unit_tests/tensor_parallel/test_initialization.py b/tests/unit_tests/tensor_parallel/test_initialization.py index 039ad071a..e0d835f1e 100644 --- a/tests/unit_tests/tensor_parallel/test_initialization.py +++ b/tests/unit_tests/tensor_parallel/test_initialization.py @@ -4,16 +4,13 @@ import torch import megatron.core.parallel_state as ps +from megatron.core.extensions.transformer_engine import TEColumnParallelLinear, TERowParallelLinear from megatron.core.tensor_parallel.layers import ( ColumnParallelLinear, RowParallelLinear, VocabParallelEmbedding, ) from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.custom_layers.transformer_engine import ( - TEColumnParallelLinear, - TERowParallelLinear, -) from megatron.core.transformer.transformer_config import TransformerConfig from tests.unit_tests.test_utilities import Utils diff --git a/tests/unit_tests/tensor_parallel/test_layers.py b/tests/unit_tests/tensor_parallel/test_layers.py index 709fc598f..d635e164d 100644 --- a/tests/unit_tests/tensor_parallel/test_layers.py +++ b/tests/unit_tests/tensor_parallel/test_layers.py @@ -24,7 +24,6 @@ def test_LinearWithFrozenWeight(tensor_parallel, allreduce_dgrad): bias = torch.zeros((size_per_partition)).cuda() gradient_accumulation_fusion = False - async_grad_allreduce = allreduce_dgrad sequence_parallel = False grad_output_buffer = None wgrad_deferral_limit = None @@ -34,11 +33,10 @@ def test_LinearWithFrozenWeight(tensor_parallel, allreduce_dgrad): weight, bias, gradient_accumulation_fusion, - async_grad_allreduce, + allreduce_dgrad, sequence_parallel, grad_output_buffer, wgrad_deferral_limit, - allreduce_dgrad, ) output = gather_from_tensor_model_parallel_region( output_parallel diff --git a/tests/unit_tests/tensor_parallel/test_mappings.py b/tests/unit_tests/tensor_parallel/test_mappings.py index d5bc3f212..3c5536f27 100644 --- a/tests/unit_tests/tensor_parallel/test_mappings.py +++ b/tests/unit_tests/tensor_parallel/test_mappings.py @@ -1,3 +1,4 @@ +import pytest import torch from megatron.core.tensor_parallel import mappings @@ -90,6 +91,7 @@ def test_ScatterToSequenceParallelRegion(): Utils.destroy_model_parallel() +@pytest.mark.internal def test_GatherFromSequenceParallelRegion(): Utils.initialize_model_parallel(4, 2) input_data = torch.ones(4).cuda() * Utils.rank @@ -110,6 +112,8 @@ def test_GatherFromSequenceParallelRegion(): class Ctx: tensor_parallel_output_grad = True 
output_split_sizes = None + group = None + use_global_buffer = False output_data = mappings._GatherFromSequenceParallelRegion.backward(Ctx(), input_data) expected_output = torch.ones((1, 4)).cuda() * 4 * int(Utils.rank % 4) @@ -117,6 +121,7 @@ class Ctx: Utils.destroy_model_parallel() +@pytest.mark.internal def test_ReduceScatterToSequenceParallelRegion(): Utils.initialize_model_parallel(4, 2) input_data = torch.vstack( @@ -133,12 +138,14 @@ def test_ReduceScatterToSequenceParallelRegion(): class Ctx: input_split_sizes = None + group = None + use_global_buffer = False - output_data, _ = mappings._ReduceScatterToSequenceParallelRegion.backward(Ctx(), input_data) + output_data = mappings._ReduceScatterToSequenceParallelRegion.backward(Ctx(), input_data) expected_output = torch.concat( (torch.ones(4) * 0, torch.ones(4) * 1, torch.ones(4) * 2, torch.ones(4) * 3) ).cuda() if Utils.rank >= 4: expected_output = expected_output + 4 - assert torch.equal(output_data, expected_output) + assert torch.equal(output_data[0], expected_output) Utils.destroy_model_parallel() diff --git a/tests/unit_tests/test_inference.py b/tests/unit_tests/test_inference.py new file mode 100644 index 000000000..140b30125 --- /dev/null +++ b/tests/unit_tests/test_inference.py @@ -0,0 +1,113 @@ +import argparse +import unittest.mock + +import numpy as np +import pytest +import torch + +from megatron.inference.text_generation_server import MegatronServer +from megatron.training import tokenizer +from tests.unit_tests.test_tokenizer import GPT2_VOCAB_SIZE, gpt2_tiktok_vocab +from tests.unit_tests.test_utilities import Utils + +logitsT = torch.Tensor + + +@pytest.fixture +def gpt2_tiktoken_tokenizer(gpt2_tiktok_vocab): + return tokenizer.build_tokenizer(gpt2_tiktok_vocab) + + +def forward_step_wrapper(gpt2_tiktoken_tokenizer): + assert gpt2_tiktoken_tokenizer.vocab_size == GPT2_VOCAB_SIZE + + def mock_forward_step_fn(tokens, position_ids, attention_mask) -> logitsT: + B, L = tokens.shape + assert B == 1, "Test assumes batch_size == 1" + V = gpt2_tiktoken_tokenizer.vocab_size + next_token_idxs = tokens[0, 1:] + logits = torch.zeros(1, L, V, dtype=torch.float32, device=tokens.device) + logits[0, torch.arange(L - 1), next_token_idxs] = 100 + logits[0, -1, gpt2_tiktoken_tokenizer.eos] = 100 + return logits + + return mock_forward_step_fn + + +@pytest.fixture +def app(): + server = MegatronServer(None) + return server.app + + +@pytest.fixture +def client(app): + return app.test_client() + + +@unittest.mock.patch('megatron.inference.endpoints.completions.get_tokenizer') +@unittest.mock.patch('megatron.inference.endpoints.completions.send_do_generate') +@unittest.mock.patch('megatron.inference.text_generation.generation.get_args') +@unittest.mock.patch('megatron.inference.text_generation.api.mpu') +@unittest.mock.patch('megatron.inference.text_generation.generation.mpu') +@unittest.mock.patch('megatron.inference.text_generation.communication.mpu') +@unittest.mock.patch('megatron.inference.text_generation.generation.ForwardStep') +@unittest.mock.patch('megatron.inference.text_generation.tokenization.get_tokenizer') +def test_completions( + mock_get_tokenizer1, + mock_forward_step, + mock_mpu_2, + mock_mpu_1, + mock_mpu_0, + mock_get_args_1, + mock_send_do_generate, + mock_get_tokenizer2, + client, + gpt2_tiktoken_tokenizer, +): + Utils.initialize_distributed() + + # set up the mocks + args = argparse.Namespace( + max_position_embeddings=1024, max_tokens_to_oom=1_000_000, inference_max_seq_length=1024 + ) + mock_get_args_1.return_value 
= args + mock_get_tokenizer1.return_value = gpt2_tiktoken_tokenizer + mock_get_tokenizer2.return_value = gpt2_tiktoken_tokenizer + mock_forward_step.return_value = forward_step_wrapper(gpt2_tiktoken_tokenizer) + mock_mpu_0.is_pipeline_last_stage.return_value = True + mock_mpu_1.is_pipeline_last_stage.return_value = True + mock_mpu_2.is_pipeline_last_stage.return_value = True + + twinkle = ("twinkle twinkle little star,", " how I wonder what you are") + request_data = {"prompt": twinkle[0] + twinkle[1], "max_tokens": 0, "logprobs": 5, "echo": True} + + response = client.post('/completions', json=request_data) + + assert response.status_code == 200 + assert response.is_json + + json_data = response.get_json() + assert 'choices' in json_data + assert len(json_data['choices']) > 0 + assert 'text' in json_data['choices'][0] + assert 'logprobs' in json_data['choices'][0] + + # whats up with the reconstruction of the prompt? + # we are replicating what lm-eval-harness::TemplateLM::_encode_pair does + # it encodes prompt, then prompt+suffix, and then infers the suffix tokens + # from the combined encoding. + logprobs = json_data["choices"][0]["logprobs"] + num_reconstructed_prompt_tokens = np.searchsorted(logprobs["text_offset"], len(twinkle[0])) + assert num_reconstructed_prompt_tokens == len(gpt2_tiktoken_tokenizer.tokenize(twinkle[0])) + suffix_logprob = logprobs["token_logprobs"][num_reconstructed_prompt_tokens:] + + # we mock logits to be 0 everywhere, and 100 at gt tokens, so logprob should be 0 for gt tokens + assert sum(suffix_logprob) == 0, f"{suffix_logprob} != [0, .... 0]" + + # Test for unsupported HTTP methods + response = client.put('/completions', json=request_data) + assert response.status_code == 405 # Method Not Allowed + + mock_get_tokenizer1.assert_called() + mock_send_do_generate.assert_called_once() diff --git a/tests/unit_tests/test_local_multi_tensor_fns.py b/tests/unit_tests/test_local_multi_tensor_fns.py index 086de6f6d..9c06cd24a 100644 --- a/tests/unit_tests/test_local_multi_tensor_fns.py +++ b/tests/unit_tests/test_local_multi_tensor_fns.py @@ -17,8 +17,11 @@ def test_local_multi_tensor_l2_norm_and_scale(): torch.manual_seed(42) tensor_list = [torch.rand(5, 5).cuda() for _ in range(10)] + tensor_list_hold = copy.copy(tensor_list) tensor_list_copy = copy.deepcopy(tensor_list) + tensor_list_copy_hold = copy.copy(tensor_list_copy) + # test multi_tensor_l2norm norm_apex, _ = multi_tensor_apply.multi_tensor_applier( amp_C.multi_tensor_l2norm, torch.tensor([0], dtype=torch.int, device='cuda'), @@ -33,6 +36,7 @@ def test_local_multi_tensor_l2_norm_and_scale(): ) torch.testing.assert_close(norm_apex, norm_local) + # test src is dst clip_coeff = 0.05 multi_tensor_apply.multi_tensor_applier( amp_C.multi_tensor_scale, @@ -46,6 +50,26 @@ def test_local_multi_tensor_l2_norm_and_scale(): [tensor_list_copy, tensor_list_copy], clip_coeff, ) + torch.testing.assert_close(tensor_list, tensor_list_hold) + torch.testing.assert_close(tensor_list_copy, tensor_list_copy_hold) + torch.testing.assert_close(tensor_list, tensor_list_copy) + + # test src is not dst + clip_coeff = 2.0 + multi_tensor_apply.multi_tensor_applier( + amp_C.multi_tensor_scale, + torch.tensor([0], dtype=torch.int, device='cuda'), + [copy.deepcopy(tensor_list), tensor_list], + clip_coeff, + ) + multi_tensor_apply.multi_tensor_applier( + local_multi_tensor_scale, + torch.tensor([0], dtype=torch.int, device='cuda'), + [copy.deepcopy(tensor_list_copy), tensor_list_copy], + clip_coeff, + ) + 
torch.testing.assert_close(tensor_list, tensor_list_hold) + torch.testing.assert_close(tensor_list_copy, tensor_list_copy_hold) torch.testing.assert_close(tensor_list, tensor_list_copy) diff --git a/tests/unit_tests/test_parallel_state.py b/tests/unit_tests/test_parallel_state.py index 6dbf0394a..ca5185b28 100644 --- a/tests/unit_tests/test_parallel_state.py +++ b/tests/unit_tests/test_parallel_state.py @@ -1,5 +1,3 @@ -import os - import pytest import torch @@ -12,6 +10,7 @@ @pytest.mark.parametrize('order', test_parallel_order) +@pytest.mark.flaky_in_dev def test_initialize_and_destroy_model_parallel(order): with pytest.raises(AssertionError): assert ps.initialize_model_parallel(order=order) @@ -39,6 +38,10 @@ def test_initialize_and_destroy_model_parallel(order): assert ps.get_tensor_model_parallel_group() is not None assert ps.get_pipeline_model_parallel_group() is not None assert ps.get_data_parallel_group() is not None + assert ps.get_expert_model_parallel_group() is not None + assert ps.get_expert_tensor_parallel_group() is not None + assert ps.get_expert_data_parallel_group() is not None + assert ps.get_expert_tensor_model_pipeline_parallel_group() is not None Utils.destroy_model_parallel() assert ps._MODEL_PARALLEL_GROUP is None @@ -73,6 +76,15 @@ def test_tensor_model_parellel_world_size(order): Utils.destroy_model_parallel() +@pytest.mark.parametrize('order', test_parallel_order) +def test_expert_tensor_parellel_world_size(order): + Utils.initialize_model_parallel(expert_tensor_parallel_size=world_size, order=order) + assert ps.get_expert_tensor_parallel_world_size() == world_size + ps.set_expert_tensor_parallel_world_size(None) + assert ps.get_expert_tensor_parallel_world_size() == world_size + Utils.destroy_model_parallel() + + @pytest.mark.parametrize('order', test_parallel_order) def test_pipeline_model_parallel_world_size(order): Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) @@ -91,6 +103,15 @@ def test_tensor_model_parallel_rank(order): Utils.destroy_model_parallel() +@pytest.mark.parametrize('order', test_parallel_order) +def test_moe_tensor_model_parellel_rank(order): + Utils.initialize_model_parallel(expert_tensor_parallel_size=world_size, order=order) + assert ps.get_expert_tensor_parallel_rank() == rank + ps.set_expert_tensor_parallel_rank(None) + assert ps.get_expert_tensor_parallel_rank() == rank + Utils.destroy_model_parallel() + + @pytest.mark.parametrize('order', test_parallel_order) def test_pipeline_model_parallel_rank(order): Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) @@ -166,6 +187,7 @@ def test_encoder_tensor_pipeline_parallelism(order): Utils.destroy_model_parallel() +@pytest.mark.internal @pytest.mark.parametrize( 'src_tp_pp, ep_size', [ @@ -191,12 +213,12 @@ def test_different_initialize_order_consistency(src_tp_pp, ep_size): tp_g = torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) dp_g = torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) pp_g = torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) - dp_no_ep_g = torch.distributed.get_process_group_ranks( - ps.get_data_modulo_expert_parallel_group() - ) + dp_no_ep_g = torch.distributed.get_process_group_ranks(ps.get_expert_data_parallel_group()) cp_g = torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) mp_g = torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) - tp_ep_g = 
torch.distributed.get_process_group_ranks(ps.get_tensor_and_expert_parallel_group()) + tp_ep_g = torch.distributed.get_process_group_ranks( + ps.get_expert_tensor_and_model_parallel_group() + ) tp_dp_g = torch.distributed.get_process_group_ranks( ps.get_tensor_and_data_parallel_group(False) ) @@ -215,12 +237,12 @@ def test_different_initialize_order_consistency(src_tp_pp, ep_size): assert dp_g == torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) assert pp_g == torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) assert dp_no_ep_g == torch.distributed.get_process_group_ranks( - ps.get_data_modulo_expert_parallel_group() + ps.get_expert_data_parallel_group() ) assert cp_g == torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) assert mp_g == torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) assert tp_ep_g == torch.distributed.get_process_group_ranks( - ps.get_tensor_and_expert_parallel_group() + ps.get_expert_tensor_and_model_parallel_group() ) assert tp_dp_g == torch.distributed.get_process_group_ranks( ps.get_tensor_and_data_parallel_group(False) @@ -260,6 +282,7 @@ def test_different_initialize_order_unconsistency(src_tp_pp, ep_size): Utils.destroy_model_parallel() +@pytest.mark.internal @pytest.mark.parametrize( 'nodes, num_gpu, tp, pp, cp, ep', [ @@ -388,54 +411,37 @@ def golden_rank_result_from_past_code( ranks = ranks + list(range(start_rank, end_rank)) tp_dp_group.append(list(ranks)) - tp_ep_group = [] - dp_no_ep_group = [] - dp_no_ep_group_with_cp = [] + expert_tp_ep_group = [] + expert_dp_group = [] + expert_data_parallel_size = world_size // ( + tensor_model_parallel_size * pipeline_model_parallel_size * expert_model_parallel_size + ) all_ranks = torch.arange(world_size).reshape( ( pipeline_model_parallel_size, - data_parallel_size // expert_model_parallel_size, + expert_data_parallel_size, expert_model_parallel_size, - context_parallel_size, tensor_model_parallel_size, ) ) - # 'pp edp ep cp tp -> (pp edp cp) (ep tp)' - tp_ep_rearrange = torch.transpose(all_ranks, 2, 3) + # (pp, dp, ep, tp) -> (pp*dp, ep*tp) tp_ep_rearrange = torch.reshape( - tp_ep_rearrange, (-1, expert_model_parallel_size * tensor_model_parallel_size) + all_ranks, (-1, expert_model_parallel_size * tensor_model_parallel_size) ) - tp_ep_rearrange = tp_ep_rearrange.tolist() - tp_ep_rearrange.sort() - for tensor_and_expert_parallel_ranks in tp_ep_rearrange: - tensor_and_expert_parallel_ranks = list(tensor_and_expert_parallel_ranks) - tensor_and_expert_parallel_ranks.sort() - tp_ep_group.append(tensor_and_expert_parallel_ranks) - # 'pp edp ep cp tp -> (pp ep cp tp) edp' - edp_rearrange = torch.transpose(all_ranks, 1, 4) - edp_rearrange = torch.reshape( - edp_rearrange, (-1, data_parallel_size // expert_model_parallel_size) + num_tp_ep_groups = tp_ep_rearrange.shape[0] + for i in range(num_tp_ep_groups): + expert_tensor_and_model_parallel_ranks = tp_ep_rearrange[i].tolist() + expert_tp_ep_group.append(expert_tensor_and_model_parallel_ranks) + + # (pp, dp, ep, tp) -> (pp*ep*tp, dp) + expert_dp_rearrange = torch.permute(all_ranks, (0, 2, 3, 1)).reshape( + -1, expert_data_parallel_size ) - edp_rearrange = edp_rearrange.tolist() - edp_rearrange.sort() - for expert_data_parallel_ranks in edp_rearrange: - expert_data_parallel_ranks = list(expert_data_parallel_ranks) - expert_data_parallel_ranks.sort() - dp_no_ep_group.append(expert_data_parallel_ranks) - # 'pp edp ep cp tp -> (pp ep tp) (cp edp)' - edp_cp_rearrange = 
torch.transpose(all_ranks, 1, 2) - edp_cp_rearrange = torch.transpose(edp_cp_rearrange, 2, 4) - edp_cp_rearrange = torch.reshape( - edp_cp_rearrange, - (-1, context_parallel_size * data_parallel_size // expert_model_parallel_size), - ) - edp_cp_rearrange = edp_cp_rearrange.tolist() - edp_cp_rearrange.sort() - for expert_data_parallel_ranksj_with_cp in edp_cp_rearrange: - expert_data_parallel_ranksj_with_cp = list(expert_data_parallel_ranksj_with_cp) - expert_data_parallel_ranksj_with_cp.sort() - dp_no_ep_group_with_cp.append(expert_data_parallel_ranksj_with_cp) + num_expert_dp_groups = world_size // expert_data_parallel_size + for i in range(num_expert_dp_groups): + expert_dp_ranks = expert_dp_rearrange[i].tolist() + expert_dp_group.append(expert_dp_ranks) return ( dp_groups, @@ -446,13 +452,13 @@ def golden_rank_result_from_past_code( pp_group, tp_dp_group, tp_dp_cp_group, - tp_ep_group, - dp_no_ep_group, - dp_no_ep_group_with_cp, + expert_tp_ep_group, + expert_dp_group, ) world_size = nodes * num_gpu dp = world_size // (tp * pp * cp) + expert_dp = world_size // (tp * ep * pp) assert dp % ep == 0, f"dp size ({dp}) is not divisible by ep {ep} ." assert ( world_size % (tp * pp * cp) == 0 @@ -466,9 +472,8 @@ def golden_rank_result_from_past_code( pp_group, tp_dp_group, tp_dp_cp_group, - tp_ep_group, - dp_no_ep_group, - dp_no_ep_group_with_cp, + expert_tp_ep_group, + expert_dp_group, ) = golden_rank_result_from_past_code( world_size=world_size, tensor_model_parallel_size=tp, @@ -476,7 +481,10 @@ def golden_rank_result_from_past_code( context_parallel_size=cp, expert_model_parallel_size=ep, ) - rank_generator = ps.RankGenerator(tp=tp, ep=ep, dp=dp, pp=pp, cp=cp, order="tp-cp-ep-dp-pp") + rank_generator = ps.RankGenerator(tp=tp, ep=1, dp=dp, pp=pp, cp=cp, order="tp-cp-dp-pp") + expert_rank_generator = ps.RankGenerator( + tp=tp, ep=ep, dp=expert_dp, pp=pp, cp=1, order="tp-ep-dp-pp" + ) assert dp_groups == rank_generator.get_ranks( "dp" ), f"{dp_groups} != {rank_generator.get_ranks('dp')}" @@ -501,12 +509,9 @@ def golden_rank_result_from_past_code( assert tp_dp_cp_group == rank_generator.get_ranks( "tp-dp-cp" ), f"{tp_dp_cp_group} != {rank_generator.get_ranks('tp-dp-cp')}" - assert tp_ep_group == rank_generator.get_ranks( - "tp-ep", independent_ep=True - ), f"{tp_ep_group} != {rank_generator.get_ranks('tp-ep', independent_ep=True)}." - assert dp_no_ep_group == rank_generator.get_ranks( - "dp", independent_ep=True - ), f"{dp_no_ep_group} != {rank_generator.get_ranks('dp', independent_ep=True)}." - assert dp_no_ep_group_with_cp == rank_generator.get_ranks( - "dp-cp", independent_ep=True - ), f"{dp_no_ep_group_with_cp} != {rank_generator.get_ranks('dp-cp', independent_ep=True)}." + assert expert_tp_ep_group == expert_rank_generator.get_ranks( + "tp-ep" + ), f"{expert_tp_ep_group} != {expert_rank_generator.get_ranks('tp-ep')}." + assert expert_dp_group == expert_rank_generator.get_ranks( + "dp" + ), f"{expert_dp_group} != {expert_rank_generator.get_ranks('dp')}." 
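For reference, the expert-parallel grouping that the reworked golden-rank check above verifies boils down to a few tensor reshapes. Below is a minimal standalone sketch, assuming a hypothetical 8-rank world with tp=2, ep=2, pp=1 (so expert_dp = world_size // (tp * ep * pp) = 2); the variable names mirror the test, but the concrete sizes are illustrative only and are not part of the patch.

import torch

world_size, pp, ep, tp = 8, 1, 2, 2
expert_dp = world_size // (tp * ep * pp)

# Lay the ranks out on a (pp, expert_dp, ep, tp) grid, as the test's all_ranks tensor does.
all_ranks = torch.arange(world_size).reshape(pp, expert_dp, ep, tp)

# (pp, dp, ep, tp) -> (pp*dp, ep*tp): each row is one expert tensor-and-model parallel group.
tp_ep_groups = all_ranks.reshape(-1, ep * tp).tolist()
print(tp_ep_groups)      # [[0, 1, 2, 3], [4, 5, 6, 7]]

# (pp, dp, ep, tp) -> (pp*ep*tp, dp): each row is one expert data-parallel group.
expert_dp_groups = torch.permute(all_ranks, (0, 2, 3, 1)).reshape(-1, expert_dp).tolist()
print(expert_dp_groups)  # [[0, 4], [1, 5], [2, 6], [3, 7]]

These are the lists that the assertions above expect expert_rank_generator.get_ranks("tp-ep") and expert_rank_generator.get_ranks("dp") to return for the separate expert rank generator.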
diff --git a/tests/unit_tests/test_tokenizer.py b/tests/unit_tests/test_tokenizer.py index 13e222953..3d8f5d9c3 100644 --- a/tests/unit_tests/test_tokenizer.py +++ b/tests/unit_tests/test_tokenizer.py @@ -3,16 +3,20 @@ from argparse import Namespace from pathlib import Path +import numpy as np import pytest import requests from megatron.training import tokenizer from megatron.training.tokenizer.gpt2_tokenization import PRETRAINED_VOCAB_ARCHIVE_MAP +from megatron.training.tokenizer.multimodal_tokenizer import MultimodalTokenizer TOKENIZER_DIR = Path("~/data/tokenizers").expanduser() # Copied over from test_preprocess_data.py -__LOCAL_GPT2_VOCAB = "/home/gitlab-runner/data/gpt3_data/gpt2-vocab.json" +from tests.unit_tests.data.test_preprocess_data import __LOCAL_GPT2_VOCAB + +GPT2_VOCAB_SIZE = 32768 def offsets_to_substrs(offsets, string): @@ -117,14 +121,11 @@ def gpt2_tiktok_vocab(tmp_path_factory): ) -def specs(): - if TOKENIZER_DIR.exists(): - return local_test_specs() - return [] - - -@pytest.mark.parametrize("args", specs()) +@pytest.mark.parametrize("args", local_test_specs()) def test_tokenizer(args): + if not TOKENIZER_DIR.exists(): + pytest.skip("Skipping tokenizer tests because the tokenizer directory does not exist") + tok = tokenizer.build_tokenizer(args) run_tokenizer_tests(tok) @@ -191,3 +192,85 @@ def test_null_tokenizer(): detok_str == test_string ), f"Detokenized string {detok_str} does not match original {test_string}" assert len(toks) == len(offsets), f"Tokenized string {toks} does not match original {offsets}" + + +class MockUnderlyingTokenizer: + """Mock tokenizer for testing purposes.""" + + def __init__(self): + self.pad_token_id = 256 + + def __len__(self): + return 256 + + def encode(self, text: str) -> list[int]: + """Convert text to a list of token IDs.""" + return [ord(c) for c in text] + + def decode(self, tokens: list[int]) -> str: + """Convert list of token IDs to plaintext.""" + return "".join([chr(t) for t in tokens]) + + def apply_chat_template(self, conversation: list[dict], *args, **kwargs) -> list[int]: + """Convert a conversation to token IDs.""" + out = [] + for turn in conversation: + turn_tokens = self.encode(f"{turn['role']}:{turn['content']}") + out.extend(turn_tokens) + + if kwargs.get("return_tensors", None) == "np": + return [np.array(out)] + + return out + + def convert_tokens_to_ids(self, text: str) -> list[int]: + """Convert plaintext to token IDs.""" + return self.encode(text) + + def add_tokens(self, extra_tokens: list[str], *args, **kwargs) -> int: + """Add tokens to the tokenizer. No-op for this mock tokenizer.""" + return len(extra_tokens) + + +def test_multimodal_tokenizer(): + """Test MultimodalTokenizer.""" + underlying = MockUnderlyingTokenizer() + prompt_format = "chatml" + special_tokens = [""] + image_tag_type = "" + tokenizer = MultimodalTokenizer(underlying, prompt_format, special_tokens, image_tag_type) + + # Simple encode - decode roundtrip. + assert ( + tokenizer.detokenize(tokenizer.tokenize("abc")) == "abc" + ), "encode-decode roundtrip failed" + + # Apply chat template. 
+ conversation = [ + {"role": "system", "content": "abc"}, + {"role": "user", "content": "123"}, + {"role": "assistant", "content": "xyz"}, + ] + conv_tokens = tokenizer.tokenize_conversation( + conversation, return_target=False, add_generation_prompt=False + ) + assert len(conv_tokens) > 0, "failed to tokenize conversation" + + conv_tokens, target_tokens = tokenizer.tokenize_conversation( + conversation, return_target=True, add_generation_prompt=True + ) + assert len(conv_tokens) > 0 and len(conv_tokens) == len( + target_tokens + ), "failed to tokenize conversation and return target tokens" + + # Try converting tokens to ids. + assert tokenizer.convert_tokens_to_ids("a"), "failed to convert tokens to ids." + + # Try image tags. + image_tag_type = "nvlm" + tokenizer = MultimodalTokenizer(underlying, prompt_format, special_tokens, image_tag_type) + + assert tokenizer._apply_image_tag("hello") == "hello" + assert tokenizer._apply_image_tag([{"role": "user", "content": "hello"}]) == [ + {"role": "user", "content": "hello"} + ] diff --git a/tests/unit_tests/test_utilities.py b/tests/unit_tests/test_utilities.py index 29aef63c8..410350be1 100644 --- a/tests/unit_tests/test_utilities.py +++ b/tests/unit_tests/test_utilities.py @@ -27,7 +27,7 @@ def __init__( class Utils: - world_size = torch.cuda.device_count() + world_size = int(os.environ['WORLD_SIZE']) rank = int(os.environ['LOCAL_RANK']) inited = False store = None @@ -102,3 +102,22 @@ def initialize_model_parallel( **kwargs, ) Utils.inited = True + + @staticmethod + def fake_initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + virtual_pipeline_model_parallel_size=None, + expert_model_parallel_size=1, + ): + """Used for layer-wise unit tests as a proxy for NeMo-style initialization.""" + ps.set_tensor_model_parallel_world_size(tensor_model_parallel_size) + ps.set_tensor_model_parallel_rank(0) + + ps.set_expert_model_parallel_world_size(expert_model_parallel_size) + ps.set_expert_model_parallel_rank(0) + if virtual_pipeline_model_parallel_size is not None: + ps.set_virtual_pipeline_model_parallel_world_size(virtual_pipeline_model_parallel_size) + ps.set_virtual_pipeline_model_parallel_rank(0) + + ps.set_pipeline_model_parallel_world_size(pipeline_model_parallel_size) diff --git a/tests/unit_tests/transformer/moe/conftest.py b/tests/unit_tests/transformer/moe/conftest.py new file mode 100644 index 000000000..dda2a6d2b --- /dev/null +++ b/tests/unit_tests/transformer/moe/conftest.py @@ -0,0 +1,49 @@ +import os +from pathlib import Path + +import pytest +import torch +import torch.distributed + +from megatron.core.utils import is_te_min_version +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + + +def pytest_sessionfinish(session, exitstatus): + if exitstatus == 5: + session.exitstatus = 0 + + +@pytest.fixture(scope="session", autouse=True) +def cleanup(): + yield + if torch.distributed.is_initialized(): + print("Waiting for destroy_process_group") + torch.distributed.barrier() + torch.distributed.destroy_process_group() + + +@pytest.fixture(scope="function", autouse=True) +def set_env(): + if is_te_min_version("1.3"): + os.environ['NVTE_FLASH_ATTN'] = '0' + os.environ['NVTE_FUSED_ATTN'] = '0' + + +@pytest.fixture(scope="session") +def tmp_path_dist_ckpt(tmp_path_factory) -> Path: + """Common directory for saving the checkpoint. + + Can't use pytest `tmp_path_factory` directly because directory must be shared between processes.
+ """ + + tmp_dir = tmp_path_factory.mktemp('ignored', numbered=False) + tmp_dir = tmp_dir.parent.parent / 'tmp_dist_ckpt' + + if Utils.rank == 0: + with TempNamedDir(tmp_dir, sync=False): + yield tmp_dir + + else: + yield tmp_dir diff --git a/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py b/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py index ad829881d..96afe46e9 100644 --- a/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py +++ b/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py @@ -3,11 +3,16 @@ import pytest import torch -from megatron.core.transformer.moe.moe_utils import permute, unpermute from tests.unit_tests.test_utilities import Utils from tests.unit_tests.transformer.moe.test_token_dispatcher import MoEModelTestContainer +def test_placeholder(): + """This is here because otherwise there's no other test in this module (all disabled) and pytest would fail.""" + pass + + +@pytest.mark.flaky class TestAlltoAllDispatcher: def setup_method(self, method): pass @@ -19,6 +24,8 @@ def teardown_method(self, method): @pytest.mark.internal @pytest.mark.timeout(120) @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev def test_forward_backward(self, tp_size, ep_size): container = MoEModelTestContainer( tp_size=tp_size, @@ -35,6 +42,8 @@ def test_forward_backward(self, tp_size, ep_size): @pytest.mark.internal @pytest.mark.timeout(120) @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev def test_a2aseq_forward_backward(self, tp_size, ep_size): container = MoEModelTestContainer( tp_size=tp_size, @@ -51,6 +60,8 @@ def test_a2aseq_forward_backward(self, tp_size, ep_size): @pytest.mark.internal @pytest.mark.timeout(120) @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev def test_capacity_forward_backward(self, tp_size, ep_size): container = MoEModelTestContainer( tp_size=tp_size, @@ -64,13 +75,14 @@ def test_capacity_forward_backward(self, tp_size, ep_size): moe_expert_capacity_factor=0.5, moe_pad_expert_input_to_capacity=False, ) - container.dispacher_capacity_test() + container.dispatcher_capacity_test() @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal @pytest.mark.timeout(120) @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) @pytest.mark.flaky + @pytest.mark.flaky_in_dev def test_capacity_padding_forward_backward(self, tp_size, ep_size): container = MoEModelTestContainer( tp_size=tp_size, @@ -81,7 +93,7 @@ def test_capacity_padding_forward_backward(self, tp_size, ep_size): moe_router_load_balancing_type="aux_loss", moe_token_dispatcher_type="alltoall", moe_token_drop_policy="probs", - moe_expert_capacity_factor=0.5, + moe_expert_capacity_factor=0.6, moe_pad_expert_input_to_capacity=True, ) container.dispatcher_drop_and_pad_test() diff --git a/tests/unit_tests/transformer/moe/test_aux_loss.py b/tests/unit_tests/transformer/moe/test_aux_loss.py index 2b7b2e109..50567e193 100644 --- a/tests/unit_tests/transformer/moe/test_aux_loss.py +++ b/tests/unit_tests/transformer/moe/test_aux_loss.py @@ -18,6 +18,7 @@ def partition_input(self, input): output.requires_grad = True return output + @pytest.mark.internal def aux_loss_test(self, input, baseline_grad): partitioned_input = self.partition_input(input) moe_layer = self.moe_layer @@ -56,6 +57,7 @@ 
def setup_method(self, method): def teardown_method(self, method): Utils.destroy_model_parallel() + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal @pytest.mark.parametrize( @@ -75,6 +77,7 @@ def test_allgather_dispatcher(self, tp_size, ep_size, cp_size): ) container.aux_loss_test(self.input, self.baseline_grad) + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal @pytest.mark.parametrize( diff --git a/tests/unit_tests/transformer/moe/test_grouped_mlp.py b/tests/unit_tests/transformer/moe/test_grouped_mlp.py index 043bdc8c5..4748cbc88 100644 --- a/tests/unit_tests/transformer/moe/test_grouped_mlp.py +++ b/tests/unit_tests/transformer/moe/test_grouped_mlp.py @@ -312,6 +312,7 @@ def test_constructor(self): self.fc2_ffn_hidden_size, ) + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal def test_gpu_forward_backward(self): @@ -355,6 +356,7 @@ def test_gpu_forward_backward(self): for smm_result, gmm_result in zip(smm_results, gmm_results): torch.testing.assert_close(smm_result, gmm_result) + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal def test_gpu_forward_backward_with_no_tokens_allocated(self): diff --git a/tests/unit_tests/transformer/moe/test_moe_layer.py b/tests/unit_tests/transformer/moe/test_moe_layer.py index e65e7f225..591ba4d4a 100644 --- a/tests/unit_tests/transformer/moe/test_moe_layer.py +++ b/tests/unit_tests/transformer/moe/test_moe_layer.py @@ -69,5 +69,55 @@ def test_legacy_moe_layer(self, num_moe_experts, moe_token_dispatcher_type): ) Utils.destroy_model_parallel() + @pytest.mark.parametrize("moe_token_dispatcher_type", ["allgather", "alltoall"]) + @pytest.mark.parametrize("grouped_gemm", [True, False]) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 1), (2, 2)]) + def test_moe_with_late_initialize( + self, moe_token_dispatcher_type, grouped_gemm, tp_size, ep_size + ): + num_moe_experts = 4 + hidden_size = 12 + transformer_config = TransformerConfig( + num_layers=1, + hidden_size=hidden_size, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + moe_router_load_balancing_type="aux_loss", + moe_router_topk=2, + moe_aux_loss_coeff=0.01, + add_bias_linear=False, + moe_grouped_gemm=grouped_gemm, + moe_token_dispatcher_type=moe_token_dispatcher_type, + tensor_model_parallel_size=tp_size, + expert_model_parallel_size=ep_size, + sequence_parallel=tp_size > 1, + bf16=True, + params_dtype=torch.bfloat16, + ) + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm + ) + + # Fake initialization as NeMo does + Utils.fake_initialize_model_parallel( + tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size + ) + moe_layer = MoELayer( + transformer_config, transformer_layer_spec.submodules.mlp.submodules + ).cuda() + + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size + ) + _set_random_seed(seed_=123, data_parallel_random_init=False) + + input_data = torch.randn( + 16, 4, hidden_size, device=torch.cuda.current_device(), dtype=torch.bfloat16 + ) + output = moe_layer(input_data) + + Utils.destroy_model_parallel() + def teardown_method(self, method): Utils.destroy_model_parallel() diff --git 
a/tests/unit_tests/transformer/moe/test_routers.py b/tests/unit_tests/transformer/moe/test_routers.py index c1633834b..2b3e098db 100644 --- a/tests/unit_tests/transformer/moe/test_routers.py +++ b/tests/unit_tests/transformer/moe/test_routers.py @@ -44,6 +44,7 @@ def test_constructor(self): num_weights = sum([p.numel() for p in self.router.parameters()]) assert num_weights == 12 * 4, num_weights + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal @pytest.mark.parametrize("moe_router_pre_softmax", [(True), (False)]) @@ -56,6 +57,7 @@ def test_router_forward(self, moe_router_pre_softmax): hidden_states = hidden_states.cuda() scores, indices = self.router(hidden_states) + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal def test_aux_loss(self): diff --git a/tests/unit_tests/transformer/moe/test_sequential_mlp.py b/tests/unit_tests/transformer/moe/test_sequential_mlp.py index 514e098bf..2a005555d 100644 --- a/tests/unit_tests/transformer/moe/test_sequential_mlp.py +++ b/tests/unit_tests/transformer/moe/test_sequential_mlp.py @@ -4,13 +4,10 @@ import pytest import torch +from megatron.core.extensions.transformer_engine import TEColumnParallelLinear, TERowParallelLinear from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.custom_layers.transformer_engine import ( - TEColumnParallelLinear, - TERowParallelLinear, -) from megatron.core.transformer.mlp import MLPSubmodules from megatron.core.transformer.moe.experts import SequentialMLP from megatron.core.transformer.moe.moe_layer import MoELayer @@ -114,6 +111,7 @@ def setup_method(self, method): self.num_local_experts, self.transformer_config, self.te_mlp_spec ) + @pytest.mark.internal @pytest.mark.skipif( not is_te_min_version("1.7.0"), reason="Transformer Engine under v1.7.0 doesn't support MoE training.", @@ -130,6 +128,7 @@ def test_constructor(self): self.te_sequential_mlp.local_experts[i].linear_fc2.weight, ) + @pytest.mark.internal @pytest.mark.skipif( not is_te_min_version("1.7.0"), reason="Transformer Engine under v1.7.0 doesn't support MoE training.", @@ -152,6 +151,7 @@ def test_gpu_forward(self): output_te, _ = self.te_sequential_mlp(hidden_states, tokens_per_expert) assert torch.equal(output_local, output_te) + @pytest.mark.internal @pytest.mark.skipif( not is_te_min_version("1.7.0"), reason="Transformer Engine under v1.7.0 doesn't support MoE training.", @@ -176,6 +176,7 @@ def test_gpu_forward_with_one_local_expert(self): output_te, _ = te_sequential_mlp(hidden_states, tokens_per_expert) assert torch.equal(output_local, output_te) + @pytest.mark.internal @pytest.mark.skipif( not is_te_min_version("1.7.0"), reason="Transformer Engine under v1.7.0 doesn't support MoE training.", diff --git a/tests/unit_tests/transformer/moe/test_token_dispatcher.py b/tests/unit_tests/transformer/moe/test_token_dispatcher.py index e85f8512b..895cb291a 100644 --- a/tests/unit_tests/transformer/moe/test_token_dispatcher.py +++ b/tests/unit_tests/transformer/moe/test_token_dispatcher.py @@ -21,6 +21,7 @@ def __init__( ep_size, pp_size, cp_size=1, + moe_tp_size=None, data_parallel_random_init=False, num_moe_experts=8, moe_router_topk=2, @@ -32,11 +33,14 @@ def __init__( 
**kwargs, ): self.num_local_experts = num_moe_experts // ep_size + if moe_tp_size is None: + moe_tp_size = tp_size Utils.initialize_model_parallel( tensor_model_parallel_size=tp_size, pipeline_model_parallel_size=pp_size, expert_model_parallel_size=ep_size, context_parallel_size=cp_size, + expert_tensor_parallel_size=moe_tp_size, ) _set_random_seed(seed_=123, data_parallel_random_init=data_parallel_random_init) local_expert_indices_offset = ( @@ -45,12 +49,12 @@ def __init__( self.local_expert_indices = [ local_expert_indices_offset + i for i in range(self.num_local_experts) ] - self.config = TransformerConfig( tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size, pipeline_model_parallel_size=pp_size, context_parallel_size=cp_size, + expert_tensor_parallel_size=moe_tp_size, moe_router_topk=moe_router_topk, num_moe_experts=num_moe_experts, moe_router_load_balancing_type=moe_router_load_balancing_type, @@ -59,9 +63,8 @@ def __init__( moe_pad_expert_input_to_capacity=moe_pad_expert_input_to_capacity, moe_aux_loss_coeff=moe_aux_loss_coeff, num_layers=1, - moe_extended_tp=kwargs.get("moe_extended_tp", False), moe_grouped_gemm=kwargs.get("moe_grouped_gemm", False), - hidden_size=kwargs.get("hidden_size", 1024), + hidden_size=kwargs.get("hidden_size", 16), num_attention_heads=kwargs.get("num_attention_heads", 8), use_cpu_initialization=kwargs.get("use_cpu_initialization", True), sequence_parallel=tp_size > 1, @@ -69,19 +72,24 @@ def __init__( ) # init moe layer + self.moe_layer = self.new_moe_layer() + + def new_moe_layer(self): transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( - num_experts=num_moe_experts, moe_grouped_gemm=kwargs.get("moe_grouped_gemm", False) + num_experts=self.config.num_moe_experts, moe_grouped_gemm=self.config.moe_grouped_gemm ) - self.moe_layer = MoELayer( - self.config, transformer_layer_spec.submodules.mlp.submodules + moe_layer = MoELayer( + copy.deepcopy(self.config), transformer_layer_spec.submodules.mlp.submodules ).cuda() - self.moe_layer.set_layer_number(0) + moe_layer.set_layer_number(0) + return moe_layer def __del__(self): torch.distributed.barrier() torch.cuda.synchronize() Utils.destroy_model_parallel() + @pytest.mark.internal def dispatcher_dropless_test(self): moe_layer = self.moe_layer bs = 32 @@ -103,13 +111,7 @@ def dispatcher_dropless_test(self): moe_layer.token_dispatcher.token_permutation(hidden_states, probs, indices) ) - if self.config.moe_extended_tp: - scale = ( - moe_layer.config.tensor_model_parallel_size - * moe_layer.config.expert_model_parallel_size - ) - else: - scale = moe_layer.config.tensor_model_parallel_size + scale = moe_layer.config.expert_tensor_parallel_size permuted_local_hidden_states /= scale @@ -127,14 +129,13 @@ def dispatcher_dropless_test(self): hidden_states.grad, ans ), "Restored hidden states do not match original hidden states" - def dispacher_capacity_test(self): + @pytest.mark.internal + def dispatcher_capacity_test(self): moe_layer = self.moe_layer - hidden_states = torch.randn((256, moe_layer.config.hidden_size)) + hidden_states = torch.randn((16, moe_layer.config.hidden_size)) hidden_states = hidden_states.cuda() hidden_states.requires_grad = True probs, indices = moe_layer.router(hidden_states) - tp_size = moe_layer.config.tensor_model_parallel_size - tp_rank = parallel_state.get_tensor_model_parallel_rank() # Create the answer. 
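The dropless dispatcher test above now divides the permuted activations by expert_tensor_parallel_size instead of the old tensor-parallel (or extended-TP) factor. A toy sketch of why that scale restores the input, under the assumption that the allgather dispatcher hands the experts one copy of every token per expert-tensor-parallel rank and that unpermutation sums those copies back together:

import torch

etp_size = 4                      # stands in for config.expert_tensor_parallel_size
tokens = torch.randn(8, 16)       # [num_tokens, hidden_size]; hidden_size=16 as in the new test config

# Assumed replication by the allgather dispatch, followed by a summing unpermutation.
gathered = tokens.repeat(etp_size, 1)
restored = gathered.reshape(etp_size, 8, 16).sum(dim=0) / etp_size

assert torch.allclose(restored, tokens)
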
prob_mask = probs != 0 @@ -163,27 +164,17 @@ def dispacher_capacity_test(self): hidden_states.grad, restored_hidden_states_answer ), "Gradient of hidden states should be same as hidden states" + @pytest.mark.internal def dispatcher_drop_and_pad_test(self): "Test if the tokens are dropped and padded correctly" moe_layer = self.moe_layer - moe_layer_2 = copy.deepcopy(moe_layer) - hidden_states = torch.randn((256, moe_layer.config.hidden_size)).cuda() + + hidden_states = torch.randn((16, moe_layer.config.hidden_size)).cuda() hidden_states.requires_grad = True - # Create the answer. moe_layer.config.moe_pad_expert_input_to_capacity = False moe_layer.token_dispatcher.drop_and_pad = False - # Uncomment these lines to help bug location. - # hidden_states = torch.ones((8, moe_layer.config.hidden_size)).cuda() - # hidden_states = hidden_states * torch.range(1, 8).unsqueeze(1).cuda() - # hidden_states.requires_grad = True - # indices_1 = torch.tensor([[0, 0], [1, 1], [2, 2], [3, 3], [4, 4], [5, 5], [6, 6], [7, 7]]).cuda() - # probs_1 = torch.ones_like(indices_1) - # indices_2 = torch.tensor([[0, 0], [1, 1], [2, 2], [3, 3], [4, 4], [5, 5], [6, 6], [7, 7]]).cuda() - # probs_2 = torch.ones_like(indices_2) - # num_local_tokens_per_expert = torch.tensor([2, 2, 2, 2, 2, 2, 2, 2]).cuda() - probs_1, indices_1 = moe_layer.router(hidden_states) (permuted_input_1, tokens_per_expert) = moe_layer.token_dispatcher.token_permutation( hidden_states, probs_1, indices_1 @@ -198,6 +189,11 @@ def dispatcher_drop_and_pad_test(self): torch.cuda.synchronize() # End + moe_layer_2 = self.new_moe_layer() + moe_layer_2.load_state_dict(moe_layer.state_dict()) + moe_layer_2.config.moe_pad_expert_input_to_capacity = True + moe_layer_2.token_dispatcher.drop_and_pad = True + probs_2, indices_2 = moe_layer_2.router(hidden_states) (permuted_input_2, tokens_per_expert) = moe_layer_2.token_dispatcher.token_permutation( hidden_states, probs_2, indices_2 @@ -231,9 +227,12 @@ def setup_method(self, method): def teardown_method(self, method): Utils.destroy_model_parallel() + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal @pytest.mark.parametrize("tp_size,ep_size", [(8, 1), (1, 8), (2, 4), (1, 1)]) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev def test_forward_backward(self, tp_size, ep_size): container = MoEModelTestContainer( tp_size=tp_size, @@ -247,19 +246,27 @@ def test_forward_backward(self, tp_size, ep_size): container.dispatcher_dropless_test() + @pytest.mark.internal @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") @pytest.mark.internal - @pytest.mark.parametrize("tp_size,ep_size", [(2, 4)]) - def test_extend_tp_forward_backward(self, tp_size, ep_size): + @pytest.mark.parametrize( + "tp_size,ep_size,moe_tp_size", [(1, 1, 8), (1, 2, 4), (1, 4, 2), (2, 2, 4)] + ) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_moe_tp_forward_backward(self, tp_size, ep_size, moe_tp_size): container = MoEModelTestContainer( tp_size=tp_size, ep_size=ep_size, pp_size=1, + moe_tp_size=moe_tp_size, num_moe_experts=8, moe_router_topk=2, moe_router_load_balancing_type="aux_loss", moe_token_dispatcher_type="allgather", - moe_extended_tp=True, + sequence_parallel=True, + moe_grouped_gemm=True, + use_cpu_initialization=False, ) container.dispatcher_dropless_test() diff --git a/tests/unit_tests/transformer/moe/test_upcycling.py b/tests/unit_tests/transformer/moe/test_upcycling.py index b5a98c371..fc53d57ad 100644 --- 
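The test_moe_tp_forward_backward parametrization above replaces the removed moe_extended_tp path with an explicit moe_tp_size. In every listed (tp_size, ep_size, moe_tp_size) tuple the product ep_size * moe_tp_size fills the whole world, i.e. these cases run without expert data parallelism. A small check of that observation (the 8-GPU world size is an assumption carried over from the surrounding MoE unit tests, not stated in this hunk):

WORLD_SIZE = 8   # assumption: the MoE unit tests initialize an 8-GPU world

cases = [(1, 1, 8), (1, 2, 4), (1, 4, 2), (2, 2, 4)]   # (tp_size, ep_size, moe_tp_size)
for tp_size, ep_size, moe_tp_size in cases:
    # Every case tiles the world with expert ranks (EP x expert-TP), so expert DP is 1.
    assert ep_size * moe_tp_size == WORLD_SIZE
    # The attention TP size still divides the world size.
    assert WORLD_SIZE % tp_size == 0
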
a/tests/unit_tests/transformer/moe/test_upcycling.py +++ b/tests/unit_tests/transformer/moe/test_upcycling.py @@ -128,7 +128,6 @@ def teardown_method(self, method): destroy_num_microbatches_calculator() @pytest.mark.internal - @pytest.mark.flaky # TODO: Fix the test @pytest.mark.parametrize( ('tp_pp_ep', 'enable_vp', 'enable_grouped_gemm'), [((1, 1, 2), (False), (False))] ) diff --git a/tests/unit_tests/transformer/test_retro_attention.py b/tests/unit_tests/transformer/test_retro_attention.py index d7c5a5f15..0f82399b0 100644 --- a/tests/unit_tests/transformer/test_retro_attention.py +++ b/tests/unit_tests/transformer/test_retro_attention.py @@ -2,6 +2,7 @@ import types +import pytest import torch from megatron.core.models.retro import RetroConfig, get_retro_decoder_block_spec diff --git a/tools/autoformat.sh b/tools/autoformat.sh index 4595b9cbd..ecec87e3e 100755 --- a/tools/autoformat.sh +++ b/tools/autoformat.sh @@ -10,7 +10,7 @@ if [[ $GIT_MAJOR -eq 2 && $GIT_MINOR -lt 31 ]]; then exit 1 fi -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) CHECK_ONLY=${CHECK_ONLY:-false} SKIP_DOCS=${SKIP_DOCS:-false} @@ -20,7 +20,6 @@ ADDITIONAL_ARGS="" ADDITIONAL_BLACK_ARGS="" ADDITIONAL_PYLINT_ARGS="" - if [[ $CHECK_ONLY == true ]]; then ADDITIONAL_ARGS="--check" ADDITIONAL_BLACK_ARGS="--diff" @@ -34,6 +33,7 @@ if [[ -n "$CHANGED_FILES" ]]; then black --skip-magic-trailing-comma $ADDITIONAL_ARGS $ADDITIONAL_BLACK_ARGS --verbose $CHANGED_FILES isort $ADDITIONAL_ARGS $CHANGED_FILES pylint $ADDITIONAL_PYLINT_ARGS $CHANGED_FILES + mypy --explicit-package-bases --follow-imports=skip $CHANGED_FILES || true else echo Changeset is empty, all good. fi diff --git a/tools/checkpoint/loader_llama_mistral.py b/tools/checkpoint/loader_llama_mistral.py index ea803c554..ce470d0f7 100644 --- a/tools/checkpoint/loader_llama_mistral.py +++ b/tools/checkpoint/loader_llama_mistral.py @@ -19,9 +19,9 @@ def add_arguments(parser): # TODO(jbarker): Need assertion to make sure *exactly* one of these is used parser.add_argument('--model-size', type=str, required=True, - choices=['llama2-7B', 'llama2-13B', 'llama2-70B', 'llama2-7Bf', 'llama2-13Bf', 'llama2-70Bf', 'llama3-8B', 'llama3-70B', 'llama3-8Bf', 'llama3-70Bf', 'mistral-7B', 'mistral-7Bf', 'yi-34B'], - help='Model size can be `llama2-7B`, `llama2-13B`, `llama2-70B`, `llama3-8B`, `llama3-70B`, `mistral-7B` (for pretrained models), ' - 'and `llama2-7Bf`, `llama2-13Bf`, `llama2-70Bf`, `llama3-8Bf`, `llama3-70bf` and `mistral-7Bf` (for chat-finetuned models).') + choices=['llama2-7B', 'llama2-13B', 'llama2-70B', 'llama2-7Bf', 'llama2-13Bf', 'llama2-70Bf', 'llama3-8B', 'llama3-70B', 'llama3-8Bf', 'llama3-70Bf', 'mistral-7B', 'mistral-7Bf', 'yi-34B', 'qwen2.5-7B', 'qwen2.5-72B', 'qwen2.5-7Bf', 'qwen2.5-72Bf'], + help='Model size can be `llama2-7B`, `llama2-13B`, `llama2-70B`, `llama3-8B`, `llama3-70B`, `mistral-7B`, `qwen2.5-7B`, `qwen2.5-72B` (for pretrained models), ' + 'and `llama2-7Bf`, `llama2-13Bf`, `llama2-70Bf`, `llama3-8Bf`, `llama3-70bf`, `mistral-7Bf`, `qwen2.5-7Bf`, and `qwen2.5-72Bf` (for chat-finetuned models).') parser.add_argument('--checkpoint-type', type=str, required=True, help='Type of checkpoint to convert, options are "meta" or "hf"') parser.add_argument('--bf16', action='store_true', help='Whether to load weights in bf16.') @@ -35,6 +35,7 @@ def add_arguments(parser): help='Tokenizer model file.') group.add_argument('--megatron-path', type=str, 
default=None, help='Base directory of Megatron repository') + group.add_argument("--make-vocab-size-divisible-by", type=int, default=None, help="Make vocab size divisible by") group.add_argument('--loader-transformer-impl', default='local', choices=['local', 'transformer_engine'], help='Which Transformer implementation to use.') @@ -59,6 +60,10 @@ def verify_transformers_version(): "mistral-7B": 1, "mistral-7Bf": 1, "yi-34B": 8, + "qwen2.5-7B": 1, + "qwen2.5-7Bf": 1, + "qwen2.5-72B": 8, + "qwen2.5-72Bf": 8, } @@ -353,6 +358,13 @@ def set_attn_state(args, layer, hf_layer): hf_attn.k_proj.weight.reshape((ng, dim, -1)), hf_attn.v_proj.weight.reshape((ng, dim, -1)), ], dim=1).reshape((-1, args.hidden_size))) + if args.add_qkv_bias: + attn.query_key_value.bias.data.copy_(torch.cat([ + hf_attn.q_proj.bias.reshape((ng, dim*nh//ng)), + hf_attn.k_proj.bias.reshape((ng, dim)), + hf_attn.v_proj.bias.reshape((ng, dim)), + ], dim=1).reshape(-1)) + attn.dense.weight.data.copy_(hf_attn.o_proj.weight) @@ -445,19 +457,28 @@ def _load_checkpoint(queue, args): '--no-save-rng', '--mock-data', # To pass the "blend data checks" in arguments.py '--no-initialization', - '--load', args.load_dir + '--load', args.load_dir, + '--no-one-logger', ] + if args.make_vocab_size_divisible_by is not None: + sys.argv.extend(["--make-vocab-size-divisible-by", str(args.make_vocab_size_divisible_by)]) + margs = parse_args() margs.tokenizer_model = args.tokenizer_model load_args_from_checkpoint(margs) - if "llama2" in args.model_size or "yi" in args.model_size: + if "llama2" in args.model_size: margs.tokenizer_type = "Llama2Tokenizer" + elif "yi" in args.model_size: + margs.tokenizer_type = "HuggingFaceTokenizer" elif "llama3" in args.model_size: margs.tokenizer_type = "HuggingFaceTokenizer" elif "mistral" in args.model_size: margs.tokenizer_type = "HuggingFaceTokenizer" + elif "qwen2.5" in args.model_size: + margs.tokenizer_type = "HuggingFaceTokenizer" + margs.add_qkv_bias = True # Arguments do sanity checks on the world size, but we don't care, # so trick it into thinking we are plenty of processes. @@ -530,11 +551,12 @@ def check_for_arg(arg_name, default=None): md.output_layer = margs.untie_embeddings_and_output_weights md.position_embedding_type = margs.position_embedding_type md.linear_bias = margs.add_bias_linear + md.qkv_bias = margs.add_qkv_bias md.norm_has_bias = False md.swiglu = margs.swiglu md.previous_tensor_parallel_size = margs.tensor_model_parallel_size md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size - md.make_vocab_size_divisible_by = None + md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by md.checkpoint_args = margs md.consumed_train_samples = 0 md.consumed_valid_samples = 0 @@ -591,8 +613,10 @@ def queue_put(name, msg): dense_weight.append(layer.self_attention.dense.weight.data) mlp_l0_weight.append(layer.mlp.dense_h_to_4h.weight.data) mlp_l1_weight.append(layer.mlp.dense_4h_to_h.weight.data) - if md.linear_bias: + + if md.qkv_bias: qkv_bias.append(layer.self_attention.query_key_value.bias.data) + if md.linear_bias: mlp_l0_bias.append(layer.mlp.dense_h_to_4h.bias.data) # Handle gated linear units. 
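For the new qwen2.5 sizes the Llama/Mistral loader forces add_qkv_bias and copies the attention biases, interleaving each group's query bias with its key and value biases so the result matches Megatron's fused query_key_value layout. A standalone sketch of that reshape with toy sizes; nh (heads), ng (key/value groups) and dim (head dimension) mirror the names used in set_attn_state:

import torch

nh, ng, dim = 8, 2, 4
q_bias = torch.randn(nh * dim)      # hf_attn.q_proj.bias
k_bias = torch.randn(ng * dim)      # hf_attn.k_proj.bias
v_bias = torch.randn(ng * dim)      # hf_attn.v_proj.bias

# Same interleaving as the loader: per KV group, that group's query biases,
# then its key bias, then its value bias, flattened into one fused vector.
fused = torch.cat([
    q_bias.reshape(ng, dim * nh // ng),
    k_bias.reshape(ng, dim),
    v_bias.reshape(ng, dim),
], dim=1).reshape(-1)

assert fused.numel() == (nh + 2 * ng) * dim
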
@@ -609,8 +633,9 @@ def queue_put(name, msg): message["qkv weight"] = torch.cat(qkv_weight, dim=0) message["dense weight"] = torch.cat(dense_weight, dim=1) message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) - if md.linear_bias: + if md.qkv_bias: message["qkv bias"] = torch.cat(qkv_bias, dim=0) + if md.linear_bias: if md.swiglu: for tp_rank in range(tp_size): mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) diff --git a/tools/checkpoint/loader_mcore.py b/tools/checkpoint/loader_mcore.py index 0be90c2ab..9185969b3 100644 --- a/tools/checkpoint/loader_mcore.py +++ b/tools/checkpoint/loader_mcore.py @@ -6,7 +6,8 @@ import torch import types -from utils import get_mcore_transformer_block_key, print_memory_usage +from schema_mcore import get_model_schema +from utils import print_memory_usage def add_arguments(parser): @@ -68,6 +69,7 @@ def _load_checkpoint(queue, args): '--load', args.load_dir, '--position-embedding-type', args.position_embedding_type, '--exit-on-missing-checkpoint', + '--no-one-logger', ] margs = parse_args() @@ -81,6 +83,10 @@ def _load_checkpoint(queue, args): margs.fp16 = checkpoint_args.fp16 margs.bf16 = checkpoint_args.bf16 + # Expert parallelism requires sequence parallelism. + if margs.expert_model_parallel_size > 1: + margs.sequence_parallel = True + # Validate margs. margs = validate_args(margs) @@ -180,6 +186,7 @@ def get_models(count, dtype): mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) + mpu.set_expert_model_parallel_world_size(margs.expert_model_parallel_size) fused_kernels.load(margs) # Get true (non-padded) vocab size @@ -209,7 +216,7 @@ def get_models(count, dtype): # older models only supported LayerNorm norm_has_bias = True - # metadata + # Metadata. md = types.SimpleNamespace() md.model_type = args.model_type md.num_layers = margs.num_layers @@ -224,6 +231,7 @@ def get_models(count, dtype): md.output_layer = margs.untie_embeddings_and_output_weights md.position_embedding_type = margs.position_embedding_type md.linear_bias = margs.add_bias_linear + md.qkv_bias = margs.add_qkv_bias md.norm_has_bias = norm_has_bias md.swiglu = margs.swiglu md.previous_tensor_parallel_size = margs.tensor_model_parallel_size @@ -233,12 +241,7 @@ def get_models(count, dtype): md.checkpoint_args = checkpoint_args md.use_legacy_models = margs.use_legacy_models - # Get transformer block (named either 'encoder' or 'decoder'). - transformer_block_key = get_mcore_transformer_block_key(md.model_type) - def get_transformer_block(_model): - return getattr(_model, transformer_block_key) - - # Get first pipe stage + # Get first pipe stage. mpu.set_pipeline_model_parallel_rank(0) all_models = [get_models(tp_size, md.params_dtype)] models = all_models[0][0] @@ -252,19 +255,26 @@ def queue_put(name, msg): msg["name"] = name queue.put(msg) - # Send embeddings + # Model schema. + schema = get_model_schema( + md.model_type, + margs.transformer_impl, + margs.num_experts, + margs.expert_model_parallel_size, + ) + + # Send embeddings. 
+ embeddings = [ schema.get("embeddings", model) for model in models ] message = { - "word embeddings": torch.cat( - [models[tp_rank].embedding.word_embeddings.weight.data for tp_rank in range(tp_size)], - dim = 0) + "word embeddings": torch.cat([ e["word"] for e in embeddings ], dim=0) } if md.position_embedding_type == 'learned_absolute': - message["position embeddings"] = models[0].embedding.position_embeddings.weight.data + message["position embeddings"] = embeddings[0]["pos"] else: - assert not hasattr(models[0].embedding, 'position_embeddings') - + assert embeddings[0]["pos"] is None queue_put("embeddings", message) + # Send layers. total_layer_num = 0 for vp_rank in range(vp_size): mpu.set_virtual_pipeline_model_parallel_rank(vp_rank) @@ -274,20 +284,19 @@ def queue_put(name, msg): if vp_rank == 0: all_models.append(get_models(tp_size, md.params_dtype)) models = all_models[pp_rank][vp_rank] - for layer_num in range(len(get_transformer_block(models[0]).layers)): + for layer_num in range(schema.get_num_layers(models[0])): message = {} # Get non-parallel tensors from tp_rank 0 - layer = get_transformer_block(models[0]).layers[layer_num] - message["input norm weight"] = layer.self_attention.linear_qkv.layer_norm_weight.data - if norm_has_bias: - message["input norm bias"] = layer.self_attention.linear_qkv.layer_norm_bias.data - message["post norm weight"] = layer.mlp.linear_fc1.layer_norm_weight.data + layer = schema.get_layer(models[0], layer_num) + message["input norm weight"] = layer["self_attn_norm_weight"] + message["post norm weight"] = layer["mlp_norm_weight"] if norm_has_bias: - message["post norm bias"] = layer.mlp.linear_fc1.layer_norm_bias.data + message["input norm bias"] = layer["self_attn_norm_bias"] + message["post norm bias"] = layer["mlp_norm_bias"] if md.linear_bias: - message["dense bias"] = layer.self_attention.linear_proj.bias.data - message["mlp l1 bias"] = layer.mlp.linear_fc2.bias.data + message["dense bias"] = layer["self_attn_proj_bias"] + message["mlp l1 bias"] = layer["mlp_fc2_bias"] # Grab all parallel tensors for this layer qkv_weight = [] @@ -297,14 +306,15 @@ def queue_put(name, msg): mlp_l0_bias = [] mlp_l1_weight = [] for tp_rank, model in enumerate(models): - layer = get_transformer_block(model).layers[layer_num] - qkv_weight.append(layer.self_attention.linear_qkv.weight.data) - dense_weight.append(layer.self_attention.linear_proj.weight.data) - mlp_l0_weight.append(layer.mlp.linear_fc1.weight.data) - mlp_l1_weight.append(layer.mlp.linear_fc2.weight.data) + layer = schema.get_layer(model, layer_num) + qkv_weight.append(layer["self_attn_qkv_weight"]) + dense_weight.append(layer["self_attn_proj_weight"]) + mlp_l0_weight.append(layer["mlp_fc1_weight"]) + mlp_l1_weight.append(layer["mlp_fc2_weight"]) + if md.qkv_bias: + qkv_bias.append(layer["self_attn_qkv_bias"]) if md.linear_bias: - qkv_bias.append(layer.self_attention.linear_qkv.bias.data) - mlp_l0_bias.append(layer.mlp.linear_fc1.bias.data) + mlp_l0_bias.append(layer["mlp_fc1_bias"]) # Handle gated linear units if md.swiglu: @@ -320,8 +330,9 @@ def queue_put(name, msg): message["qkv weight"] = torch.cat(qkv_weight, dim=0) message["dense weight"] = torch.cat(dense_weight, dim=1) message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) - if md.linear_bias: + if md.qkv_bias: message["qkv bias"] = torch.cat(qkv_bias, dim=0) + if md.linear_bias: if md.swiglu: for tp_rank in range(tp_size): mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) @@ -334,46 +345,55 @@ def queue_put(name, msg): 
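When the refactored mcore loader assembles a layer message it concatenates per-rank shards back into full tensors: column-parallel weights (the fused QKV and MLP fc1) are rejoined along dim 0, while row-parallel weights (attention projection and MLP fc2) are rejoined along dim 1. A toy illustration of that convention with made-up shapes rather than real checkpoint tensors (grouped-query attention is ignored for simplicity):

import torch

tp_size, hidden = 2, 8

# Column-parallel layer (e.g. linear_qkv / linear_fc1): each rank holds a slice of the output rows.
col_shards = [torch.randn(3 * hidden // tp_size, hidden) for _ in range(tp_size)]
qkv_weight = torch.cat(col_shards, dim=0)     # -> [3 * hidden, hidden]

# Row-parallel layer (e.g. linear_proj / linear_fc2): each rank holds a slice of the input columns.
row_shards = [torch.randn(hidden, hidden // tp_size) for _ in range(tp_size)]
proj_weight = torch.cat(row_shards, dim=1)    # -> [hidden, hidden]

assert qkv_weight.shape == (3 * hidden, hidden)
assert proj_weight.shape == (hidden, hidden)
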
total_layer_num = total_layer_num + 1 - # Send final norm from tp_rank 0 + # Send final norm from tp_rank 0. + final_norm = schema.get("final_norm", models[0]) message = { - "weight": get_transformer_block(models[0]).final_layernorm.weight.data, + "weight": final_norm["weight"], } if norm_has_bias: - message["bias"] = get_transformer_block(models[0]).final_layernorm.bias.data + message["bias"] = final_norm["bias"] queue_put("final norm", message) + # Send output layer. if md.output_layer: + output_layer_ranks = [ schema.get("output_layer", m) for m in models ] message = { - "weight": torch.cat( - [models[tp_rank].output_layer.weight.data for tp_rank in range(tp_size)], - dim = 0) + "weight": torch.cat([r["weight"] for r in output_layer_ranks], dim=0), } queue_put("output layer", message) - - # Send BERT lm head and binary head if it exists + # Send BERT params. if md.model_type == 'BERT': + + # Pooler. + pooler = schema.get("pooler", models[0]) message = { - "weight": models[0].pooler.dense.weight.data, - "bias": models[0].pooler.dense.bias.data + "weight": pooler["weight"], + "bias": pooler["bias"], } queue_put("pooler", message) + # LM head. + lm_head = schema.get("lm_head", models[0]) message = { - "dense weight": models[0].lm_head.dense.weight.data, - "dense bias": models[0].lm_head.dense.bias.data, - "norm weight": models[0].lm_head.layer_norm.weight.data, + "dense weight": lm_head["dense_weight"], + "dense bias": lm_head["dense_bias"], + "norm weight": lm_head["norm_weight"], } if norm_has_bias: - message["norm bias"] = models[0].lm_head.layer_norm.bias.data + message["norm bias"] = lm_head["norm_bias"], queue_put("lm head", message) + # Binary head. if md.bert_binary_head: + binary_head = schema.get("binary_head", models[0]) message = { - "weight": models[0].binary_head.weight.data, - "bias": models[0].binary_head.bias.data + "weight": binary_head["weight"], + "bias": binary_head["bias"], } queue_put("binary head", message) + + # Done. 
queue.put("done") def load_checkpoint(queue, args): diff --git a/tools/checkpoint/loader_megatron.py b/tools/checkpoint/loader_megatron.py index 72edcd9db..d8f684745 100644 --- a/tools/checkpoint/loader_megatron.py +++ b/tools/checkpoint/loader_megatron.py @@ -66,6 +66,7 @@ def _load_checkpoint(queue, args): '--load', args.load_dir, '--position-embedding-type', args.position_embedding_type, '--exit-on-missing-checkpoint', + '--no-one-logger', ] margs = parse_args() @@ -218,6 +219,7 @@ def get_models(count, dtype): md.output_layer = margs.untie_embeddings_and_output_weights md.position_embedding_type = margs.position_embedding_type md.linear_bias = margs.add_bias_linear + md.qkv_bias = margs.add_qkv_bias md.norm_has_bias = norm_has_bias md.swiglu = margs.swiglu md.previous_tensor_parallel_size = margs.tensor_model_parallel_size @@ -290,8 +292,9 @@ def queue_put(name, msg): dense_weight.append(layer.self_attention.dense.weight.data) mlp_l0_weight.append(layer.mlp.dense_h_to_4h.weight.data) mlp_l1_weight.append(layer.mlp.dense_4h_to_h.weight.data) - if md.linear_bias: + if md.qkv_bias: qkv_bias.append(layer.self_attention.query_key_value.bias.data) + if md.linear_bias: mlp_l0_bias.append(layer.mlp.dense_h_to_4h.bias.data) # Handle gated linear units @@ -308,8 +311,9 @@ def queue_put(name, msg): message["qkv weight"] = torch.cat(qkv_weight, dim=0) message["dense weight"] = torch.cat(dense_weight, dim=1) message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) - if md.linear_bias: + if md.qkv_bias: message["qkv bias"] = torch.cat(qkv_bias, dim=0) + if md.linear_bias: if md.swiglu: for tp_rank in range(tp_size): mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) diff --git a/tools/checkpoint/loader_mixtral_hf.py b/tools/checkpoint/loader_mixtral_hf.py index 9ff09f8df..131d6dc60 100644 --- a/tools/checkpoint/loader_mixtral_hf.py +++ b/tools/checkpoint/loader_mixtral_hf.py @@ -188,7 +188,8 @@ def _load_checkpoint(queue, args): '--no-initialization', '--mock-data', # To pass the "blend data checks" in arguments.py '--transformer-impl', 'transformer_engine', - '--load', args.load_dir + '--load', args.load_dir, + '--no-one-logger', ] margs = parse_args() diff --git a/tools/checkpoint/saver_mcore.py b/tools/checkpoint/saver_mcore.py index 6aec90e41..2caf26a9a 100644 --- a/tools/checkpoint/saver_mcore.py +++ b/tools/checkpoint/saver_mcore.py @@ -1,268 +1,12 @@ # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
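The legacy Megatron loader above gets the same split as the other loaders: attention biases now travel with md.qkv_bias (from --add-qkv-bias) independently of md.linear_bias (from --add-bias-linear), so a Qwen2.5-style checkpoint with attention biases but no MLP biases still round-trips. A hedged sketch of the gating; pack_biases is a hypothetical helper and the dictionary keys are borrowed from the mcore schema, not from this file:

def pack_biases(md, layer, message):
    # Hypothetical helper mirroring the loaders' new bias gating.
    if md.qkv_bias:                      # e.g. Qwen2.5: attention biases only
        message["qkv bias"] = layer["self_attn_qkv_bias"]
    if md.linear_bias:                   # models with biases on all linear layers
        message["dense bias"] = layer["self_attn_proj_bias"]
        message["mlp l1 bias"] = layer["mlp_fc2_bias"]
    return message
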
- +from importlib.metadata import version import os +from packaging.version import Version as PkgVersion import sys + import torch -from setter import ModelSetter -from utils import get_mcore_transformer_block_key, print_memory_usage -from megatron.core.utils import get_te_version, is_te_min_version - - -class MCoreSetter(ModelSetter): - - transformer_block_key = None - - @classmethod - def get_transformer_block(cls, model): - return getattr(model, cls.transformer_block_key) - - @classmethod - def has_position_embeddings(cls, model): - return hasattr(model.embedding, "position_embeddings") - - @classmethod - def set_embeddings( - cls, - model, - word=None, - pos=None, - ): - cls.set_tensor(model.embedding.word_embeddings.weight, word) - if pos is not None: - cls.set_tensor(model.embedding.position_embeddings.weight, pos) - - @classmethod - def set_final_norm( - cls, - model, - weight=None, - bias=None, - ): - block = cls.get_transformer_block(model) - cls.set_tensor(block.final_layernorm.weight, weight) - if bias is not None: - cls.set_tensor(block.final_layernorm.bias, bias) - - @classmethod - def set_output_word_embeddings( - cls, - model, - emb=None, - ): - cls.set_tensor(model.embedding.word_embeddings.weight, emb) - - @classmethod - def set_output_layer( - cls, - model, - weight=None, - ): - cls.set_tensor(model.output_layer.weight, weight) - - @classmethod - def set_pooler( - cls, - model, - weight=None, - bias=None, - ): - cls.set_tensor(model.pooler.dense.weight, weight) - if bias is not None: - cls.set_tensor(model.pooler.dense.bias, bias) - - @classmethod - def set_lm_head( - cls, - model, - dense_weight=None, - dense_bias=None, - norm_weight=None, - norm_bias=None, - ): - - cls.set_tensor(model.lm_head.dense.weight, dense_weight) - if dense_bias is not None: - cls.set_tensor(model.lm_head.dense.bias, dense_bias) - - cls.set_tensor(model.lm_head.layer_norm.weight, norm_weight) - if norm_bias is not None: - cls.set_tensor(model.lm_head.layer_norm.bias, norm_bias) - - @classmethod - def set_binary_head( - cls, - model, - weight=None, - bias=None, - ): - cls.set_tensor(model.binary_head.weight, weight) - if bias is not None: - cls.set_tensor(model.binary_head.bias, bias) - - -class MCoreLocalSetter(MCoreSetter): - - @classmethod - def set_layer( - cls, - model, - layer_idx, - self_attn_norm_weight=None, - self_attn_norm_bias=None, - self_attn_qkv_weight=None, - self_attn_qkv_bias=None, - self_attn_proj_weight=None, - self_attn_proj_bias=None, - mlp_norm_weight=None, - mlp_norm_bias=None, - mlp_fc1_weight=None, - mlp_fc1_bias=None, - mlp_fc2_weight=None, - mlp_fc2_bias=None, - ): - - block = cls.get_transformer_block(model) - l = block.layers[layer_idx] - - # Self attention. - cls.set_tensor(l.input_layernorm.weight, self_attn_norm_weight) - if self_attn_norm_bias is not None: - cls.set_tensor(l.input_layernorm.bias, self_attn_norm_bias) - - cls.set_tensor(l.self_attention.linear_qkv.weight, self_attn_qkv_weight) - if self_attn_qkv_bias is not None: - cls.set_tensor(l.self_attention.linear_qkv.bias, self_attn_qkv_bias) - - cls.set_tensor(l.self_attention.linear_proj.weight, self_attn_proj_weight) - if self_attn_proj_bias is not None: - cls.set_tensor(l.self_attention.linear_proj.bias, self_attn_proj_bias) - - # MLP. 
- cls.set_tensor(l.pre_mlp_layernorm.weight, mlp_norm_weight) - if mlp_norm_bias is not None: - cls.set_tensor(l.pre_mlp_layernorm.bias, mlp_norm_bias) - - cls.set_tensor(l.mlp.linear_fc1.weight, mlp_fc1_weight) - if mlp_fc1_bias is not None: - cls.set_tensor(l.mlp.linear_fc1.bias, mlp_fc1_bias) - - cls.set_tensor(l.mlp.linear_fc2.weight, mlp_fc2_weight) - if mlp_fc2_bias is not None: - cls.set_tensor(l.mlp.linear_fc2.bias, mlp_fc2_bias) - - -class MCoreTESetter(MCoreSetter): - - @classmethod - def set_layer( - cls, - model, - layer_idx, - self_attn_norm_weight=None, - self_attn_norm_bias=None, - self_attn_qkv_weight=None, - self_attn_qkv_bias=None, - self_attn_proj_weight=None, - self_attn_proj_bias=None, - mlp_norm_weight=None, - mlp_norm_bias=None, - mlp_fc1_weight=None, - mlp_fc1_bias=None, - mlp_fc2_weight=None, - mlp_fc2_bias=None, - ): - - block = cls.get_transformer_block(model) - l = block.layers[layer_idx] - - # Self attention. - cls.set_tensor(l.self_attention.linear_qkv.layer_norm_weight, self_attn_norm_weight) - if self_attn_norm_bias is not None: - cls.set_tensor(l.self_attention.linear_qkv.layer_norm_bias, self_attn_norm_bias) - - cls.set_tensor(l.self_attention.linear_qkv.weight, self_attn_qkv_weight) - if self_attn_qkv_bias is not None: - cls.set_tensor(l.self_attention.linear_qkv.bias, self_attn_qkv_bias) - - cls.set_tensor(l.self_attention.linear_proj.weight, self_attn_proj_weight) - if self_attn_proj_bias is not None: - cls.set_tensor(l.self_attention.linear_proj.bias, self_attn_proj_bias) - - # MLP. - cls.set_tensor(l.mlp.linear_fc1.layer_norm_weight, mlp_norm_weight) - if mlp_norm_bias is not None: - cls.set_tensor(l.mlp.linear_fc1.layer_norm_bias, mlp_norm_bias) - - cls.set_tensor(l.mlp.linear_fc1.weight, mlp_fc1_weight) - if mlp_fc1_bias is not None: - cls.set_tensor(l.mlp.linear_fc1.bias, mlp_fc1_bias) - - cls.set_tensor(l.mlp.linear_fc2.weight, mlp_fc2_weight) - if mlp_fc2_bias is not None: - cls.set_tensor(l.mlp.linear_fc2.bias, mlp_fc2_bias) - -class MCoreMoETESetter(MCoreSetter): - - @classmethod - def set_layer( - cls, - model, - layer_idx, - router_weight=None, - self_attn_norm_weight=None, - self_attn_norm_bias=None, - self_attn_qkv_weight=None, - self_attn_qkv_bias=None, - self_attn_proj_weight=None, - self_attn_proj_bias=None, - mlp_norm_weight=None, - mlp_norm_bias=None, - mlp_fc1_weight=None, - mlp_fc1_bias=None, - mlp_fc2_weight=None, - mlp_fc2_bias=None, - ): - - block = cls.get_transformer_block(model) - l = block.layers[layer_idx] - - # Self attention. - cls.set_tensor(l.self_attention.linear_qkv.layer_norm_weight, self_attn_norm_weight) - if self_attn_norm_bias is not None: - cls.set_tensor(l.self_attention.linear_qkv.layer_norm_bias, self_attn_norm_bias) - cls.set_tensor(l.self_attention.linear_qkv.weight, self_attn_qkv_weight) - if self_attn_qkv_bias is not None: - cls.set_tensor(l.self_attention.linear_qkv.bias, self_attn_qkv_bias) - cls.set_tensor(l.self_attention.linear_proj.weight, self_attn_proj_weight) - if self_attn_proj_bias is not None: - cls.set_tensor(l.self_attention.linear_proj.bias, self_attn_proj_bias) - - # MLP. 
- cls.set_tensor(l.pre_mlp_layernorm.weight, mlp_norm_weight) - if model.config.normalization == "LayerNorm": - cls.set_tensor(l.pre_mlp_layernorm.bias, mlp_norm_bias) - - cls.set_tensor(l.mlp.router.weight, router_weight) - - num_local_experts = mlp_fc1_weight.shape[0] - for expert_idx in range(num_local_experts): - cls.set_tensor(l.mlp.experts.local_experts[expert_idx].linear_fc1.weight, mlp_fc1_weight[expert_idx]) - cls.set_tensor(l.mlp.experts.local_experts[expert_idx].linear_fc2.weight, mlp_fc2_weight[expert_idx]) - - -def get_model_setter(model_type, transformer_impl, num_experts=0): - if num_experts is not None and num_experts > 0: - # Only support TE setter for MOE - assert transformer_impl == "transformer_engine" - setter = MCoreMoETESetter - else: - setter = { - "local" : MCoreLocalSetter, - "transformer_engine" : MCoreTESetter, - }[transformer_impl] - setter.transformer_block_key = get_mcore_transformer_block_key(model_type) - return setter +from schema_mcore import get_model_schema def add_arguments(parser): @@ -287,8 +31,9 @@ def add_arguments(parser): def save_checkpoint(queue, args): # Transformer engine >= 0.12.0, for CPU initialization. - assert is_te_min_version("0.12.0"), \ - "transformer engine version: %s (>=0.12.0 required)." % get_te_version() + te_version = PkgVersion(version("transformer-engine")) + assert te_version >= PkgVersion("0.12.0"), \ + "transformer engine version: %s (>=0.12.0 required)." % te_version # Search in directory above this sys.path.append(os.path.abspath( @@ -389,6 +134,7 @@ def check_message(msg): '--save-interval', '1', '--save', args.save_dir, '--ckpt-format', 'torch', # only 'torch' supported for conversion + '--no-one-logger', ] if md.make_vocab_size_divisible_by is not None: @@ -534,8 +280,13 @@ def pad_weight(orig_word_embed, true_vocab_size): # Split into new tensor model parallel sizes out_word_embed = torch.chunk(full_word_embed, args.target_tensor_parallel_size, dim=0) - # Parameter setter class. - setter = get_model_setter(md.model_type, margs.transformer_impl, margs.num_experts) + # Model schema. + schema = get_model_schema( + md.model_type, + margs.transformer_impl, + margs.num_experts, + margs.expert_model_parallel_size, + ) # Construct a 3D(PPxEPxTP) arry for models, fill it with None models = [[[None for _ in range(args.target_tensor_parallel_size)] for _ in range(args.target_expert_parallel_size)] for _ in range(args.target_pipeline_parallel_size)] @@ -554,12 +305,11 @@ def get_local_model(pp_rank, ep_rank, tp_rank): for tp_rank in range(args.target_tensor_parallel_size): model = get_local_model(0, ep_rank, tp_rank) if pos_embed is None: - assert not setter.has_position_embeddings(model) - setter.set_embeddings( - model, - word=out_word_embed[tp_rank], - pos=pos_embed, - ) + assert not schema.has_position_embeddings(model) + schema.set("embeddings", model, { + "pos" : pos_embed, + "word" : out_word_embed[tp_rank], + }) def chunk_weight(weight, parallel_mode, tp_size=1, ep_size=1): assert parallel_mode in ["row", "column"] @@ -600,9 +350,10 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): # ------------------ total_layer_num = 0 for pp_rank in range(args.target_pipeline_parallel_size): + mpu.set_pipeline_model_parallel_rank(pp_rank) # initial the first module in pp stage to get the layer_num, pooler, lm_head. 
binary_head get_local_model(pp_rank,0,0) - for layer_id in range(len(setter.get_transformer_block(models[pp_rank][0][0]).layers)): + for layer_id in range(schema.get_num_layers(models[pp_rank][0][0])): msg = queue_get(f"transformer layer {total_layer_num}") # duplicated tensors @@ -628,10 +379,11 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): else: mlp_l0_weight = chunk_weight(msg.pop("mlp l0 weight"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) + if md.qkv_bias: + qkv_bias = chunk_bias(msg.pop("qkv bias"), 'column', args.target_tensor_parallel_size) if md.linear_bias: dense_bias = msg.pop("dense bias") mlp_l1_bias = chunk_bias(msg.pop("mlp l1 bias"), 'row', args.target_tensor_parallel_size, args.target_expert_parallel_size) - qkv_bias = chunk_bias(msg.pop("qkv bias"), 'column', args.target_tensor_parallel_size) if md.swiglu: mlp_l0_bias_W = chunk_bias(msg.pop("mlp l0 bias W"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) mlp_l0_bias_V = chunk_bias(msg.pop("mlp l0 bias V"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) @@ -662,9 +414,12 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): "self_attn_norm_bias" : input_norm_bias if md.norm_has_bias else None, "mlp_norm_bias" : post_norm_bias if md.norm_has_bias else None, }) + if md.qkv_bias: + params_dict.update({ + "self_attn_qkv_bias" : qkv_bias[tp_rank] + }) if md.linear_bias: params_dict.update({ - "self_attn_qkv_bias" : qkv_bias[tp_rank], "self_attn_proj_bias" : dense_bias }) if margs.num_experts: @@ -682,7 +437,7 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): "router_weight": router }) model = get_local_model(pp_rank, ep_rank, tp_rank) - setter.set_layer(model, layer_id, **params_dict) + schema.set_layer(model, layer_id, params_dict) total_layer_num = total_layer_num + 1 check_message(msg) @@ -697,17 +452,15 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): for tp_rank in range(args.target_tensor_parallel_size)] for eptp_rank, model in enumerate(pp_local_models): tp_rank = eptp_rank % args.target_tensor_parallel_size - setter.set_final_norm( - model, - weight=final_norm_weight, - bias=final_norm_bias if md.norm_has_bias else None, - ) + schema.set("final_norm", model, { + "weight" : final_norm_weight, + "bias" : final_norm_bias if md.norm_has_bias else None, + }) if pp_rank != 0 and not md.output_layer: # Copy word embeddings to final pipeline rank - setter.set_output_word_embeddings( - model, - emb=out_word_embed[tp_rank], - ) + schema.set("output_layer", model, { + "weight" : out_word_embed[tp_rank], + }) del final_norm_weight if md.norm_has_bias: del final_norm_bias @@ -722,7 +475,9 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): output_layer_weight = torch.chunk(output_layer_weight, args.target_tensor_parallel_size, dim=0) for eptp_rank, model in enumerate(pp_local_models): tp_rank = eptp_rank % args.target_tensor_parallel_size - setter.set_output_layer(model, output_layer_weight[tp_rank]) + schema.set("output_layer", model, { + "weight" : output_layer_weight[tp_rank], + }) check_message(msg) msg = queue_get() @@ -734,11 +489,10 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): pooler_weight = msg.pop("weight") pooler_bias = msg.pop("bias") for model in pp_local_models: - setter.set_pooler( - model=model, - weight=pooler_weight, - bias=pooler_bias, - ) + schema.set("pooler", model, { + "weight" : pooler_weight, + "bias" : pooler_bias, + }) del pooler_weight 
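On the save side the full tensors coming off the queue are re-split for the target parallel sizes. Expert weights carry a leading expert dimension, so a column-parallel expert weight is split first across expert-parallel ranks and then tensor-parallel within each expert. The chunk_weight/chunk_bias helpers are only partially visible in this patch, so the ordering and shapes below are an assumption, sketched with torch.chunk:

import torch

num_experts, ffn, hidden = 8, 32, 16
target_ep, target_tp = 2, 2

# Full MoE fc1 weight with a leading expert dimension: [num_experts, ffn, hidden].
full = torch.randn(num_experts, ffn, hidden)

# Split experts across EP ranks first, then split each expert column-parallel across TP ranks.
per_ep = torch.chunk(full, target_ep, dim=0)
shards = [torch.chunk(w, target_tp, dim=1) for w in per_ep]

assert shards[0][0].shape == (num_experts // target_ep, ffn // target_tp, hidden)
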
del pooler_bias check_message(msg) @@ -755,13 +509,12 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): if md.norm_has_bias: lm_head_norm_bias = msg.pop("norm bias") for model in pp_local_models: - setter.set_lm_head( - model=model, - dense_weight=lm_head_dense_weight, - dense_bias=lm_head_dense_bias, - norm_weight=lm_head_norm_weight, - norm_bias=lm_head_norm_bias if md.norm_has_bias else None, - ) + schema.set("lm_head", model, { + "dense_weight" : lm_head_dense_weight, + "dense_bias" : lm_head_dense_bias, + "norm_weight" : lm_head_norm_weight, + "norm_bias" : lm_head_norm_bias if md.norm_has_bias else None, + }) check_message(msg) msg = queue_get() @@ -773,11 +526,10 @@ def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): binary_head_weight = msg.pop("weight") binary_head_bias = msg.pop("bias") for model in pp_local_models: - setter.set_binary_head( - model=model, - weight=binary_head_weight, - bias=binary_head_bias, - ) + schema.set("binary_head", model, { + "weight" : binary_head_weight, + "bias" : binary_head_bias, + }) check_message(msg) msg = queue_get() diff --git a/tools/checkpoint/saver_megatron.py b/tools/checkpoint/saver_megatron.py index b017c9ed9..9b11b9afe 100644 --- a/tools/checkpoint/saver_megatron.py +++ b/tools/checkpoint/saver_megatron.py @@ -116,6 +116,7 @@ def check_message(msg): '--save-interval', '1', '--save', args.save_dir, '--ckpt-format', 'torch', # only 'torch' supported for conversion + '--no-one-logger', ] if md.make_vocab_size_divisible_by is not None: @@ -295,8 +296,9 @@ def get_models(count, dtype, pre_process, post_process): else: mlp_l0_weight = torch.chunk(msg.pop("mlp l0 weight"), args.target_tensor_parallel_size, dim=0) - if md.linear_bias: + if md.qkv_bias: qkv_bias = torch.chunk(msg.pop("qkv bias"), args.target_tensor_parallel_size, dim=0) + if md.linear_bias: if md.swiglu: mlp_l0_bias_W = torch.chunk(msg.pop("mlp l0 bias W"), args.target_tensor_parallel_size, dim=0) mlp_l0_bias_V = torch.chunk(msg.pop("mlp l0 bias V"), args.target_tensor_parallel_size, dim=0) @@ -317,8 +319,9 @@ def get_models(count, dtype, pre_process, post_process): l.post_attention_norm.bias.data.copy_(post_norm_bias) l.mlp.dense_h_to_4h.weight.data.copy_(mlp_l0_weight[tp_rank]) l.mlp.dense_4h_to_h.weight.data.copy_(mlp_l1_weight[tp_rank]) - if md.linear_bias: + if md.qkv_bias: l.self_attention.query_key_value.bias.data.copy_(qkv_bias[tp_rank]) + if md.linear_bias: l.self_attention.dense.bias.data.copy_(dense_bias) l.mlp.dense_h_to_4h.bias.data.copy_(mlp_l0_bias[tp_rank]) l.mlp.dense_4h_to_h.bias.data.copy_(mlp_l1_bias) diff --git a/tools/checkpoint/schema_base.py b/tools/checkpoint/schema_base.py new file mode 100644 index 000000000..3940ed208 --- /dev/null +++ b/tools/checkpoint/schema_base.py @@ -0,0 +1,93 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Base model schema.""" + +import torch + + +class ModelSchema: + + def __init__(self, mapping): + self._mapping = dict(mapping) + + for key in ( + "embeddings", + "layer_prefix", + "layer", + "final_norm", + "output_layer", + "pooler", + "lm_head", + "binary_head", + ): + assert key in mapping + + def __getitem__(self, key): + return self._mapping[key] + + # Utilities. 
+ @classmethod + def _get_deep_attr(cls, obj, path): + assert isinstance(path, str) + path = path.split(".") + for key in path: + try: + obj = getattr(obj, key) + except AttributeError: + return None + if isinstance(obj, torch.Tensor): + obj = obj.data + return obj + + @classmethod + def _set_deep_tensor(cls, obj, path, src): + if src is None: + return + dst = cls._get_deep_attr(obj, path) + assert isinstance(src, torch.Tensor), "src is <%s>." % type(src).__name__ + assert isinstance(dst, torch.Tensor), "dst is <%s>." % type(dst).__name__ + assert not dst.requires_grad, "should be using '.data', from getter above." + dst.copy_(src) + + def _get_layers(self, model): + layers = self._get_deep_attr(model, self["layer_prefix"]) + assert layers is not None, "'layers' attribute not found." + return layers + + def get_num_layers(self, model): + return len(self._get_layers(model)) + + # Getters. + @classmethod + def _get(cls, schema, model): + return { k: cls._get_deep_attr(model, m) for k, m in schema.items() } + + def get(self, key, model): + return self._get(self[key], model) + + def get_layer(self, model, layer_idx): + schema = self["layer"] + layer = self._get_layers(model)[layer_idx] + params = self._get(schema, layer) + return params + + # Setters. + @classmethod + def _set(cls, schema, model, params): + for k, m in schema.items(): + if k in params: + cls._set_deep_tensor(model, m, params[k]) + + def set(self, key, model, params): + self._set(self[key], model, params) + + def set_layer(self, model, layer_idx, params): + schema = self["layer"] + layer = self._get_layers(model)[layer_idx] + self._set(schema, layer, params) + + # Other. + def has_position_embeddings(self, model): + pos_path = self["embeddings"]["pos"] + pos = self._get_deep_attr(model, pos_path) + return pos is not None diff --git a/tools/checkpoint/schema_mcore.py b/tools/checkpoint/schema_mcore.py new file mode 100644 index 000000000..ef90ff0aa --- /dev/null +++ b/tools/checkpoint/schema_mcore.py @@ -0,0 +1,143 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Mcore model schemas.""" + +import typing as T + +from schema_base import ModelSchema + + +def get_mcore_transformer_block_key(model_key): + return { + "GPT" : "decoder", + "BERT" : "encoder", + }[model_key] + + +class MCoreSchema(ModelSchema): + + def __init__(self, model_type, layer_schema): + block_key = get_mcore_transformer_block_key(model_type) + super().__init__({ + "embeddings" : { + "pos" : "embedding.position_embeddings.weight", + "word" : "embedding.word_embeddings.weight", + }, + "layer_prefix" : f"{block_key}.layers", + "layer" : layer_schema, + "final_norm" : { + "weight" : f"{block_key}.final_layernorm.weight", + "bias" : f"{block_key}.final_layernorm.bias", + }, + "output_layer" : { + "weight" : "output_layer.weight", + }, + "pooler" : { + "weight" : "pooler.dense.weight", + "bias" : "pooler.dense.bias", + }, + "lm_head" : { + "dense_weight" : "lm_head.dense.weight", + "dense_bias" : "lm_head.dense.bias", + "norm_weight" : "lm_head.layer_norm.weight", + "norm_bias" : "lm_head.layer_norm.bias", + }, + "binary_head" : { + "weight" : "binary_head.weight", + "bias" : "binary_head.bias", + }, + }) + + +class MCoreLocalSchema(MCoreSchema): + + def __init__(self, model_type): + super().__init__(model_type, layer_schema={ + + # Self attention. 
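The new ModelSchema base class reduces a converter to a dictionary of dotted attribute paths plus generic deep get/set helpers. A self-contained toy showing the round trip; it assumes it is run from tools/checkpoint so schema_base is importable, the module and paths are made up, and only the "embeddings" entry is populated while the other required keys are left empty:

import torch
from torch import nn

from schema_base import ModelSchema

class ToyModel(nn.Module):
    # Made-up module whose attribute paths match the toy mapping below.
    def __init__(self):
        super().__init__()
        self.embedding = nn.Module()
        self.embedding.word_embeddings = nn.Embedding(8, 4)
        self.embedding.position_embeddings = nn.Embedding(8, 4)

schema = ModelSchema({
    "embeddings": {"word": "embedding.word_embeddings.weight",
                   "pos": "embedding.position_embeddings.weight"},
    # The constructor insists on these keys existing; empty mappings suffice for the toy.
    "layer_prefix": "decoder.layers",
    "layer": {}, "final_norm": {}, "output_layer": {},
    "pooler": {}, "lm_head": {}, "binary_head": {},
})

model = ToyModel()
params = schema.get("embeddings", model)                  # {"word": tensor, "pos": tensor}
schema.set("embeddings", model, {"word": torch.zeros_like(params["word"])})
assert model.embedding.word_embeddings.weight.abs().sum() == 0
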
+ "self_attn_norm_weight" : "input_layernorm.weight", + "self_attn_norm_bias" : "input_layernorm.bias", + "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", + "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", + "self_attn_proj_weight" : "self_attention.linear_proj.weight", + "self_attn_proj_bias" : "self_attention.linear_proj.bias", + + # MLP. + "mlp_norm_weight" : "pre_mlp_layernorm.weight", + "mlp_norm_bias" : "pre_mlp_layernorm.bias", + "mlp_fc1_weight" : "mlp.linear_fc1.weight", + "mlp_fc1_bias" : "mlp.linear_fc1.bias", + "mlp_fc2_weight" : "mlp.linear_fc2.weight", + "mlp_fc2_bias" : "mlp.linear_fc2.bias", + + }) + + +class MCoreTESchema(MCoreSchema): + + def __init__(self, model_type): + super().__init__(model_type, layer_schema={ + + # Self attention. + "self_attn_norm_weight" : "self_attention.linear_qkv.layer_norm_weight", + "self_attn_norm_bias" : "self_attention.linear_qkv.layer_norm_bias", + "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", + "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", + + "self_attn_proj_weight" : "self_attention.linear_proj.weight", + "self_attn_proj_bias" : "self_attention.linear_proj.bias", + + # MLP. + "mlp_norm_weight" : "mlp.linear_fc1.layer_norm_weight", + "mlp_norm_bias" : "mlp.linear_fc1.layer_norm_bias", + "mlp_fc1_weight" : "mlp.linear_fc1.weight", + "mlp_fc1_bias" : "mlp.linear_fc1.bias", + "mlp_fc2_weight" : "mlp.linear_fc2.weight", + "mlp_fc2_bias" : "mlp.linear_fc2.bias", + + }) + + +class MCoreMoETESchema(MCoreSchema): + + def __init__(self, model_type, num_experts, expert_model_parallel_size): + num_local_experts = num_experts // expert_model_parallel_size + super().__init__(model_type, layer_schema={ + + # Self attention. + "self_attn_norm_weight" : "self_attention.linear_qkv.layer_norm_weight", + "self_attn_norm_bias" : "self_attention.linear_qkv.layer_norm_bias", + + "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", + "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", + + "self_attn_proj_weight" : "self_attention.linear_proj.weight", + "self_attn_proj_bias" : "self_attention.linear_proj.bias", + + # MLP. + "mlp_norm_weight" : "pre_mlp_layernorm.weight", + "mlp_norm_bias" : "pre_mlp_layernorm.bias", + + "router_weight" : "mlp.router.weight", + + **{f"mlp_fc1_weight.{expert_idx}" : f"mlp.experts.local_experts.{expert_idx}.linear_fc1.weight" for expert_idx in range(num_local_experts) }, + **{f"mlp_fc2_weight.{expert_idx}" : f"mlp.experts.local_experts.{expert_idx}.linear_fc2.weight" for expert_idx in range(num_local_experts) }, + + }) + + +def get_model_schema( + model_type: T.Literal["GPT", "BERT"], + transformer_impl: T.Literal["transformer_engine", "local"], + num_experts: T.Optional[int] = None, + expert_model_parallel_size: T.Optional[int] = None, +) -> MCoreSchema: + if num_experts is not None and num_experts > 0: + # Only support TE setter for MOE + assert transformer_impl == "transformer_engine" + assert isinstance(expert_model_parallel_size, int) + return MCoreMoETESchema(model_type, num_experts, expert_model_parallel_size) + return { + "local" : MCoreLocalSchema, + "transformer_engine" : MCoreTESchema, + }[transformer_impl](model_type) diff --git a/tools/checkpoint/setter.py b/tools/checkpoint/setter.py deleted file mode 100644 index 5e84cff95..000000000 --- a/tools/checkpoint/setter.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - - -class ModelSetter: - '''Model parameter setter. 
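get_model_schema then picks the concrete mapping: local or Transformer Engine layer paths for dense models, and the TE-only MoE schema (one mlp_fc1_weight.<i>/mlp_fc2_weight.<i> entry per local expert) when num_experts is set. A minimal selection sketch, again assuming tools/checkpoint is importable and omitting real model objects:

from schema_mcore import get_model_schema

dense_schema = get_model_schema("GPT", "transformer_engine")
moe_schema = get_model_schema("GPT", "transformer_engine",
                              num_experts=8, expert_model_parallel_size=2)

# The layer mapping is just dotted attribute paths, so converters can introspect it.
print(dense_schema["layer"]["self_attn_qkv_weight"])      # self_attention.linear_qkv.weight
print(sorted(k for k in moe_schema["layer"] if k.startswith("mlp_fc1_weight")))

As with the deleted get_model_setter, MoE conversion remains restricted to the Transformer Engine implementation via the assert inside get_model_schema.
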
- - See convert.py for a full list of supported parameters and their names. - ''' - - @classmethod - def set_tensor(cls, dst, src): - '''Copy (in-place) src tensor to dst tensor.''' - if src is not None: - dst.data.copy_(src) - - @classmethod - def has_position_embeddings(cls, model): - ''' - Return True if learned parameters exist for position embeddings (e.g., - learned absolute), and False otherwise (e.g., RoPE). - ''' - raise NotImplementedError - - @classmethod - def set_embeddings( - cls, - model, - word=None, - pos=None, - ): - '''Set word and position embeddings.''' - raise NotImplementedError - - @classmethod - def set_output_word_embeddings( - cls, - model, - emb=None, - ): - '''Set output word embeddings for final pipeline stage.''' - raise NotImplementedError - - @classmethod - def set_layer( - cls, - model, - layer_idx, - self_attn_norm_weight=None, - self_attn_norm_bias=None, - self_attn_qkv_weight=None, - self_attn_qkv_bias=None, - self_attn_proj_weight=None, - self_attn_proj_bias=None, - mlp_norm_weight=None, - mlp_norm_bias=None, - mlp_fc1_weight=None, - mlp_fc1_bias=None, - mlp_fc2_weight=None, - mlp_fc2_bias=None, - ): - '''Set layer parameters.''' - raise NotImplementedError - - @classmethod - def set_final_norm( - cls, - model, - weight=None, - bias=None, - ): - '''Set final norm parameters (i.e., after last transformer layer).''' - raise NotImplementedError - - @classmethod - def set_output_layer( - cls, - model, - weight=None, - ): - '''Set output (i.e., 'dense') weights.''' - raise NotImplementedError - - @classmethod - def set_pooler( - cls, - model, - weight=None, - bias=None, - ): - '''Set pooler parameters (e.g., for Bert).''' - raise NotImplementedError - - @classmethod - def set_lm_head( - cls, - model, - dense_weight=None, - dense_bias=None, - norm_weight=None, - norm_bias=None, - ): - '''Set LM head parameters.''' - raise NotImplementedError - - @classmethod - def set_binary_head( - cls, - model, - weight=None, - bias=None, - ): - '''Set binary head parameters.''' - raise NotImplementedError diff --git a/tools/checkpoint/utils.py b/tools/checkpoint/utils.py index a60461941..6a9c5d567 100644 --- a/tools/checkpoint/utils.py +++ b/tools/checkpoint/utils.py @@ -14,10 +14,3 @@ def print_memory_usage(key, rank, num_ranks): mem_info.rss / 1024**3, 100 * mem_info.rss / process.memory_percent() / 1024**3, )) - - -def get_mcore_transformer_block_key(model_key): - return { - "GPT" : "decoder", - "BERT" : "encoder", - }[model_key] diff --git a/tools/preprocess_data.py b/tools/preprocess_data.py index a81fe8ca7..13e5b64a4 100644 --- a/tools/preprocess_data.py +++ b/tools/preprocess_data.py @@ -23,6 +23,7 @@ nltk_available = False from megatron.training.tokenizer import build_tokenizer +from megatron.training.arguments import _add_tokenizer_args from megatron.core.datasets import indexed_dataset @@ -188,6 +189,7 @@ def process_json_file(self, file_name): def get_args(): parser = argparse.ArgumentParser() + parser = _add_tokenizer_args(parser) group = parser.add_argument_group(title='input data') group.add_argument('--input', type=str, required=True, help='Path to input JSON') @@ -197,22 +199,7 @@ def get_args(): help='Split documents into sentences.') group.add_argument('--keep-newlines', action='store_true', help='Keep newlines between sentences when splitting.') - - group = parser.add_argument_group(title='tokenizer') - group.add_argument('--tokenizer-type', type=str, required=True, - choices=['BertWordPieceLowerCase','BertWordPieceCase', - 'GPT2BPETokenizer', 
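preprocess_data.py now reuses the training-side tokenizer flags via _add_tokenizer_args instead of declaring its own tokenizer argument group (the deleted block continues below). A rough sketch of the composition; the exact flag names, defaults and return value of _add_tokenizer_args come from megatron.training.arguments and are assumptions here, as is the placeholder tokenizer model:

import argparse

# Assumption: _add_tokenizer_args attaches the shared --tokenizer-type /
# --tokenizer-model / vocab flags and returns the parser, as the training CLI does.
from megatron.training.arguments import _add_tokenizer_args

parser = argparse.ArgumentParser()
parser = _add_tokenizer_args(parser)
args, _ = parser.parse_known_args(
    ["--tokenizer-type", "HuggingFaceTokenizer", "--tokenizer-model", "some/hf-model"]
)
print(args.tokenizer_type, args.tokenizer_model)
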
'SentencePieceTokenizer', - 'GPTSentencePieceTokenizer', 'Llama2Tokenizer', - 'Llama3Tokenizer', 'MistralTokenizer', 'NullTokenizer'], - help='What type of tokenizer to use.') - group.add_argument('--tokenizer-model', type=str, default=None, - help='YTTM tokenizer model.') - group.add_argument('--vocab-file', type=str, default=None, - help='Path to the vocab file') - group.add_argument('--vocab-size', default=786, - help='size of vocab for use with NullTokenizer') - group.add_argument('--merge-file', type=str, default=None, - help='Path to the BPE merge file (if necessary).') + group = parser.add_argument_group(title='tokenization process') group.add_argument('--append-eod', action='store_true', help='Append an token to the end of a document.') group.add_argument('--lang', type=str, default='english', @@ -220,7 +207,6 @@ def get_args(): group = parser.add_argument_group(title='output data') group.add_argument('--output-prefix', type=str, required=True, help='Path to binary output file without suffix') - group = parser.add_argument_group(title='runtime') group.add_argument('--workers', type=int, required=True, help=('Number of worker processes to launch.' diff --git a/unit-test-job-lts.yaml b/unit-test-job-lts.yaml new file mode 100644 index 000000000..ea64ccd6b --- /dev/null +++ b/unit-test-job-lts.yaml @@ -0,0 +1,107 @@ +default: + interruptible: true +other: + artifacts: + paths: + - results/ + when: always + image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 + needs: + - job: functional:configure + pipeline: $PARENT_PIPELINE_ID + rules: + - if: $CI_PIPELINE_SOURCE == "parent_pipeline" + - if: $CI_MERGE_REQUEST_ID + script: + - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py + --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case + other --container-tag 20283570 --cluster dgxh100_coreweave + stage: unit-tests + tags: &id001 + - arch/amd64 + - env/prod + - origin/jet-fleet + - owner/jet-core + - purpose/jet-client + - team/megatron + timeout: 7 days +stages: + - unit-tests +tests/unit_tests/data/: + artifacts: + paths: + - results/ + when: always + image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 + needs: + - job: functional:configure + pipeline: $PARENT_PIPELINE_ID + rules: + - if: $CI_PIPELINE_SOURCE == "parent_pipeline" + - if: $CI_MERGE_REQUEST_ID + script: + - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py + --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case + tests/unit_tests/data/ --container-tag 20283570 --cluster dgxh100_coreweave + stage: unit-tests + tags: *id001 + timeout: 7 days +tests/unit_tests/dist_checkpointing/: + artifacts: + paths: + - results/ + when: always + image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 + needs: + - job: functional:configure + pipeline: $PARENT_PIPELINE_ID + rules: + - if: $CI_PIPELINE_SOURCE == "parent_pipeline" + - if: $CI_MERGE_REQUEST_ID + script: + - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py + --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case + tests/unit_tests/dist_checkpointing/ --container-tag 20283570 --cluster dgxh100_coreweave + stage: unit-tests + tags: *id001 + timeout: 7 days +tests/unit_tests/distributed/: + artifacts: + paths: + - results/ + when: always + image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 + needs: + 
- job: functional:configure + pipeline: $PARENT_PIPELINE_ID + rules: + - if: $CI_PIPELINE_SOURCE == "parent_pipeline" + - if: $CI_MERGE_REQUEST_ID + script: + - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py + --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case + tests/unit_tests/distributed/ --container-tag 20283570 --cluster dgxh100_coreweave + stage: unit-tests + tags: *id001 + timeout: 7 days +? tests/unit_tests/test_inference.py tests/unit_tests/test_tokenizer.py tests/unit_tests/test_utilities.py + tests/unit_tests/test_training.py +: artifacts: + paths: + - results/ + when: always + image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 + needs: + - job: functional:configure + pipeline: $PARENT_PIPELINE_ID + rules: + - if: $CI_PIPELINE_SOURCE == "parent_pipeline" + - if: $CI_MERGE_REQUEST_ID + script: + - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py + --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case + tests/unit_tests/test_inference.py tests/unit_tests/test_tokenizer.py tests/unit_tests/test_utilities.py + tests/unit_tests/test_training.py --container-tag 20283570 --cluster dgxh100_coreweave + stage: unit-tests + tags: *id001 + timeout: 7 days