From 3083229e402bf9cddfed009b08c662abcf229d86 Mon Sep 17 00:00:00 2001
From: Chi Wang
Date: Sat, 8 May 2021 02:50:50 +0000
Subject: [PATCH] Notebook (#87)

* notebook update
---
 notebook/flaml_automl.ipynb               | 604 +++++++-------
 notebook/flaml_azureml.ipynb              |  10 +-
 notebook/flaml_finetune_transformer.ipynb | 973 +++++++++++++-------
 notebook/flaml_lightgbm.ipynb             | 351 +++++++-
 notebook/flaml_xgboost.ipynb              | 280 ++++++-
 5 files changed, 1557 insertions(+), 661 deletions(-)

diff --git a/notebook/flaml_automl.ipynb b/notebook/flaml_automl.ipynb
index 7036a2bb16..9b98d33876 100644
--- a/notebook/flaml_automl.ipynb
+++ b/notebook/flaml_automl.ipynb
@@ -134,226 +134,182 @@
"slideshow": { "slide_type": "slide" }, - "tags": [ - "outputPrepend" - ] + "tags": [] }, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ - "ror=0.3777\n", - "[flaml.automl: 04-07 09:18:58] {993} INFO - iteration 2, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:58] {1141} INFO - at 0.9s,\tbest lgbm's error=0.3672,\tbest lgbm's error=0.3672\n", - "[flaml.automl: 04-07 09:18:58] {993} INFO - iteration 3, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:58] {1141} INFO - at 1.0s,\tbest lgbm's error=0.3672,\tbest lgbm's error=0.3672\n", - "[flaml.automl: 04-07 09:18:58] {993} INFO - iteration 4, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:58] {1141} INFO - at 1.1s,\tbest lgbm's error=0.3648,\tbest lgbm's error=0.3648\n", - "[flaml.automl: 04-07 09:18:58] {993} INFO - iteration 5, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.3s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 6, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.4s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 7, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.5s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 8, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.6s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 9, current learner lgbm\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.8s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 10, current learner xgboost\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.8s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 11, current learner xgboost\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 1.9s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 12, current learner extra_tree\n", - "[flaml.automl: 04-07 09:18:59] {1141} INFO - at 2.1s,\tbest extra_tree's error=0.3985,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:18:59] {993} INFO - iteration 13, current learner extra_tree\n", - "[flaml.automl: 04-07 09:19:00] {1141} INFO - at 2.2s,\tbest extra_tree's error=0.3985,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:00] {993} INFO - iteration 14, current learner extra_tree\n", - "[flaml.automl: 04-07 09:19:00] {1141} INFO - at 2.4s,\tbest extra_tree's error=0.3971,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:00] {993} INFO - iteration 15, 
current learner lgbm\n", - "[flaml.automl: 04-07 09:19:00] {1141} INFO - at 2.5s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:00] {993} INFO - iteration 16, current learner rf\n", - "[flaml.automl: 04-07 09:19:00] {1141} INFO - at 2.7s,\tbest rf's error=0.4017,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:00] {993} INFO - iteration 17, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:00] {1141} INFO - at 2.8s,\tbest xgboost's error=0.3746,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:00] {993} INFO - iteration 18, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:00] {1141} INFO - at 3.0s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:00] {993} INFO - iteration 19, current learner rf\n", - "[flaml.automl: 04-07 09:19:01] {1141} INFO - at 3.2s,\tbest rf's error=0.4017,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:01] {993} INFO - iteration 20, current learner rf\n", - "[flaml.automl: 04-07 09:19:01] {1141} INFO - at 3.4s,\tbest rf's error=0.3993,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:01] {993} INFO - iteration 21, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:01] {1141} INFO - at 3.5s,\tbest xgboost's error=0.3746,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:19:01] {993} INFO - iteration 22, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:01] {1141} INFO - at 3.8s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:01] {993} INFO - iteration 23, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:01] {1141} INFO - at 4.0s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:01] {993} INFO - iteration 24, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:02] {1141} INFO - at 4.2s,\tbest xgboost's error=0.3627,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:02] {993} INFO - iteration 25, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:02] {1141} INFO - at 4.4s,\tbest xgboost's error=0.3627,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:02] {993} INFO - iteration 26, current learner rf\n", - "[flaml.automl: 04-07 09:19:02] {1141} INFO - at 4.9s,\tbest rf's error=0.3993,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:02] {993} INFO - iteration 27, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:02] {1141} INFO - at 5.1s,\tbest xgboost's error=0.3627,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:02] {993} INFO - iteration 28, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:03] {1141} INFO - at 5.3s,\tbest xgboost's error=0.3627,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:03] {993} INFO - iteration 29, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:04] {1141} INFO - at 6.2s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:04] {993} INFO - iteration 30, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:04] {1141} INFO - at 6.8s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:19:04] {993} INFO - iteration 31, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:05] {1141} INFO - at 8.1s,\tbest lgbm's error=0.3524,\tbest lgbm's error=0.3524\n", - "[flaml.automl: 04-07 09:19:05] {993} INFO - iteration 32, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:06] {1141} INFO - at 8.9s,\tbest lgbm's error=0.3524,\tbest lgbm's 
error=0.3524\n", - "[flaml.automl: 04-07 09:19:06] {993} INFO - iteration 33, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:07] {1141} INFO - at 9.6s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3524\n", - "[flaml.automl: 04-07 09:19:07] {993} INFO - iteration 34, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:13] {1141} INFO - at 15.8s,\tbest lgbm's error=0.3498,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:13] {993} INFO - iteration 35, current learner catboost\n", - "[flaml.automl: 04-07 09:19:16] {1141} INFO - at 18.5s,\tbest catboost's error=0.3644,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:16] {993} INFO - iteration 36, current learner catboost\n", - "[flaml.automl: 04-07 09:19:17] {1141} INFO - at 19.3s,\tbest catboost's error=0.3644,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:17] {993} INFO - iteration 37, current learner extra_tree\n", - "[flaml.automl: 04-07 09:19:17] {1141} INFO - at 19.7s,\tbest extra_tree's error=0.3971,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:17] {993} INFO - iteration 38, current learner catboost\n", - "[flaml.automl: 04-07 09:19:18] {1141} INFO - at 20.6s,\tbest catboost's error=0.3608,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:18] {993} INFO - iteration 39, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:18] {1141} INFO - at 21.0s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:18] {993} INFO - iteration 40, current learner rf\n", - "[flaml.automl: 04-07 09:19:19] {1141} INFO - at 21.7s,\tbest rf's error=0.3936,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:19] {993} INFO - iteration 41, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:20] {1141} INFO - at 22.4s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:20] {993} INFO - iteration 42, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:21] {1141} INFO - at 23.4s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:19:21] {993} INFO - iteration 43, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:25] {1141} INFO - at 27.3s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:25] {993} INFO - iteration 44, current learner catboost\n", - "[flaml.automl: 04-07 09:19:25] {1141} INFO - at 27.8s,\tbest catboost's error=0.3608,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:25] {993} INFO - iteration 45, current learner extra_tree\n", - "[flaml.automl: 04-07 09:19:26] {1141} INFO - at 28.1s,\tbest extra_tree's error=0.3923,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:26] {993} INFO - iteration 46, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:26] {1141} INFO - at 28.6s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:26] {993} INFO - iteration 47, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:35] {1141} INFO - at 37.6s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:35] {993} INFO - iteration 48, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:35] {1141} INFO - at 38.1s,\tbest xgboost's error=0.3559,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:35] {993} INFO - iteration 49, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:36] {1141} INFO - at 38.6s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:36] 
{993} INFO - iteration 50, current learner catboost\n", - "[flaml.automl: 04-07 09:19:36] {1141} INFO - at 38.9s,\tbest catboost's error=0.3608,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:36] {993} INFO - iteration 51, current learner extra_tree\n", - "[flaml.automl: 04-07 09:19:36] {1141} INFO - at 39.1s,\tbest extra_tree's error=0.3895,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:36] {993} INFO - iteration 52, current learner rf\n", - "[flaml.automl: 04-07 09:19:37] {1141} INFO - at 39.4s,\tbest rf's error=0.3904,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:37] {993} INFO - iteration 53, current learner rf\n", - "[flaml.automl: 04-07 09:19:37] {1141} INFO - at 39.8s,\tbest rf's error=0.3857,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:37] {993} INFO - iteration 54, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:42] {1141} INFO - at 44.5s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:42] {993} INFO - iteration 55, current learner lgbm\n", - "[flaml.automl: 04-07 09:19:43] {1141} INFO - at 45.1s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:43] {993} INFO - iteration 56, current learner xgboost\n", - "[flaml.automl: 04-07 09:19:46] {1141} INFO - at 48.4s,\tbest xgboost's error=0.3553,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:46] {993} INFO - iteration 57, current learner rf\n", - "[flaml.automl: 04-07 09:19:46] {1141} INFO - at 48.7s,\tbest rf's error=0.3857,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:46] {993} INFO - iteration 58, current learner rf\n", - "[flaml.automl: 04-07 09:19:48] {1141} INFO - at 50.4s,\tbest rf's error=0.3808,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:48] {993} INFO - iteration 59, current learner catboost\n", - "[flaml.automl: 04-07 09:19:49] {1141} INFO - at 51.5s,\tbest catboost's error=0.3608,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:19:49] {993} INFO - iteration 60, current learner lgbm\n", - "[flaml.automl: 04-07 09:20:10] {1141} INFO - at 72.6s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:20:10] {993} INFO - iteration 61, current learner extra_tree\n", - "[flaml.automl: 04-07 09:20:10] {1141} INFO - at 72.9s,\tbest extra_tree's error=0.3877,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:20:10] {993} INFO - iteration 62, current learner lgbm\n", - "[flaml.automl: 04-07 09:20:23] {1141} INFO - at 85.6s,\tbest lgbm's error=0.3428,\tbest lgbm's error=0.3428\n", - "[flaml.automl: 04-07 09:20:23] {993} INFO - iteration 63, current learner xgboost\n", - "[flaml.automl: 04-07 09:20:25] {1141} INFO - at 87.8s,\tbest xgboost's error=0.3553,\tbest lgbm's error=0.3428\n", - "[flaml.automl: 04-07 09:20:25] {993} INFO - iteration 64, current learner extra_tree\n", - "[flaml.automl: 04-07 09:20:25] {1141} INFO - at 88.1s,\tbest extra_tree's error=0.3877,\tbest lgbm's error=0.3428\n", - "[flaml.automl: 04-07 09:20:25] {993} INFO - iteration 65, current learner lgbm\n", - "[flaml.automl: 04-07 09:20:49] {1141} INFO - at 111.4s,\tbest lgbm's error=0.3379,\tbest lgbm's error=0.3379\n", - "[flaml.automl: 04-07 09:20:49] {993} INFO - iteration 66, current learner rf\n", - "[flaml.automl: 04-07 09:20:50] {1141} INFO - at 113.1s,\tbest rf's error=0.3808,\tbest lgbm's error=0.3379\n", - "[flaml.automl: 04-07 09:20:50] {993} INFO - iteration 67, current learner catboost\n", - "[flaml.automl: 04-07 09:20:51] 
{1141} INFO - at 113.4s,\tbest catboost's error=0.3608,\tbest lgbm's error=0.3379\n", - "[flaml.automl: 04-07 09:20:51] {993} INFO - iteration 68, current learner rf\n", - "[flaml.automl: 04-07 09:20:51] {1141} INFO - at 114.0s,\tbest rf's error=0.3808,\tbest lgbm's error=0.3379\n", - "[flaml.automl: 04-07 09:20:51] {993} INFO - iteration 69, current learner lgbm\n", - "[flaml.automl: 04-07 09:21:01] {1141} INFO - at 123.4s,\tbest lgbm's error=0.3361,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:01] {993} INFO - iteration 70, current learner lgbm\n", - "[flaml.automl: 04-07 09:21:25] {1141} INFO - at 148.1s,\tbest lgbm's error=0.3361,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:25] {993} INFO - iteration 71, current learner catboost\n", - "[flaml.automl: 04-07 09:21:27] {1141} INFO - at 149.3s,\tbest catboost's error=0.3530,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:27] {993} INFO - iteration 72, current learner catboost\n", - "[flaml.automl: 04-07 09:21:28] {1141} INFO - at 150.5s,\tbest catboost's error=0.3530,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:28] {993} INFO - iteration 73, current learner extra_tree\n", - "[flaml.automl: 04-07 09:21:29] {1141} INFO - at 152.1s,\tbest extra_tree's error=0.3827,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:29] {993} INFO - iteration 74, current learner catboost\n", - "[flaml.automl: 04-07 09:21:32] {1141} INFO - at 154.2s,\tbest catboost's error=0.3530,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:32] {993} INFO - iteration 75, current learner extra_tree\n", - "[flaml.automl: 04-07 09:21:34] {1141} INFO - at 156.4s,\tbest extra_tree's error=0.3827,\tbest lgbm's error=0.3361\n", - "[flaml.automl: 04-07 09:21:34] {993} INFO - iteration 76, current learner lgbm\n", - "[flaml.automl: 04-07 09:21:49] {1141} INFO - at 171.2s,\tbest lgbm's error=0.3297,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:49] {993} INFO - iteration 77, current learner lrl1\n", + "[flaml.automl: 05-01 16:21:30] {890} INFO - Evaluation method: holdout\n", + "[flaml.automl: 05-01 16:21:31] {596} INFO - Using StratifiedKFold\n", + "[flaml.automl: 05-01 16:21:31] {911} INFO - Minimizing error metric: 1-accuracy\n", + "[flaml.automl: 05-01 16:21:31] {929} INFO - List of ML learners in AutoML Run: ['lgbm', 'rf', 'catboost', 'xgboost', 'extra_tree', 'lrl1']\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 0, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 0.6s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 1, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 0.6s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 2, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 0.7s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 3, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 0.7s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 4, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 0.8s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 5, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 
0.9s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 6, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 1.0s,\tbest lgbm's error=0.3765,\tbest lgbm's error=0.3765\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 7, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 1.1s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3765\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 8, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:31] {1141} INFO - at 1.3s,\tbest lgbm's error=0.3686,\tbest lgbm's error=0.3686\n", + "[flaml.automl: 05-01 16:21:31] {993} INFO - iteration 9, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:32] {1141} INFO - at 1.4s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3686\n", + "[flaml.automl: 05-01 16:21:32] {993} INFO - iteration 10, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:32] {1141} INFO - at 1.5s,\tbest lgbm's error=0.3686,\tbest lgbm's error=0.3686\n", + "[flaml.automl: 05-01 16:21:32] {993} INFO - iteration 11, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:32] {1141} INFO - at 1.7s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:32] {993} INFO - iteration 12, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:32] {1141} INFO - at 1.8s,\tbest extra_tree's error=0.4032,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:32] {993} INFO - iteration 13, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:32] {1141} INFO - at 2.0s,\tbest extra_tree's error=0.4032,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:32] {993} INFO - iteration 14, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:32] {1141} INFO - at 2.1s,\tbest extra_tree's error=0.4032,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:32] {993} INFO - iteration 15, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:33] {1141} INFO - at 2.4s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:33] {993} INFO - iteration 16, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:33] {1141} INFO - at 2.7s,\tbest extra_tree's error=0.3972,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:33] {993} INFO - iteration 17, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:33] {1141} INFO - at 2.8s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:33] {993} INFO - iteration 18, current learner rf\n", + "[flaml.automl: 05-01 16:21:33] {1141} INFO - at 3.1s,\tbest rf's error=0.4011,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:33] {993} INFO - iteration 19, current learner rf\n", + "[flaml.automl: 05-01 16:21:33] {1141} INFO - at 3.3s,\tbest rf's error=0.3994,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:21:33] {993} INFO - iteration 20, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:34] {1141} INFO - at 3.7s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:34] {993} INFO - iteration 21, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:34] {1141} INFO - at 3.7s,\tbest xgboost's error=0.3765,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:34] {993} INFO - iteration 22, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:34] {1141} INFO - at 4.0s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:34] 
{993} INFO - iteration 23, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:34] {1141} INFO - at 4.3s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:34] {993} INFO - iteration 24, current learner rf\n", + "[flaml.automl: 05-01 16:21:35] {1141} INFO - at 4.7s,\tbest rf's error=0.3990,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:35] {993} INFO - iteration 25, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:35] {1141} INFO - at 5.0s,\tbest extra_tree's error=0.3972,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:35] {993} INFO - iteration 26, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:35] {1141} INFO - at 5.2s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:21:35] {993} INFO - iteration 27, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:36] {1141} INFO - at 5.6s,\tbest lgbm's error=0.3518,\tbest lgbm's error=0.3518\n", + "[flaml.automl: 05-01 16:21:36] {993} INFO - iteration 28, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:36] {1141} INFO - at 5.7s,\tbest xgboost's error=0.3746,\tbest lgbm's error=0.3518\n", + "[flaml.automl: 05-01 16:21:36] {993} INFO - iteration 29, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:36] {1141} INFO - at 5.7s,\tbest xgboost's error=0.3689,\tbest lgbm's error=0.3518\n", + "[flaml.automl: 05-01 16:21:36] {993} INFO - iteration 30, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:36] {1141} INFO - at 6.0s,\tbest lgbm's error=0.3518,\tbest lgbm's error=0.3518\n", + "[flaml.automl: 05-01 16:21:36] {993} INFO - iteration 31, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:37] {1141} INFO - at 6.7s,\tbest lgbm's error=0.3504,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:37] {993} INFO - iteration 32, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:37] {1141} INFO - at 7.0s,\tbest lgbm's error=0.3504,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:37] {993} INFO - iteration 33, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:37] {1141} INFO - at 7.1s,\tbest xgboost's error=0.3689,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:37] {993} INFO - iteration 34, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:41] {1141} INFO - at 10.4s,\tbest lgbm's error=0.3504,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:41] {993} INFO - iteration 35, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:41] {1141} INFO - at 10.5s,\tbest xgboost's error=0.3617,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:41] {993} INFO - iteration 36, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:41] {1141} INFO - at 10.6s,\tbest xgboost's error=0.3610,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:41] {993} INFO - iteration 37, current learner catboost\n", + "[flaml.automl: 05-01 16:21:42] {1141} INFO - at 11.4s,\tbest catboost's error=0.3600,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:42] {993} INFO - iteration 38, current learner catboost\n", + "[flaml.automl: 05-01 16:21:42] {1141} INFO - at 12.2s,\tbest catboost's error=0.3600,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:42] {993} INFO - iteration 39, current learner catboost\n", + "[flaml.automl: 05-01 16:21:43] {1141} INFO - at 12.6s,\tbest catboost's error=0.3599,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:21:43] {993} INFO - iteration 40, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:46] 
{1141} INFO - at 16.1s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:46] {993} INFO - iteration 41, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:46] {1141} INFO - at 16.2s,\tbest xgboost's error=0.3610,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:46] {993} INFO - iteration 42, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:47] {1141} INFO - at 16.6s,\tbest extra_tree's error=0.3972,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:47] {993} INFO - iteration 43, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:50] {1141} INFO - at 19.7s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:50] {993} INFO - iteration 44, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:50] {1141} INFO - at 19.8s,\tbest xgboost's error=0.3610,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:50] {993} INFO - iteration 45, current learner extra_tree\n", + "[flaml.automl: 05-01 16:21:50] {1141} INFO - at 20.1s,\tbest extra_tree's error=0.3972,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:50] {993} INFO - iteration 46, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:50] {1141} INFO - at 20.2s,\tbest xgboost's error=0.3595,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:50] {993} INFO - iteration 47, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:55] {1141} INFO - at 24.4s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:55] {993} INFO - iteration 48, current learner xgboost\n", + "[flaml.automl: 05-01 16:21:55] {1141} INFO - at 24.5s,\tbest xgboost's error=0.3595,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:55] {993} INFO - iteration 49, current learner lgbm\n", + "[flaml.automl: 05-01 16:21:57] {1141} INFO - at 26.7s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:57] {993} INFO - iteration 50, current learner rf\n", + "[flaml.automl: 05-01 16:21:57] {1141} INFO - at 27.0s,\tbest rf's error=0.3964,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:21:57] {993} INFO - iteration 51, current learner extra_tree\n", + "[flaml.automl: 05-01 16:22:00] {1141} INFO - at 29.6s,\tbest extra_tree's error=0.3860,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:22:00] {993} INFO - iteration 52, current learner lgbm\n", + "[flaml.automl: 05-01 16:22:05] {1141} INFO - at 34.7s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:22:05] {993} INFO - iteration 53, current learner rf\n", + "[flaml.automl: 05-01 16:22:05] {1141} INFO - at 34.9s,\tbest rf's error=0.3964,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:22:05] {993} INFO - iteration 54, current learner lgbm\n", + "[flaml.automl: 05-01 16:22:07] {1141} INFO - at 36.5s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:22:07] {993} INFO - iteration 55, current learner lgbm\n", + "[flaml.automl: 05-01 16:22:16] {1141} INFO - at 46.3s,\tbest lgbm's error=0.3295,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:16] {993} INFO - iteration 56, current learner xgboost\n", + "[flaml.automl: 05-01 16:22:17] {1141} INFO - at 46.5s,\tbest xgboost's error=0.3590,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:17] {993} INFO - iteration 57, current learner lgbm\n", + "[flaml.automl: 05-01 16:22:25] {1141} INFO - at 54.8s,\tbest lgbm's error=0.3295,\tbest lgbm's 
error=0.3295\n", + "[flaml.automl: 05-01 16:22:25] {993} INFO - iteration 58, current learner rf\n", + "[flaml.automl: 05-01 16:22:25] {1141} INFO - at 55.0s,\tbest rf's error=0.3964,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:25] {993} INFO - iteration 59, current learner xgboost\n", + "[flaml.automl: 05-01 16:22:25] {1141} INFO - at 55.1s,\tbest xgboost's error=0.3590,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:25] {993} INFO - iteration 60, current learner lgbm\n", + "[flaml.automl: 05-01 16:22:39] {1141} INFO - at 68.4s,\tbest lgbm's error=0.3295,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:39] {993} INFO - iteration 61, current learner xgboost\n", + "[flaml.automl: 05-01 16:22:39] {1141} INFO - at 68.7s,\tbest xgboost's error=0.3573,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:39] {993} INFO - iteration 62, current learner lgbm\n", + "[flaml.automl: 05-01 16:22:42] {1141} INFO - at 71.8s,\tbest lgbm's error=0.3295,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:42] {993} INFO - iteration 63, current learner xgboost\n", + "[flaml.automl: 05-01 16:22:42] {1141} INFO - at 72.1s,\tbest xgboost's error=0.3573,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:42] {993} INFO - iteration 64, current learner xgboost\n", + "[flaml.automl: 05-01 16:22:43] {1141} INFO - at 72.6s,\tbest xgboost's error=0.3573,\tbest lgbm's error=0.3295\n", + "[flaml.automl: 05-01 16:22:43] {993} INFO - iteration 65, current learner lgbm\n", + "[flaml.automl: 05-01 16:23:22] {1141} INFO - at 111.8s,\tbest lgbm's error=0.3255,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:23:22] {993} INFO - iteration 66, current learner lrl1\n", "No init config given to FLOW2. Using random initial config.For cost-frugal search, consider providing init values for cost-related hps via 'init_config'.\n", "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/sklearn/linear_model/_sag.py:328: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n", " warnings.warn(\"The max_iter was reached which means \"\n", - "[flaml.automl: 04-07 09:21:49] {1141} INFO - at 171.5s,\tbest lrl1's error=0.4337,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:49] {993} INFO - iteration 78, current learner lrl1\n", + "[flaml.automl: 05-01 16:23:22] {1141} INFO - at 112.0s,\tbest lrl1's error=0.4339,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:23:22] {993} INFO - iteration 67, current learner lrl1\n", "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/sklearn/linear_model/_sag.py:328: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n", " warnings.warn(\"The max_iter was reached which means \"\n", - "[flaml.automl: 04-07 09:21:49] {1141} INFO - at 171.8s,\tbest lrl1's error=0.4337,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:49] {993} INFO - iteration 79, current learner lrl1\n", + "[flaml.automl: 05-01 16:23:22] {1141} INFO - at 112.3s,\tbest lrl1's error=0.4338,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:23:22] {993} INFO - iteration 68, current learner extra_tree\n", + "[flaml.automl: 05-01 16:23:28] {1141} INFO - at 117.6s,\tbest extra_tree's error=0.3835,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:23:28] {993} INFO - iteration 69, current learner xgboost\n", + "[flaml.automl: 05-01 16:23:28] {1141} INFO - at 117.7s,\tbest xgboost's error=0.3573,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 
16:23:28] {993} INFO - iteration 70, current learner lgbm\n", + "[flaml.automl: 05-01 16:24:50] {1141} INFO - at 200.2s,\tbest lgbm's error=0.3255,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:24:50] {993} INFO - iteration 71, current learner xgboost\n", + "[flaml.automl: 05-01 16:24:52] {1141} INFO - at 201.6s,\tbest xgboost's error=0.3569,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:24:52] {993} INFO - iteration 72, current learner xgboost\n", + "[flaml.automl: 05-01 16:24:52] {1141} INFO - at 202.0s,\tbest xgboost's error=0.3569,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:24:52] {993} INFO - iteration 73, current learner lgbm\n", + "[flaml.automl: 05-01 16:25:32] {1141} INFO - at 242.0s,\tbest lgbm's error=0.3255,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:26:06] {1164} INFO - retrain lgbm for 34.1s\n", + "[flaml.automl: 05-01 16:26:06] {993} INFO - iteration 74, current learner lrl1\n", "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/sklearn/linear_model/_sag.py:328: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n", " warnings.warn(\"The max_iter was reached which means \"\n", - "[flaml.automl: 04-07 09:21:50] {1141} INFO - at 172.2s,\tbest lrl1's error=0.4337,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:50] {993} INFO - iteration 80, current learner lrl1\n", - "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/sklearn/linear_model/_sag.py:328: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n", - " warnings.warn(\"The max_iter was reached which means \"\n", - "[flaml.automl: 04-07 09:21:50] {1141} INFO - at 172.5s,\tbest lrl1's error=0.4337,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:50] {993} INFO - iteration 81, current learner lrl1\n", - "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/sklearn/linear_model/_sag.py:328: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n", - " warnings.warn(\"The max_iter was reached which means \"\n", - "[flaml.automl: 04-07 09:21:50] {1141} INFO - at 172.8s,\tbest lrl1's error=0.4337,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:50] {993} INFO - iteration 82, current learner lrl1\n", - "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/sklearn/linear_model/_sag.py:328: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n", - " warnings.warn(\"The max_iter was reached which means \"\n", - "[flaml.automl: 04-07 09:21:51] {1141} INFO - at 174.0s,\tbest lrl1's error=0.4334,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:51] {993} INFO - iteration 83, current learner rf\n", - "[flaml.automl: 04-07 09:21:55] {1141} INFO - at 177.7s,\tbest rf's error=0.3808,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:55] {993} INFO - iteration 84, current learner catboost\n", - "[flaml.automl: 04-07 09:21:56] {1141} INFO - at 178.6s,\tbest catboost's error=0.3530,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:21:56] {993} INFO - iteration 85, current learner catboost\n", - "[flaml.automl: 04-07 09:22:06] {1141} INFO - at 188.6s,\tbest catboost's error=0.3481,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:06] {993} INFO - iteration 86, current learner extra_tree\n", - "[flaml.automl: 04-07 09:22:06] {1141} INFO - at 189.0s,\tbest extra_tree's error=0.3827,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:06] {993} INFO - 
iteration 87, current learner catboost\n", - "[flaml.automl: 04-07 09:22:12] {1141} INFO - at 195.1s,\tbest catboost's error=0.3481,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:12] {993} INFO - iteration 88, current learner catboost\n", - "[flaml.automl: 04-07 09:22:34] {1141} INFO - at 216.6s,\tbest catboost's error=0.3481,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:34] {993} INFO - iteration 89, current learner lgbm\n", - "[flaml.automl: 04-07 09:22:43] {1141} INFO - at 225.8s,\tbest lgbm's error=0.3297,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:43] {993} INFO - iteration 90, current learner lgbm\n", - "[flaml.automl: 04-07 09:22:57] {1141} INFO - at 239.3s,\tbest lgbm's error=0.3297,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:57] {993} INFO - iteration 91, current learner extra_tree\n", - "[flaml.automl: 04-07 09:22:59] {1141} INFO - at 242.0s,\tbest extra_tree's error=0.3827,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:22:59] {993} INFO - iteration 92, current learner lgbm\n", - "[flaml.automl: 04-07 09:23:47] {1141} INFO - at 289.9s,\tbest lgbm's error=0.3297,\tbest lgbm's error=0.3297\n", - "[flaml.automl: 04-07 09:23:47] {993} INFO - iteration 93, current learner extra_tree\n", - "[flaml.automl: 04-07 09:23:47] {1149} INFO - no enough budget for learner extra_tree\n", - "[flaml.automl: 04-07 09:23:47] {993} INFO - iteration 94, current learner catboost\n", - "[flaml.automl: 04-07 09:23:47] {1149} INFO - no enough budget for learner catboost\n", - "[flaml.automl: 04-07 09:23:47] {993} INFO - iteration 95, current learner rf\n", - "[flaml.automl: 04-07 09:23:47] {1149} INFO - no enough budget for learner rf\n", - "[flaml.automl: 04-07 09:23:47] {993} INFO - iteration 96, current learner xgboost\n", - "[flaml.automl: 04-07 09:23:47] {1149} INFO - no enough budget for learner xgboost\n", - "[flaml.automl: 04-07 09:23:47] {993} INFO - iteration 97, current learner lrl1\n", - "[flaml.automl: 04-07 09:23:47] {1149} INFO - no enough budget for learner lrl1\n", - "[flaml.automl: 04-07 09:23:47] {1187} INFO - selected model: LGBMClassifier(learning_rate=0.0710308200604776, max_bin=1023,\n", - " min_child_samples=55, n_estimators=66, num_leaves=1256,\n", - " objective='binary', reg_alpha=0.02231327328083961,\n", - " reg_lambda=1.4895868449253324, subsample=0.7242742367442448)\n", - "[flaml.automl: 04-07 09:23:47] {944} INFO - fit succeeded\n" + "[flaml.automl: 05-01 16:26:06] {1141} INFO - at 276.4s,\tbest lrl1's error=0.4338,\tbest lgbm's error=0.3255\n", + "[flaml.automl: 05-01 16:26:40] {1164} INFO - retrain lrl1 for 33.8s\n", + "[flaml.automl: 05-01 16:26:40] {1187} INFO - selected model: LGBMClassifier(colsample_bytree=0.6957494744503872,\n", + " learning_rate=0.03736015062362056, max_bin=127,\n", + " min_child_samples=51, n_estimators=1254, num_leaves=199,\n", + " objective='binary', reg_alpha=0.06292808836994221,\n", + " reg_lambda=1.7855390807403162, subsample=0.9807570637220066)\n", + "[flaml.automl: 05-01 16:26:40] {944} INFO - fit succeeded\n" ] } ], @@ -387,7 +343,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 66.0, 'num_leaves': 1256.0, 'min_child_samples': 55.0, 'learning_rate': 0.0710308200604776, 'subsample': 0.7242742367442448, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.02231327328083961, 'reg_lambda': 1.4895868449253324, 'FLAML_sample_size': 364083}\nBest accuracy on validation data: 0.6703\nTraining 
duration of best run: 14.85 s\n" + "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 1254.0, 'num_leaves': 199.0, 'min_child_samples': 51.0, 'learning_rate': 0.03736015062362056, 'subsample': 0.9807570637220066, 'log_max_bin': 7.0, 'colsample_bytree': 0.6957494744503872, 'reg_alpha': 0.06292808836994221, 'reg_lambda': 1.7855390807403162, 'FLAML_sample_size': 364083}\nBest accuracy on validation data: 0.6745\nTraining duration of best run: 39.17 s\n" ] } ], @@ -412,10 +368,11 @@ "output_type": "execute_result", "data": { "text/plain": [ - "LGBMClassifier(learning_rate=0.0710308200604776, max_bin=1023,\n", - " min_child_samples=55, n_estimators=66, num_leaves=1256,\n", - " objective='binary', reg_alpha=0.02231327328083961,\n", - " reg_lambda=1.4895868449253324, subsample=0.7242742367442448)" + "LGBMClassifier(colsample_bytree=0.6957494744503872,\n", + " learning_rate=0.03736015062362056, max_bin=127,\n", + " min_child_samples=51, n_estimators=1254, num_leaves=199,\n", + " objective='binary', reg_alpha=0.06292808836994221,\n", + " reg_lambda=1.7855390807403162, subsample=0.9807570637220066)" ] }, "metadata": {}, @@ -482,7 +439,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "accuracy = 0.6704166234074426\nroc_auc = 0.7235915818210762\nlog_loss = 0.604223973210284\nf1 = 0.587509165328606\n" + "accuracy = 0.6720406982780357\nroc_auc = 0.7265069475647942\nlog_loss = 0.6023913941397441\nf1 = 0.5918638561777844\n" ] } ], @@ -522,7 +479,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 25.0, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'log_max_bin': 10.0, 'colsample_bytree': 0.9684145930669938, 'reg_alpha': 0.001831177697321707, 'reg_lambda': 0.2790165919053839, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 25.0, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'log_max_bin': 10.0, 'colsample_bytree': 0.9684145930669938, 'reg_alpha': 0.001831177697321707, 'reg_lambda': 0.2790165919053839, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 23.0, 'num_leaves': 4.0, 'min_child_samples': 48.0, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'log_max_bin': 10.0, 'colsample_bytree': 0.9534346594834143, 'reg_alpha': 0.002208534076096185, 'reg_lambda': 0.5460627024738886, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 23.0, 'num_leaves': 4.0, 'min_child_samples': 48.0, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'log_max_bin': 10.0, 'colsample_bytree': 0.9534346594834143, 'reg_alpha': 0.002208534076096185, 'reg_lambda': 0.5460627024738886, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 
10000, 'Current Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 17.0, 'min_child_samples': 42.0, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 17.0, 'min_child_samples': 42.0, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 17.0, 'min_child_samples': 42.0, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 17.0, 'min_child_samples': 42.0, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 33.0, 'num_leaves': 29.0, 'min_child_samples': 47.0, 'learning_rate': 0.15519742191308053, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.07062243857330504, 'reg_lambda': 0.5761644116593855, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 33.0, 'num_leaves': 29.0, 'min_child_samples': 47.0, 'learning_rate': 0.15519742191308053, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.07062243857330504, 'reg_lambda': 0.5761644116593855, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 114.0, 'num_leaves': 182.0, 'min_child_samples': 68.0, 'learning_rate': 0.05769715774252064, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0027586613290827237, 'reg_lambda': 0.6832434368844802, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 114.0, 'num_leaves': 182.0, 'min_child_samples': 68.0, 'learning_rate': 0.05769715774252064, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0027586613290827237, 'reg_lambda': 0.6832434368844802, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 318.0, 'num_leaves': 27.0, 'min_child_samples': 99.0, 'learning_rate': 0.04261799404250151, 'subsample': 0.9982731696185565, 'log_max_bin': 10.0, 'colsample_bytree': 0.8589079860800738, 'reg_alpha': 0.054177182376811454, 'reg_lambda': 3.7064664547599495, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 318.0, 'num_leaves': 27.0, 'min_child_samples': 99.0, 'learning_rate': 0.04261799404250151, 'subsample': 0.9982731696185565, 'log_max_bin': 10.0, 'colsample_bytree': 0.8589079860800738, 'reg_alpha': 0.054177182376811454, 'reg_lambda': 3.7064664547599495, 'FLAML_sample_size': 40000}}\n" + "{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 
'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 8.0, 'num_leaves': 4.0, 'min_child_samples': 26.0, 'learning_rate': 0.25676103984424165, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.8499027725496043, 'reg_alpha': 0.0015851927568202393, 'reg_lambda': 4.468020088227013, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 8.0, 'num_leaves': 4.0, 'min_child_samples': 26.0, 'learning_rate': 0.25676103984424165, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.8499027725496043, 'reg_alpha': 0.0015851927568202393, 'reg_lambda': 4.468020088227013, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 8.0, 'num_leaves': 12.0, 'min_child_samples': 30.0, 'learning_rate': 0.3127155723538002, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.7967145599266738, 'reg_alpha': 0.040774029561503077, 'reg_lambda': 22.553195483489322, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 8.0, 'num_leaves': 12.0, 'min_child_samples': 30.0, 'learning_rate': 0.3127155723538002, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.7967145599266738, 'reg_alpha': 0.040774029561503077, 'reg_lambda': 22.553195483489322, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 16.0, 'num_leaves': 17.0, 'min_child_samples': 53.0, 'learning_rate': 0.20056162642458597, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.6980216487058154, 'reg_alpha': 0.014469098513013432, 'reg_lambda': 7.806208895457607, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 16.0, 'num_leaves': 17.0, 'min_child_samples': 53.0, 'learning_rate': 0.20056162642458597, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.6980216487058154, 'reg_alpha': 0.014469098513013432, 'reg_lambda': 7.806208895457607, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 29.0, 'num_leaves': 30.0, 'min_child_samples': 27.0, 'learning_rate': 0.3345600006903613, 'subsample': 1.0, 'log_max_bin': 6.0, 'colsample_bytree': 0.6138481769580465, 'reg_alpha': 0.02608844295136239, 'reg_lambda': 4.068656226566239, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 29.0, 
'num_leaves': 30.0, 'min_child_samples': 27.0, 'learning_rate': 0.3345600006903613, 'subsample': 1.0, 'log_max_bin': 6.0, 'colsample_bytree': 0.6138481769580465, 'reg_alpha': 0.02608844295136239, 'reg_lambda': 4.068656226566239, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 40000, 'Current Hyper-parameters': {'n_estimators': 63.0, 'num_leaves': 69.0, 'min_child_samples': 24.0, 'learning_rate': 0.10431691413559704, 'subsample': 0.8895588746662894, 'log_max_bin': 5.0, 'colsample_bytree': 0.5793842857429541, 'reg_alpha': 0.04072860923394475, 'reg_lambda': 1.6480344418782087, 'FLAML_sample_size': 40000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 63.0, 'num_leaves': 69.0, 'min_child_samples': 24.0, 'learning_rate': 0.10431691413559704, 'subsample': 0.8895588746662894, 'log_max_bin': 5.0, 'colsample_bytree': 0.5793842857429541, 'reg_alpha': 0.04072860923394475, 'reg_lambda': 1.6480344418782087, 'FLAML_sample_size': 40000}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 63.0, 'num_leaves': 69.0, 'min_child_samples': 24.0, 'learning_rate': 0.10431691413559704, 'subsample': 0.8895588746662894, 'log_max_bin': 5.0, 'colsample_bytree': 0.5793842857429541, 'reg_alpha': 0.04072860923394475, 'reg_lambda': 1.6480344418782087, 'FLAML_sample_size': 364083}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 63.0, 'num_leaves': 69.0, 'min_child_samples': 24.0, 'learning_rate': 0.10431691413559704, 'subsample': 0.8895588746662894, 'log_max_bin': 5.0, 'colsample_bytree': 0.5793842857429541, 'reg_alpha': 0.04072860923394475, 'reg_lambda': 1.6480344418782087, 'FLAML_sample_size': 364083}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 247.0, 'num_leaves': 194.0, 'min_child_samples': 40.0, 'learning_rate': 0.0784927400486222, 'subsample': 0.9292526044699931, 'log_max_bin': 6.0, 'colsample_bytree': 0.5754801137582828, 'reg_alpha': 0.2407021490497366, 'reg_lambda': 3.958964517957629, 'FLAML_sample_size': 364083}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 247.0, 'num_leaves': 194.0, 'min_child_samples': 40.0, 'learning_rate': 0.0784927400486222, 'subsample': 0.9292526044699931, 'log_max_bin': 6.0, 'colsample_bytree': 0.5754801137582828, 'reg_alpha': 0.2407021490497366, 'reg_lambda': 3.958964517957629, 'FLAML_sample_size': 364083}}\n" ] } ], @@ -548,8 +505,8 @@ "output_type": "display_data", "data": { "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEWCAYAAABxMXBSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3dfZxdVX3v8c+XSYBRCAMm0mQIECWJolAiEURQkSsmWpVIEXmoIt6C1GK11GBSC3LxUrGIVl+meMEiYk1RUgyjRiJWHtrwlIHEPAwNxoAyk2hCSOTBkSST3/1jrxN2hjmTfZLZ83S+79frvObstddZ+7dzYH6z11p7L0UEZmZmRe010AGYmdnQ4sRhZmY1ceIwM7OaOHGYmVlNnDjMzKwmThxmZlYTJw6zPiTpLZJWDXQcZmVy4rBhQ9ITkt4xkDFExH9FxOSy2pc0TdK9kp6VtEHSPZLeV9bxzHrixGFWA0kNA3jsM4BbgZuBQ4CDgcuB9+5GW5Lk//9tt/g/HBv2JO0laZakX0naKOn7kg7K7b9V0m8l/T79Nf+63L6bJF0naYGk54G3pyubT0talj7zPUn7pvonS2rPfb5q3bT/UknrJK2V9JeSQtIRPZyDgC8Dn4+Ib0bE7yNie0TcExEXpDpXSPq33GcOT+2NSNt3S7pK0iLgD8BMSa3djvO3klrS+30kfUnSbyT9TtI3JDXu4ddhw4ATh9WDTwAzgLcB44BNwJzc/p8AE4FXAo8A3+32+XOAq4D9gf9OZWcC04EJwNHAR3o5fo91JU0HLgHeARwBnNxLG5OB8cC8XuoU8SHgQrJz+QYwWdLE3P5zgLnp/dXAJOCYFF8z2RWO1TknDqsHFwGfjYj2iHgBuAI4o/KXeETcGBHP5vb9qaQDcp+/PSIWpb/w/5jKvhYRayPiaeCHZL9cq6lW90zgWxGxMiL+kI5dzSvSz3VFT7qKm9LxtkXE74HbgbMBUgJ5DdCSrnAuBP42Ip6OiGeBfwTO2sPj2zDgxGH14DDgB5I2S9oMPAp0AQdLapB0derGegZ4In1mdO7zT/bQ5m9z7/8A7NfL8avVHdet7Z6OU7Ex/RzbS50iuh9jLilxkF1tzE9JbAzwMuDh3L/bHanc6pwTh9WDJ4F3RURT7rVvRHSQ/bI8jay76ADg8PQZ5T5f1iOk15ENcleM76XuKrLz+PNe6jxP9su+4k96qNP9XO4Exkg6hiyBVLqpngI6gdfl/s0OiIjeEqTVCScOG25GSto39xpB1pd/laTDACSNkXRaqr8/8ALZX/QvI+uO6S/fB86X9FpJLwMuq1YxsvUPLgEuk3S+pFFp0P8kSdenakuBt0o6NHW1zd5VABGxlWym1jXAQWSJhIjYDtwAfEXSKwEkNUuatttna8OGE4cNNwvI/lKuvK4Avgq0AD+V9CzwAHB8qn8z8GugA2hL+/pFRPwE+BpwF7A6d+wXqtSfB3wQ+CiwFvgd8H/JximIiDuB7wHLgIeBHxUMZS7ZFdetEbEtV/6ZSlypG+9nZIP0VufkhZzMBgdJrwVWAPt0+wVuNqj4isNsAEl6f7pf4kDgi8APnTRssHPiMBtYHwPWA78im+n1VwMbjtmuuavKzMxq4isOMzOryYiBDqA/jB49Og4//PCBDsPMbEh5+OGHn4qIl9z0WReJ4/DDD6e1tXXXFc3MbAdJv+6p3F1VZmZWEycOMzOriROHmZnVxInDzMxq4sRhZmY1qYtZVWZm9WT+kg6uWbiKtZs7GdfUyMxpk5kxpbnP2nfiMDMbRuYv6WD2bcvp3NoFQMfmTmbfthygz5KHu6rMzIaRaxau2pE0Kjq3dnHNwlV9dgwnDjOzYWTt5s6ayneHE4eZ2TAyrqmxpvLd4cRhZjaMzJw2mcaRDTuVNY5sYOa0vlu80YPjZmbDSGUA/NJ5y9jStZ1mz6oyM7NdmTGlmX9/6DcAfO9jJ/R5+04cZsNE2XP3zSpKHeOQNF3SKkmrJc2qUudMSW2SVkqamyvvkrQ0vVpy5RMkPZja/J6kvcs8B7OhoDJ3v2NzJ8GLc/fnL+kY6NBsGCrtikNSAzAHOBVoBxZLaomItlydicBs4MSI2CTplbkmOiPimB6a/iLwlYi4RdI3gP8NXFfWeZgNBdXm7l86b9mOLgurL23rnuHIsaNKabvMK47jgNURsSYitgC3AKd1q3MBMCciNgFExPreGpQk4BRgXir6NjCjT6M2G4KqzdHf0rW9nyOxweLIsaM47ZhyuirLHONoBp7MbbcDx3erMwlA0iKgAbgiIu5I+/aV1ApsA66OiPnAK4DNEbEt12aP/zKSLgQuBDj00EP3/GzMBrFxTY109JA8mpsaSxkctfo20PdxjAAmAicDZwM3SGpK+w6LiK
     [... inline SVG markup and base64-encoded PNG data for the notebook's plot outputs omitted ...]
Wq+RNJa9LaCl8vOO5TqUXRUmNMj0iaJGm6pP9Ixz4k6RuS3izpztTaOTnV/y1la36sSGsaDDVT8R8At6RjXpHq359in5HqdAJ/WEuc1rjchLRxISI2Stoh6Riy1sJdZDNrngL8ElgbEdsk/WNEXAGQ/ki/Dfh2Dac4HnisetK+apJeDVwIvBYQcI+k24FtwN8Cr4+IpyQdUXXcIrLWyIWxf0+nHg+8E3gf2ZQx7wbeAJxF9hT4AuBjZFNEvC91Ta2Q9G8R8UwujuOArbmngT8AfDEivpGmn6kkrweA1+xHnNZA3IKw8eQnZMmhkiDuym3fmeq8SdkKYGvJ5vJ/RdEHvQBvAL4ZEc+kdRSWAqelc90cEU8BVE3R8XHgsIj4wH4mB4CHI2JtROwE1pEtbBRk04VMT3V+D7gsTd39I7JJ546p+pwpwObc9l3A30j6a+DYiBhI8Q8C2ySV2sVmY5sThI0nlXGIE8i+4d5N1oJ4PfATSYcA/wScGxEnkPWzH1LjZ68HjpE0ccSjzr7xv7q6VbGP8vP/7Mxt72R3T4CAP8iNYxwTEfnV0QAGyFNs1U8AAAFXSURBVF2TiLiOrBUyACyXdEau7sHAb15AzDbOOUHYePITsi6jX6Q1DH4BtJEliZ+w+w/fU2mthyHvHqoWEc+SzfD6xdTVUpnp9J1VVf8dWCDpRZJ+C3hHKvsh8E5JL0nH5pPBLcBngO+W/I28C/hQZdxF0pyCOg+xu8VBmoxuQ0R8iWym0hNT+UuApyJie4nx2hjnBGHjyVqyu5furir7ZUQ8le74uZqsddFF9s19X/wtWfdLt6QHgO8A1QsJ3Ue2tvcKstlfvxIRqyNiHXAlcLuknwJfqDru5hTbsjStdhk+RbZk6BpJ69L2HtJ4xM8kHZ+KzgMeSN1SvwNcm8rfBHy3pDhtnPBsrmZNRtI7gFdHxN8OU2cpcFlEPDR6kdlY47uYzJpMRHyz0hVWJHWxdTo5mFsQZmZWyGMQZmZWyAnCzMwKOUGYmVkhJwgzMyvkBGFmZoX+C+AUNne7LWAdAAAAAElFTkSuQmCC\n" }, "metadata": { "needs_background": "light" @@ -744,86 +701,139 @@ "output_type": "stream", "name": "stderr", "text": [ - "[flaml.automl: 04-07 09:24:01] {890} INFO - Evaluation method: holdout\n", - "[flaml.automl: 04-07 09:24:01] {596} INFO - Using StratifiedKFold\n", - "[flaml.automl: 04-07 09:24:01] {911} INFO - Minimizing error metric: 1-accuracy\n", - "[flaml.automl: 04-07 09:24:01] {929} INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", - "[flaml.automl: 04-07 09:24:01] {993} INFO - iteration 0, current learner RGF\n", + "[flaml.automl: 05-01 16:27:27] {890} INFO - Evaluation method: holdout\n", + "[flaml.automl: 05-01 16:27:27] {596} INFO - Using StratifiedKFold\n", + "[flaml.automl: 05-01 16:27:27] {911} INFO - Minimizing error metric: 1-accuracy\n", + "[flaml.automl: 05-01 16:27:27] {929} INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", + "[flaml.automl: 05-01 16:27:27] {993} INFO - iteration 0, current learner RGF\n", "/home/dmx/miniconda2/envs/blend/lib/python3.8/site-packages/rgf/utils.py:225: UserWarning: Cannot find FastRGF executable files. FastRGF estimators will be unavailable for usage.\n", " warnings.warn(\"Cannot find FastRGF executable files. 
\"\n", - "[flaml.automl: 04-07 09:24:05] {1141} INFO - at 4.2s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", - "[flaml.automl: 04-07 09:24:05] {993} INFO - iteration 1, current learner RGF\n", - "[flaml.automl: 04-07 09:24:06] {1141} INFO - at 5.8s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", - "[flaml.automl: 04-07 09:24:06] {993} INFO - iteration 2, current learner RGF\n", - "[flaml.automl: 04-07 09:24:08] {1141} INFO - at 7.7s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", - "[flaml.automl: 04-07 09:24:08] {993} INFO - iteration 3, current learner RGF\n", - "[flaml.automl: 04-07 09:24:10] {1141} INFO - at 9.5s,\tbest RGF's error=0.3706,\tbest RGF's error=0.3706\n", - "[flaml.automl: 04-07 09:24:10] {993} INFO - iteration 4, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:10] {1141} INFO - at 9.6s,\tbest lgbm's error=0.3777,\tbest RGF's error=0.3706\n", - "[flaml.automl: 04-07 09:24:10] {993} INFO - iteration 5, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:10] {1141} INFO - at 9.7s,\tbest lgbm's error=0.3777,\tbest RGF's error=0.3706\n", - "[flaml.automl: 04-07 09:24:10] {993} INFO - iteration 6, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:10] {1141} INFO - at 9.7s,\tbest lgbm's error=0.3672,\tbest lgbm's error=0.3672\n", - "[flaml.automl: 04-07 09:24:10] {993} INFO - iteration 7, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:10] {1141} INFO - at 9.8s,\tbest lgbm's error=0.3672,\tbest lgbm's error=0.3672\n", - "[flaml.automl: 04-07 09:24:10] {993} INFO - iteration 8, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:10] {1141} INFO - at 9.9s,\tbest lgbm's error=0.3648,\tbest lgbm's error=0.3648\n", - "[flaml.automl: 04-07 09:24:10] {993} INFO - iteration 9, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.1s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 10, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.1s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 11, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.3s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 12, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.4s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 13, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.5s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 14, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.6s,\tbest lgbm's error=0.3639,\tbest lgbm's error=0.3639\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 15, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:11] {1141} INFO - at 10.9s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:24:11] {993} INFO - iteration 16, current learner RGF\n", - "[flaml.automl: 04-07 09:24:13] {1141} INFO - at 12.5s,\tbest RGF's error=0.3706,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:24:13] {993} INFO - iteration 17, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:13] {1141} INFO - at 12.8s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:24:13] {993} INFO - 
iteration 18, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:13] {1141} INFO - at 13.0s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:24:13] {993} INFO - iteration 19, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:14] {1141} INFO - at 13.3s,\tbest lgbm's error=0.3555,\tbest lgbm's error=0.3555\n", - "[flaml.automl: 04-07 09:24:14] {993} INFO - iteration 20, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:14] {1141} INFO - at 13.8s,\tbest lgbm's error=0.3524,\tbest lgbm's error=0.3524\n", - "[flaml.automl: 04-07 09:24:14] {993} INFO - iteration 21, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:15] {1141} INFO - at 14.2s,\tbest lgbm's error=0.3524,\tbest lgbm's error=0.3524\n", - "[flaml.automl: 04-07 09:24:15] {993} INFO - iteration 22, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:17] {1141} INFO - at 17.0s,\tbest lgbm's error=0.3498,\tbest lgbm's error=0.3498\n", - "[flaml.automl: 04-07 09:24:17] {993} INFO - iteration 23, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:20] {1141} INFO - at 20.0s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:20] {993} INFO - iteration 24, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:22] {1141} INFO - at 22.0s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:22] {993} INFO - iteration 25, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:23] {1141} INFO - at 22.6s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:23] {993} INFO - iteration 26, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:27] {1141} INFO - at 26.6s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:27] {993} INFO - iteration 27, current learner xgboost\n", - "[flaml.automl: 04-07 09:24:27] {1141} INFO - at 26.6s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:27] {993} INFO - iteration 28, current learner xgboost\n", - "[flaml.automl: 04-07 09:24:27] {1141} INFO - at 26.7s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:27] {993} INFO - iteration 29, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:28] {1141} INFO - at 27.6s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:28] {993} INFO - iteration 30, current learner RGF\n", - "[flaml.automl: 04-07 09:24:30] {1141} INFO - at 29.1s,\tbest RGF's error=0.3706,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:30] {993} INFO - iteration 31, current learner lgbm\n", - "[flaml.automl: 04-07 09:24:37] {1141} INFO - at 36.5s,\tbest lgbm's error=0.3487,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:37] {993} INFO - iteration 32, current learner xgboost\n", - "[flaml.automl: 04-07 09:24:37] {1141} INFO - at 36.6s,\tbest xgboost's error=0.3746,\tbest lgbm's error=0.3487\n", - "[flaml.automl: 04-07 09:24:37] {993} INFO - iteration 33, current learner lgbm\n", - "[flaml.automl: 04-07 09:25:04] {1141} INFO - at 63.6s,\tbest lgbm's error=0.3428,\tbest lgbm's error=0.3428\n", - "[flaml.automl: 04-07 09:25:04] {1187} INFO - selected model: LGBMClassifier(colsample_bytree=0.8589079860800738,\n", - " learning_rate=0.04261799404250151, max_bin=1023,\n", - " min_child_samples=99, n_estimators=318, num_leaves=27,\n", - " objective='binary', reg_alpha=0.054177182376811454,\n", - " reg_lambda=3.7064664547599495, subsample=0.9982731696185565)\n", 
- "[flaml.automl: 04-07 09:25:04] {944} INFO - fit succeeded\n" + "[flaml.automl: 05-01 16:27:30] {1141} INFO - at 2.8s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", + "[flaml.automl: 05-01 16:27:30] {993} INFO - iteration 1, current learner RGF\n", + "[flaml.automl: 05-01 16:27:31] {1141} INFO - at 4.1s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", + "[flaml.automl: 05-01 16:27:31] {993} INFO - iteration 2, current learner RGF\n", + "[flaml.automl: 05-01 16:27:32] {1141} INFO - at 5.3s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", + "[flaml.automl: 05-01 16:27:32] {993} INFO - iteration 3, current learner RGF\n", + "[flaml.automl: 05-01 16:27:33] {1141} INFO - at 6.6s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", + "[flaml.automl: 05-01 16:27:33] {993} INFO - iteration 4, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:33] {1141} INFO - at 6.7s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:27:33] {993} INFO - iteration 5, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:33] {1141} INFO - at 6.8s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:27:33] {993} INFO - iteration 6, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 6.8s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 7, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 6.9s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 8, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 7.0s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 9, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 7.0s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 10, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 7.2s,\tbest lgbm's error=0.3765,\tbest lgbm's error=0.3765\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 11, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 7.4s,\tbest lgbm's error=0.3686,\tbest lgbm's error=0.3686\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 12, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:34] {1141} INFO - at 7.5s,\tbest lgbm's error=0.3686,\tbest lgbm's error=0.3686\n", + "[flaml.automl: 05-01 16:27:34] {993} INFO - iteration 13, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:35] {1141} INFO - at 7.8s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:27:35] {993} INFO - iteration 14, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:35] {1141} INFO - at 8.0s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:27:35] {993} INFO - iteration 15, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:35] {1141} INFO - at 8.1s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:27:35] {993} INFO - iteration 16, current learner RGF\n", + "[flaml.automl: 05-01 16:27:36] {1141} INFO - at 9.2s,\tbest RGF's error=0.3840,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 05-01 16:27:36] {993} INFO - iteration 17, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:36] {1141} INFO - at 9.5s,\tbest lgbm's error=0.3603,\tbest lgbm's 
error=0.3603\n", + "[flaml.automl: 05-01 16:27:36] {993} INFO - iteration 18, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:37] {1141} INFO - at 9.8s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:27:37] {993} INFO - iteration 19, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:37] {1141} INFO - at 10.2s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:27:37] {993} INFO - iteration 20, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:37] {1141} INFO - at 10.5s,\tbest lgbm's error=0.3603,\tbest lgbm's error=0.3603\n", + "[flaml.automl: 05-01 16:27:37] {993} INFO - iteration 21, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:38] {1141} INFO - at 10.8s,\tbest lgbm's error=0.3518,\tbest lgbm's error=0.3518\n", + "[flaml.automl: 05-01 16:27:38] {993} INFO - iteration 22, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:38] {1141} INFO - at 11.0s,\tbest lgbm's error=0.3518,\tbest lgbm's error=0.3518\n", + "[flaml.automl: 05-01 16:27:38] {993} INFO - iteration 23, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:39] {1141} INFO - at 11.8s,\tbest lgbm's error=0.3504,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:27:39] {993} INFO - iteration 24, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:39] {1141} INFO - at 12.1s,\tbest lgbm's error=0.3504,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:27:39] {993} INFO - iteration 25, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:43] {1141} INFO - at 16.1s,\tbest lgbm's error=0.3504,\tbest lgbm's error=0.3504\n", + "[flaml.automl: 05-01 16:27:43] {993} INFO - iteration 26, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:48] {1141} INFO - at 21.0s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:48] {993} INFO - iteration 27, current learner xgboost\n", + "[flaml.automl: 05-01 16:27:48] {1141} INFO - at 21.0s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:48] {993} INFO - iteration 28, current learner xgboost\n", + "[flaml.automl: 05-01 16:27:48] {1141} INFO - at 21.1s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:48] {993} INFO - iteration 29, current learner RGF\n", + "[flaml.automl: 05-01 16:27:49] {1141} INFO - at 22.3s,\tbest RGF's error=0.3762,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:49] {993} INFO - iteration 30, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:53] {1141} INFO - at 25.9s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:53] {993} INFO - iteration 31, current learner lgbm\n", + "[flaml.automl: 05-01 16:27:59] {1141} INFO - at 32.2s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:59] {993} INFO - iteration 32, current learner xgboost\n", + "[flaml.automl: 05-01 16:27:59] {1141} INFO - at 32.2s,\tbest xgboost's error=0.3765,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:59] {993} INFO - iteration 33, current learner rf\n", + "[flaml.automl: 05-01 16:27:59] {1141} INFO - at 32.4s,\tbest rf's error=0.4052,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:59] {993} INFO - iteration 34, current learner rf\n", + "[flaml.automl: 05-01 16:27:59] {1141} INFO - at 32.6s,\tbest rf's error=0.4052,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:27:59] {993} INFO - iteration 35, current learner rf\n", + "[flaml.automl: 05-01 
16:28:00] {1141} INFO - at 32.8s,\tbest rf's error=0.4012,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 36, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:00] {1141} INFO - at 32.9s,\tbest xgboost's error=0.3746,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 37, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:00] {1141} INFO - at 32.9s,\tbest xgboost's error=0.3689,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 38, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:00] {1141} INFO - at 33.0s,\tbest xgboost's error=0.3689,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 39, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:00] {1141} INFO - at 33.1s,\tbest xgboost's error=0.3617,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 40, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:00] {1141} INFO - at 33.2s,\tbest xgboost's error=0.3610,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 41, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:00] {1141} INFO - at 33.3s,\tbest xgboost's error=0.3610,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:00] {993} INFO - iteration 42, current learner RGF\n", + "[flaml.automl: 05-01 16:28:01] {1141} INFO - at 34.5s,\tbest RGF's error=0.3762,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:01] {993} INFO - iteration 43, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:01] {1141} INFO - at 34.6s,\tbest xgboost's error=0.3610,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:01] {993} INFO - iteration 44, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:01] {1141} INFO - at 34.7s,\tbest xgboost's error=0.3595,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:01] {993} INFO - iteration 45, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:02] {1141} INFO - at 34.8s,\tbest xgboost's error=0.3595,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:02] {993} INFO - iteration 46, current learner lgbm\n", + "[flaml.automl: 05-01 16:28:05] {1141} INFO - at 37.8s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:05] {993} INFO - iteration 47, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:05] {1141} INFO - at 38.0s,\tbest xgboost's error=0.3590,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:05] {993} INFO - iteration 48, current learner lgbm\n", + "[flaml.automl: 05-01 16:28:13] {1141} INFO - at 45.9s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:13] {993} INFO - iteration 49, current learner rf\n", + "[flaml.automl: 05-01 16:28:13] {1141} INFO - at 46.1s,\tbest rf's error=0.3926,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:13] {993} INFO - iteration 50, current learner xgboost\n", + "[flaml.automl: 05-01 16:28:13] {1141} INFO - at 46.3s,\tbest xgboost's error=0.3590,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:13] {993} INFO - iteration 51, current learner rf\n", + "[flaml.automl: 05-01 16:28:13] {1141} INFO - at 46.5s,\tbest rf's error=0.3926,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:13] {993} INFO - iteration 52, current learner rf\n", + "[flaml.automl: 05-01 16:28:13] {1141} INFO - at 46.7s,\tbest rf's error=0.3926,\tbest lgbm's 
error=0.3427\n", + "[flaml.automl: 05-01 16:28:13] {993} INFO - iteration 53, current learner lgbm\n", + "[flaml.automl: 05-01 16:28:15] {1141} INFO - at 48.5s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:15] {993} INFO - iteration 54, current learner RGF\n", + "[flaml.automl: 05-01 16:28:16] {1141} INFO - at 49.6s,\tbest RGF's error=0.3762,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:16] {993} INFO - iteration 55, current learner rf\n", + "[flaml.automl: 05-01 16:28:17] {1141} INFO - at 49.8s,\tbest rf's error=0.3926,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:17] {993} INFO - iteration 56, current learner lgbm\n", + "[flaml.automl: 05-01 16:28:19] {1141} INFO - at 52.4s,\tbest lgbm's error=0.3427,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:23] {1164} INFO - retrain lgbm for 3.4s\n", + "[flaml.automl: 05-01 16:28:23] {993} INFO - iteration 57, current learner rf\n", + "[flaml.automl: 05-01 16:28:23] {1141} INFO - at 56.0s,\tbest rf's error=0.3926,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:26] {1164} INFO - retrain rf for 3.1s\n", + "[flaml.automl: 05-01 16:28:26] {993} INFO - iteration 58, current learner rf\n", + "[flaml.automl: 05-01 16:28:26] {1141} INFO - at 59.2s,\tbest rf's error=0.3926,\tbest lgbm's error=0.3427\n", + "[flaml.automl: 05-01 16:28:27] {1164} INFO - retrain rf for 1.0s\n", + "[flaml.automl: 05-01 16:28:27] {1187} INFO - selected model: LGBMClassifier(colsample_bytree=0.5793842857429541,\n", + " learning_rate=0.10431691413559704, max_bin=31,\n", + " min_child_samples=24, n_estimators=63, num_leaves=69,\n", + " objective='binary', reg_alpha=0.04072860923394475,\n", + " reg_lambda=1.6480344418782087, subsample=0.8895588746662894)\n", + "[flaml.automl: 05-01 16:28:27] {944} INFO - fit succeeded\n" ] } ], @@ -859,7 +869,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "flaml accuracy = 0.6704166234074426\n" + "flaml accuracy = 0.6720406982780357\n" ] } ], @@ -990,7 +1000,7 @@ "metadata": { "kernelspec": { "name": "python3", - "display_name": "Python 3.8.0 64-bit ('blend': conda)", + "display_name": "Python 3.8.0 64-bit", "metadata": { "interpreter": { "hash": "0cfea3304185a9579d09e0953576b57c8581e46e6ebc6dfeb681bc5a511f7544" @@ -1007,7 +1017,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.0" + "version": "3.8.0-final" } }, "nbformat": 4, diff --git a/notebook/flaml_azureml.ipynb b/notebook/flaml_azureml.ipynb index c9458fcc1f..a4cc64f9ea 100644 --- a/notebook/flaml_azureml.ipynb +++ b/notebook/flaml_azureml.ipynb @@ -191,7 +191,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}\nBest accuracy on validation data: 0.6229\nTraining duration of best run: 0.9182 s\n" + "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}\nBest accuracy on validation data: 0.6229\nTraining duration of best run: 1.288 s\n" ] } ], @@ -348,8 +348,8 @@ "output_type": "display_data", "data": { "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAfU0lEQVR4nO3deZwdVZ338c+XsItsJiiQQMJDgrugLQroCIxARDYVEVxGcYRxHByVkRFcGXiYFz64PDpGmcCg4ogIDMaIYGRkU9Z0ZE2YYAgoHdQ0IVGWSEj4zh9VDZemurvS6erb3fm+X6/76ltVp+79nXT6/u45p+oc2SYiIqK3DdodQEREjExJEBERUSkJIiIiKiVBREREpSSIiIiolAQRERGVkiAiBkHSGyUtbHccEU1KgohRR9L9kt7czhhs/9L2bk29vqSDJF0n6RFJ3ZKulXRYU+8XUSUJIqKCpHFtfO8jgYuB84GJwAuBzwOHDuK1JCl/5zEo+Y8TY4akDSSdLOleScskXSRp25bjF0v6g6Q/ld/OX9Zy7DuSviXpckmPAfuVLZVPSrqjPOeHkjYty+8rqavl/D7Llsf/WdLvJT0o6UOSLGnXijoI+Apwuu1zbf/J9lO2r7V9XFnmVEn/2XLO5PL1Niy3r5F0hqTrgceBkyR19nqfT0iaXT7fRNKXJP1O0h8lnS1ps3X8dcQYkAQRY8lHgSOANwE7AMuBGS3HrwCmAtsBvwa+3+v8dwNnAM8HflXuOwqYDkwBXgl8oJ/3rywraTpwIvBmYFdg335eYzdgEnBJP2XqeB9wPEVdzgZ2kzS15fi7gQvK52cC04Ddy/h2pGixxHouCSLGkg8Dn7HdZfsJ4FTgyJ5v1rbPs/1Iy7FXSdqq5fwf276+/Mb+l3Lf120/aPth4CcUH6J96avsUcC3bc+3/Xj53n15Qfnz93Ur3YfvlO+32vafgB8DxwCUieLFwOyyxXI88AnbD9t+BPhX4Oh1fP8YA5IgYizZGfiRpBWSVgB3A2uAF0oaJ+nMsvvpz8D95TnjW85/oOI1/9Dy/HFgi37ev6+yO/R67ar36bGs/Ll9P2Xq6P0eF1AmCIrWw6wyWU0ANgfmtfy7/azcH+u5JIgYSx4A3mJ765bHpraXUHwoHk7RzbMVMLk8Ry3nNzW18e8pBpt7TOqn7EKKeryjnzKPUXyo93hRRZnedbkSmCBpd4pE0dO99BCwEnhZy7/ZVrb7S4SxnkiCiNFqI0mbtjw2pOhrP0PSzgCSJkg6vCz/fOAJim/om1N0owyXi4BjJb1E0ubA5/oq6GL+/ROBz0k6VtKW5eD7GyTNLIvdBvyVpJ3KLrJTBgrA9pMUV0adBWxLkTCw/RRwDvBVSdsBSNpR0kGDrm2MGUkQMVpdTvHNt+dxKvA1YDbwc0mPADcBryvLnw/8FlgCLCiPDQvbVwBfB64GFrW89xN9lL8EeBfwQeBB4I/A/6UYR8D2lcAPgTuAecBlNUO5gKIFdbHt1S37P9UTV9n99t8Ug+WxnlMWDIoYXpJeAtwFbNLrgzpiREkLImIYSHpbeb/BNsAXgZ8kOcRIlwQRMTz+DlgK3EtxZdXftzeciIGliykiIiqlBREREZU2bHcAQ2X8+PGePHlyu8OIiBhV5s2b95Dtyhsjx0yCmDx5Mp2dnQMXjIiIp0n6bV/H0sUUERGVkiAiIqJSEkRERFRKgoiIiEpJEBERUSkJIiIiKiVBREREpSSIiIiolAQRERGVkiAiIqJSEkRERFRKgoiIiEpJEBERUSkJIiIiKiVBREREpSSIiIiolAQRERGVkiAiIqJSowlC0nRJCyUtknRyH2WOkrRA0nxJF7TsXyPptvIxu8k4IyLiuRpbk1rSOGAGcADQBcyVNNv2gpYyU4FTgH1sL5e0XctLrLS9e1PxRURE/5psQewJLLK92PYq4ELg8F5ljgNm2F4OYHtpg/FERMRaaDJB7Ag80LLdVe5rNQ2YJul6STdJmt5ybFNJneX+I6reQNLxZZnO7u7uoY0+ImI911gX01q8/1RgX2AicJ2kV9heAexse4mkXYCrJN1p+97Wk23PBGYCdHR0eHhDj4gY25psQSwBJrVsTyz3teoCZtt+0vZ9wD0UCQPbS8qfi4FrgD0ajDUiInppMkHMBaZKmiJpY+BooPfVSLMoWg9IGk/R5bRY0jaSNmnZvw+wgIiIGDaNdTHZXi3pBGAOMA44z/Z8SacBnbZnl8cOlLQAWAOcZHuZpL2Bf5f0FEUSO7P16qeIiGie7LHRdd/R0eHOzs52hxERMapImme7o+pY7qSOiIhKSRAREVEpCSIiIiolQURERKU
kiIiIqJQEERERlZIgIiKiUhJERERUSoKIiIhKSRAREVEpCSIiIiolQURERKUkiIiIqJQEERERlZIgIiKiUhJERERUSoKIiIhKSRAREVEpCSIiIiolQURERKUkiIiIqJQEERERlZIgIiKiUqMJQtJ0SQslLZJ0ch9ljpK0QNJ8SRf0OralpC5J32gyzoiIeK4Nm3phSeOAGcABQBcwV9Js2wtaykwFTgH2sb1c0na9XuZ04LqmYoyIiL412YLYE1hke7HtVcCFwOG9yhwHzLC9HMD20p4Dkl4DvBD4eYMxRkREH5pMEDsCD7Rsd5X7Wk0Dpkm6XtJNkqYDSNoA+DLwyf7eQNLxkjoldXZ3dw9h6BER0e5B6g2BqcC+wDHAOZK2Bj4CXG67q7+Tbc+03WG7Y8KECY0HGxGxPmlsDAJYAkxq2Z5Y7mvVBdxs+0ngPkn3UCSMvYA3SvoIsAWwsaRHbVcOdEdExNBrsgUxF5gqaYqkjYGjgdm9ysyiaD0gaTxFl9Ni2++xvZPtyRTdTOcnOUREDK/GEoTt1cAJwBzgbuAi2/MlnSbpsLLYHGCZpAXA1cBJtpc1FVNERNQn2+2OYUh0dHS4s7Oz3WFERIwqkubZ7qg6NmALQtILhj6kiIgY6ep0Md0k6WJJB0tS4xFFRMSIUCdBTANmAu8DfiPpXyVNazasiIhotwEThAtX2j6G4s7n9wO3SLpW0l6NRxgREW0x4H0Q5RjEeylaEH8EPkpxueruwMXAlCYDjIiI9qhzo9yNwPeAI3rd2dwp6exmwoqIiHarkyB2cx/Xwtr+4hDHExERI0SdQeqfl/MjASBpG0lzGowpIiJGgDoJYoLtFT0b5dTcvddtiIiIMaZOglgjaaeeDUk7A2Pj9uuIiOhTnTGIzwC/knQtIOCNwPGNRhUREW03YIKw/TNJrwZeX+76uO2Hmg0rIiLare56EGuApcCmwEslYTtrRUdEjGF1bpT7EPAxigV/bqNoSdwI7N9saBER0U51Bqk/BrwW+K3t/YA9gBX9nxIREaNdnQTxF9t/AZC0ie3/AXZrNqyIiGi3OmMQXeWNcrOAKyUtB37bbFgREdFuda5ielv59FRJVwNbAT9rNKqIiGi7fhOEpHHAfNsvBrB97bBEFRERbdfvGITtNcDC1jupIyJi/VBnDGIbYL6kW4DHenbaPqyxqCIiou3qJIjPNR5FRESMOHUGqQc97iBpOvA1YBxwru0zK8ocBZxKMQHg7bbfXU4I+COKLrCNgH+zncWJIiKGUZ07qR/hmdlbN6b4wH7M9pYDnDcOmAEcAHQBcyXNtr2gpcxU4BRgH9vLJfVMI/57YC/bT0jaArirPPfBtaxfREQMUp0WxPN7nksScDjPTNzXnz2BRbYXl+deWJ67oKXMccCMco0JbC8tf65qKbMJ9W7oi4iIIbRWH7wuzAIOqlF8R+CBlu2ucl+racA0SddLuqnskgJA0iRJd5Sv8cWq1oOk4yV1Surs7u5em6pERMQA6nQxvb1lcwOgA/jLEL7/VGBfiskAr5P0CtsrbD8AvFLSDsAsSZfY/mPrybZnAjMBOjo6sohRRMQQqnMV06Etz1cD91N0FQ1kCTCpZXtiua9VF3Cz7SeB+yTdQ5Ew5vYUsP2gpLsoFiq6pMb7RkTEEKgzBnHsIF97LjBV0hSKxHA08O5eZWYBxwDfljSeostpsaSJwDLbKyVtA7wB+Oog44iIiEEYcAxC0nfLyfp6treRdN5A59leDZwAzAHuBi6yPV/SaZJ6brKbAyyTtAC4GjjJ9jLgJcDNkm4HrgW+ZPvOta1cREQMnuz+u+4l3Wp7j4H2tVtHR4c7OzvbHUZExKgiaZ7tjqpjda5i2qDs5ul5sW2pv1RpRESMUnU+6L8M3Cjp4nL7ncAZzYUUEREjQZ1B6vMldfLMGtRvb70bOiIixqY690G8nmJNiG+U21tKep3tmxuPLiIi2qbOGMS3gEdbth8t90VExBhWJ0HILZc62X6KDFJHRIx5dRLEYkn/KGmj8vExYHHTgUVERHvVSRAfBvamuBu6C3gdxSysERExhtW5imkpxTQZAEjaDDgEuLjPkyIiYtSrNd23pHGSDpb0PeA+4F3NhhUREe3WbwtC0psoJtg7GLgF2AfYxfbjwxBbRES0UZ8JQlIX8DuKS1o/afsRSfclOURErB/662K6BNiBojvpUEnP45m1qSMiYozrM0HY/jgwhWIupn2BhcAESUdJ2mJ4wouIiHbpdwyivEHuauBqSRtRrEV9DPBNYHzz4UWMbrNuXcJZcxby4IqV7LD1Zpx00G4csUfvpdkjRqbad0SXy4JeBlxWXuoaEf2YdesSTrn0TlY+uQaAJStWcsqlxbpXSRIxGtS6zLU32yuHOpCIseasOQufTg49Vj65hrPmLGxTRBFrZ1AJIiIG9uCK6u9Rfe2PGGmSICIassPW1T2xfe2PGGkGTBCSpkk6R9LPJV3V8xiO4CJGs5MO2o3NNhr3rH2bbTSOkw7arU0RRaydOoPUFwNnA+cAawYoGxGlnoHoXMUUo1WdBLHadhYIihiEI/bYMQkhRq06YxA/kfQRSdtL2rbn0XhkERHRVnUSxPuBk4AbgHnlo7POi0uaLmmhpEWSTu6jzFGSFkiaL+mCct/ukm4s990hKbPHRkQMszrrQUwZzAtLGgfMAA6gWGhorqTZthe0lJkKnALsY3u5pO3KQ48Df2P7N5J2AOZJmmN7xWBiiYiItTdggiin2Ph74K/KXdcA/17eWd2fPYFFtheXr3MhcDiwoKXMccAM28vh6cWJsH1PTwHbD0paCkwAkiAiIoZJnS6mbwGvoZh/6Zvl8zqD1jsCD7Rsd5X7Wk0Dpkm6XtJNkqb3fhFJewIbA/dWHDteUqekzu7u7hohRUREXXWuYnqt7Ve1bF8l6fYhfP+pFLPFTgSuk/SKnq4kSdsD3wPeb/up3ifbngnMBOjo6MhU5BERQ6hOC2KNpP/TsyFpF+rdD7EEmNSyPbHc16oLmG37Sdv3AfdQJAwkbQn8FPiM7ZtqvF9ERAyhOgniJIrpvq+RdC1wFfBPNc6bC0yVNEXSxsDRwOxeZWZRtB6QNJ6iy2lxWf5HwPm2L6lVk4iIGFJ1rmL6RXm1Uc/8AAttP1HjvNWSTgDmAOOA82zPl3Qa0Gl7dnnsQEkLKFolJ9leJum9FIPiL5D0gfIlP2D7trWtYEREDI6KNYEqDkj7275K0turjtu+tNHI1lJHR4c7O2vdnhERESVJ82x3VB3rrwXxJorupEMrjhkYUQkiIiKGVp8JwvYXyqenlQPIT5M0qJvnIiJi9KgzSP1fFfsycBwRMcb12YKQ9GLgZcBWvcYhtgQ2bTqwiIhor/7GIHYDDgG25tnjEI9QTJERERFjWH9jED8GfixpL9s3DmNMERExAtSZauNWSf9A0d30dNeS7Q82FlVERLRdnUHq7wEvAg4CrqWYMuORJoOKiIj2q5MgdrX9OeAx298F3gq8rtmwIiKi3eokiJ51H1ZIejmwFbBdP+UjImIMqDMGMVPSNs
DnKCbb2wL4fKNRRURE29WZrO/c8um1wC7NhhMRESNFfzfKndjfiba/MvThRETESNFfC+L55c/dgNfyzFoOhwK3NBlURES0X383yv0LgKTrgFfbfqTcPpVipbeIiBjD6lzF9EJgVcv2qnJfRESMYXWuYjofuEXSj8rtI4DvNBZRRESMCHWuYjpD0hXAG8tdx9q+tdmwIiKi3fq7imlL23+WtC1wf/noObat7YebDy8iItqlvxbEBRTTfc+jWGK0h8rt3BMRETGG9XcV0yHlzywvGhGxHuqvi+nV/Z1o+9dDH05ERIwU/XUxfbmfYwb2H+JYIiJiBOmvi2m/dX1xSdOBrwHjgHNtn1lR5ijgVIqkc7vtd5f7fwa8HvhVT3dXREQMnzr3QVBO8/1Snr2i3PkDnDMOmAEcAHQBcyXNtr2gpcxU4BRgH9vLJbVOI34WsDnwdzXrEhERQ2jAO6klfQH4t/KxH/D/gMNqvPaewCLbi22vAi4EDu9V5jhghu3lALaX9hyw/Quycl1ERNvUmWrjSOCvgT/YPhZ4FcWiQQPZEXigZbur3NdqGjBN0vWSbiq7pGqTdLykTkmd3d3da3NqREQMoE6CWGn7KWC1pC2BpcCkIXr/DYGpwL7AMcA5kraue7LtmbY7bHdMmDBhiEKKiAioNwbRWX5on0Nx09yjwI01zlvCsxPJxHJfqy7gZttPAvdJuociYcyt8foREdGgPlsQkmZI2sf2R2yvsH02xYDz+8uupoHMBaZKmiJpY+BonllToscsitYDksZTdDktHkQ9IiJiiPXXgrgH+JKk7YGLgB+szSR9tldLOgGYQ3GZ63m250s6Dei0Pbs8dqCkBcAa4CTbywAk/RJ4MbCFpC7gb23PGUQdIyJiEGS7/wLSzhTf/o8GNgN+QJEs7mk+vPo6Ojrc2dnZ7jAiIkYVSfNsd1QdG3CQ2vZvbX/R9h4UA8lHAHcPcYwRETHC1LkPYkNJh0r6PnAFsBB4e+ORRUREW/U3Wd8BFC2Gg4FbKG50O972Y8MUW0REtFF/g9SnUKwJ8U89dzpHRMT6o7/J+jJba0TEeqzOndQREbEeSoKIiIhKSRAREVEpCSIiIiolQURERKUkiIiIqJQEERERlZIgIiKiUhJERERUSoKIiIhKSRAREVEpCSIiIiolQURERKUkiIiIqJQEERERlZIgIiKiUhJERERUSoKIiIhKjSYISdMlLZS0SNLJfZQ5StICSfMlXdCy//2SflM+3t9knBER8Vx9rkm9riSNA2YABwBdwFxJs20vaCkzFTgF2Mf2cknblfu3Bb4AdAAG5pXnLm8q3oiIeLYmWxB7AotsL7a9CrgQOLxXmeOAGT0f/LaXlvsPAq60/XB57EpgeoOxRkREL00miB2BB1q2u8p9raYB0yRdL+kmSdPX4lwkHS+pU1Jnd3f3EIYeERHtHqTeEJgK7AscA5wjaeu6J9ueabvDdseECRMaCjEiYv3UZIJYAkxq2Z5Y7mvVBcy2/aTt+4B7KBJGnXMjIqJBTSaIucBUSVMkbQwcDczuVWYWResBSeMpupwWA3OAAyVtI2kb4MByX0REDJPGrmKyvVrSCRQf7OOA82zPl3Qa0Gl7Ns8kggXAGuAk28sAJJ1OkWQATrP9cFOxRkTEc8l2u2MYEh0dHe7s7Gx3GBERo4qkebY7qo61e5A6IiJGqCSIiIiolAQRERGVkiAiIqJSEkRERFRKgoiIiEpJEBERUSkJIiIiKiVBREREpSSIiIiolAQRERGVkiAiIqJSEkRERFRKgoiIiEpJEBERUSkJIiIiKiVBREREpSSIiIiolAQRERGVkiAiIqJSEkRERFRKgoiIiEqNJghJ0yUtlLRI0skVxz8gqVvSbeXjQy3HvijprvLxribjjIiI59qwqReWNA6YARwAdAFzJc22vaBX0R/aPqHXuW8FXg3sDmwCXCPpCtt/bireiIh4tiZbEHsCi2wvtr0KuBA4vOa5LwWus73a9mPAHcD0huKMiIgKTSaIHYEHWra7yn29vUPSHZIukTSp3Hc7MF3S5pLGA/sBk3qfKOl4SZ2SOru7u4c6/oiI9Vq7B6l/Aky2/UrgSuC7ALZ/DlwO3AD8ALgRWNP7ZNszbXfY7pgwYcLwRR0RsR5oMkEs4dnf+ieW+55me5ntJ8rNc4HXtBw7w/butg8ABNzTYKwREdFLkwliLjBV0hRJGwNHA7NbC0javmXzMODucv84SS8on78SeCXw8wZjjYiIXhq7isn2akknAHOAccB5tudLOg3otD0b+EdJhwGrgYeBD5SnbwT8UhLAn4H32l7dVKwREfFcst3uGIaEpG7gt+2OYx2MBx5qdxBDIPUYecZKXcZKPWBk1WVn25WDuGMmQYx2kjptd7Q7jnWVeow8Y6UuY6UeMHrq0u6rmCIiYoRKgoiIiEpJECPHzHYHMERSj5FnrNRlrNQDRkldMgYRERGV0oKIiIhKSRAREVEpCaJBks6TtFTSXX0cf7GkGyU9IemTvY5tXU5g+D+S7pa01/BEXW0d6/IJSfPLtT1+IGnT4Ym6Ms6B6vGecvLIOyXdIOlVLcf6Xd9kuA22LpImSbpa0oLy9/Kx4Y38OXEO+ndSHh8n6VZJlw1PxNXW8f/WiPp7f5rtPBp6AH9Fsa7FXX0c3w54LXAG8Mlex74LfKh8vjGw9WisC8UMvvcBm5XbFwEfGMH12BvYpnz+FuDm8vk44F5gl/L3cTvw0hH+O+mrLtsDry6fP59inrO21WWw9Wg5fiJwAXDZaPx9lNsj6u+955EWRINsX0cxhUhfx5fangs82bpf0lYU/9n+oyy3yvaKJmMdyGDrUtoQ2EzShsDmwIPNRDmwGvW4wfbycvMmikkmYd3WN2nEYOti+/e2f10+f4RiDrSqqfiHxTr8TpA0EXgrxWSfbTXYeozEv/ceSRAj0xSgG/h22XQ+V9Lz2h3UYNheAnwJ+B3we+BPLqZzHw3+FriifF53fZORqrUuT5M0GdgDuHmY4xms3vX4/8A/A0+1J5xBa63HiP17T4IYmTakaKp+y/YewGNA2/u8B0PSNhTftKcAOwDPk/Te9kY1MEn7UfwRf6rdsayrvuoiaQvgv4CPexQs59u7HpIOAZbantfWwNZSxe9jxP69J0GMTF1Al+2eb3WXUPwHGo3eDNxnu9v2k8ClFH2xI1Y5xfy5wOG2l5W7B1zfZCTqoy5I2ogiOXzf9qXtiq+uPuqxD3CYpPspuvz2l/SfbQqxlj7qMWL/3pMgRiDbfwAekLRbueuvgQVtDGld/A54fbl8rCjqcnebY+qTpJ0oktj7bLcuUjXg+iYjTV91KX8P/wHcbfsr7Yqvrr7qYfsU2xNtT6b4fVxle8S2Tvupx4j9e8+d1A2S9ANgX4qpff8IfIFirQtsny3pRUAnsCVFH+qjFFeT/FnS7hTfNDYGFgPHtgxwDbt1rMu/AO+iWPfjVoqrNZ54zpsMgxr1OBd4B89MHb/a5aybkg6m6PPuWd/kjOGN/tkGWxdJbwB+CdzJM333n7Z9+TCG/7R1+
Z20vMa+FFfPHTJMYT/HOv7fGlF/7z2SICIiolK6mCIiolISREREVEqCiIiISkkQERFRKQkiIiIqJUHEqCDpq5I+3rI9p7xssGf7y5JO7Of870g6snx+jaTnLBgvaSNJZ0r6jaRfq5id9i3lsfsljR9E3E+/bx/HZ0i6rZxZdWX5/DZJR0q6XNLWa/ueNWLavr+ZTyVtLOm6cu6sWI8lQcRocT3lHdiSNqC41vxlLcf3Bm5Yx/c4nWKm05fbfjVwBMVsp42x/Q+2dwcOBu61vXv5uMT2wQ1N2nYicE4/Ma0CfkFx70qsx5IgYrS4AeiZI/9lwF3AI5K2kbQJ8BLg15I+L2muirUnZpZ3DQ9I0ubAccBHe27is/1H2xdVlD2xfP27erVq/qac7/92Sd+rOO/0skUxrmZM90saL2lyuU7AdyTdI+n7kt4s6fqytbNnWf55KtYkuKWc9K2v2WbfAfysPOdlZfnbytinlmVmAe+pE2eMXWlCxqhg+0FJq8vpCvYGbqSYTXUv4E/AnbZXSfqG7dMAyg/pQ4Cf1HiLXYHfDTRpnaTXAMcCrwME3CzpWmAV8Flgb9sPSdq213lnUbRGjvXg7k7dFXgn8EGKaT/eDbwBOAz4NEVr5zMU0018sOyaukXSf9t+rCWOKcDyljvZPwx8zfb3yylEepLXXRTre8R6LC2IGE1uoEgOPQnixpbt68sy+0m6WdKdwP48uxtqKLwB+JHtx2w/SjG3zhvL97rY9kMAtlvXBfgcsJXtDw8yOUAx4eGdtp8C5gO/KF/rTmByWeZA4GRJtwHXAJsCO/V6ne0pppbucSPwaUmfAna2vbKMfw2wSlKjXWwxsiVBxGjSMw7xCopvuDdRtCD2Bm5QsZTpN4Ejbb+Cop+97vKmi4CdJG055FEX3/hf07tVsZZa5656qmX7KZ7pCRDwjpZxjJ1s954YcSUt/ya2L6BohawELpe0f0vZTYC/rEPMMcolQcRocgNFl9HDtteU39K3pkgSN/DMB99DKtY66PPqod5sP04xw+nXyq4WJE2Q9M5eRX8JHKFidtrnAW8r910FvFPSC8pzW5PBz4AzgZ82/I18DvDRnnEXSXtUlLmHZ1ocSNoFWGz768CPgVeW+18APFRO0R7rqSSIGE3upLh66aZe+/5k+6Hyip9zKFoXcyi+ua+Nz1J0vyxQsfD8ZcCzxiTKpTq/A9xCsQrbubZvtT2fYj3uayXdDnyl13kXl7HNlrTZWsZV1+kUs4feIWl+uf0s5XjEvZJ2LXcdBdxVdku9HDi/3L8f8NOG4oxRIrO5RqxnJL0NeI3tz/ZT5lLg5F5rYsR6JlcxRaxnbP+opyusStnFNivJIdKCiIiIShmDiIiISkkQERFRKQkiIiIqJUFERESlJIiIiKj0v8h+bLSgfeIMAAAAAElFTkSuQmCC\n" + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3de5xdVX338c+XcJc7CZSbXGqCSrUgI17wArZApCKoiGBtEVuotVitladQa8uDD31hrfbRmmoDpQiVIlCI0YqBys0CgUzkmtCEEFAmIAmQKCICCd/+sdeBzWHP5CTMmTOX7/v1Oq85e+219vmdPTPnd/bae68l20RERLTboNcBRETE6JQEERERjZIgIiKiURJEREQ0SoKIiIhGSRAREdEoCSJiPUh6q6RFvY4jopuSIGLMkXS/pN/uZQy2f2h7725tX9Jhkq6X9LikFZKuk/Tubr1eRJMkiIgGkib18LWPBi4Bzgd2BXYE/ho4Yj22JUn5P4/1kj+cGDckbSDpVEn3SnpU0sWStqutv0TSTyX9rHw736e27jxJX5P0PUlPAAeXI5VPS7qjtPmWpE1L/YMkDdTaD1q3rP8/kh6S9KCkP5RkSa9oeA8CvgR8zvY5tn9m+1nb19k+sdQ5XdK/1drsUba3YVm+VtKZkm4AfgmcIqm/7XX+TNLs8nwTSX8v6SeSHpb0dUmbvcRfR4wDSRAxnnwcOAp4O7AzsBKYUVt/BTAV2AH4EfDNtvYfBM4EtgT+u5QdA0wH9gReC3x4iNdvrCtpOvAp4LeBVwAHDbGNvYHdgEuHqNOJ3wNOonovXwf2ljS1tv6DwIXl+VnANGDfEt8uVEcsMcElQcR48lHgM7YHbD8FnA4c3fpmbftc24/X1v2mpK1r7b9t+4byjf1Xpewrth+0/RjwHaoP0cEMVvcY4F9tL7D9y/Lag9m+/Hyo0zc9iPPK6622/TPg28BxACVRvBKYXY5YTgL+zPZjth8H/hY49iW+fowDSRAxnuwOXC5plaRVwN3AGmBHSZMknVW6n34O3F/aTK61f6Bhmz+tPf8lsMUQrz9Y3Z3btt30Oi2Plp87DVGnE+2vcSElQVAdPcwqyWoKsDkwv7bfvl/KY4JLgojx5AHgnba3qT02tb2M6kPxSKpunq2BPUob1dp3a2jjh6hONrfsNkTdRVTv431D1HmC6kO95dca6rS/l6uAKZL2pUoUre6lR4AngX1q+2xr20MlwpggkiBirNpI0qa1x4ZUfe1nStodQNIUSUeW+lsCT1F9Q9+cqhtlpFwMnCDpVZI2Bz47WEVX4+9/CvispBMkbVVOvr9F0sxS7TbgbZJeXrrITltbALafoboy6gvAdlQJA9vPAmcD/yBpBwBJu0g6bL3fbYwbSRAxVn2P6ptv63E68GVgNnClpMeBucAbSv3zgR8Dy4CFZd2IsH0F8BXgGmBJ7bWfGqT+pcAHgI8ADwIPA/+P6jwCtq8CvgXcAcwHvtthKBdSHUFdYnt1rfwvWnGV7rf/ojpZHhOcMmFQxMiS9CrgLmCTtg/qiFElRxARI0DSe8r9BtsCnwe+k+QQo10SRMTI+CNgOXAv1ZVVf9zbcCLWLl1MERHRKEcQERHRaMNeBzBcJk+e7D322KPXYUREjCnz589/xHbjjZHjJkHsscce9Pf3r71iREQ8R9KPB1uXLqaIiGiUBBEREY2SICIiolESRERENEqCiIiIRkkQERHRKAkiIiIaJUFERESjJIiIiGiUBBEREY2SICIiolESRERENEqCiIiIRkkQERHRKAkiIiIaJUFERESjJIiIiGiUBBEREY26miAkTZe0SNISSacOUucYSQslLZB0Ya18jaTbymN2N+OMiIgX69qc1JImATOAQ4ABYJ6k2bYX1upMBU4DDrS9UtIOtU08aXvfbsUXERFD6+YRxAHAEttLbT8NXAQc2VbnRGCG7ZUAtpd3MZ6IiFgH3UwQuwAP1JYHSlndNGCapBskzZU0vbZuU0n9pfyopheQdFKp079ixYrhjT4iYoLrWhfTOrz+VOAgYFfgekmvsb0K2N32Mkl7AVdLutP2vfXGtmcCMwH6+vo8sqFHRIxv3TyCWAbsVlvetZTVDQCzbT9j+z5gMVXCwPay8nMpcC2wXxdjjYiINt1MEPOAqZL2lLQxcCzQfjXSLKqjByRNpupyWippW0mb1MoPBBYSEREjpmtdTLZXSzoZmANMAs61vUDSGUC/7dll3aGSFgJrgFNsPyrpzcA/S3qWKomdVb/6KSIiuk/2+Oi67+vrc39/f6/DiIgYUyTNt93XtC53UkdERKMkiIiIaJQEERERjZIgIiKiURJEREQ0SoKIiIhGSRAREdEoCSIiIholQURERKMkiIiIaJQEERERjZIgIiKiURJEREQ0SoKIiIhGSRAREdEoCSIiIholQURERKMkiIiIaJQEERERjZIgIiKiURJEREQ0SoKIiIhGSRAREdGoqwlC0nRJiyQtkXTqIHWOkbRQ0gJJF7at20rSgKSvdjPOiIh4sQ27tWFJk4AZwCHAADBP0mzbC2t1pgKnAQfaXilph7bNfA64vlsxRkTE4Lp5BHEAsMT2UttPAxcBR7bVORGYYXslgO3lrRWS9gd2BK7sYowRETGIbiaIXYAHassDpaxuGjBN0g2S5kqaDiBpA+CLwKeHegFJJ0nql9S/YsWKYQw9IiJ6fZJ6Q2AqcBBwHHC2pG2AjwHfsz0wVGPbM2332e6bMmVK14ONiJhIunYOAlgG7FZb3rWU1Q0AN9t+BrhP0mKqhPEm4K2SPgZsAWws6Re2G090R0TE8OvmEcQ8YKqkPSVtDBwLzG6rM4vq6AFJk6m6nJba/l3bL7e9B1U30/lJDhERI6trCcL2auBkYA5wN3Cx7QWSzpD07lJtDvCopIXANcApth/tVkwREdE52e51DMOir6/P/f39vQ4jImJMkTTfdl/TurUeQUjafvhDioiI0a6TLqa5ki6RdLgkdT2iiIgYFTpJENOAmcDvAfdI+ltJ07obVkRE9NpaE4QrV9k+jurO5+OBWyRdJ+lNXY8wIiJ6Yq33QZRzEB+iOoJ4GPg41eWq+wKXAHt2M8CIiOiNTm6Uuwm4ADiq7c7mfklf705YERHRa50kiL09yLWwtj8/zPFERMQo0clJ6ivL+EgASNpW0pwuxhQREaNAJwliiu1VrYUyNHf7vA0RETHOdJIg1kh6eWtB0u7A+Lj9OiIiBtXJOYjPAP8t6TpAwFuBk7oaVURE9NxaE4Tt70t6HfDGUvRJ2490N6yIiOi1TueDWAMsBzYFXi0J25krOiJiHOvkRrk/BD5BNeHPbVRHEjcB7+huaBER0UudnKT+BPB64Me2Dwb2A1YN3SQiIsa6ThLEr2z/CkDSJrb/B9i7u2FFRESvdXIOYqDcKDcLuErSSuDH3Q0rIiJ6rZOrmN5Tnp4u6Rpga+D7XY0qIiJ6bsgEIWkSsMD2KwFsXzciUUVERM8NeQ7C9hpgUf1O6oiImBg6OQexLbBA0i3AE6
1C2+/uWlQREdFznSSIz3Y9ioiIGHU6OUm93ucdJE0HvgxMAs6xfVZDnWOA06kGALzd9gfLgICXU3WBbQT8o+1MThQRMYI6uZP6cZ4fvXVjqg/sJ2xvtZZ2k4AZwCHAADBP0mzbC2t1pgKnAQfaXimpNYz4Q8CbbD8laQvgrtL2wXV8fxERsZ46OYLYsvVckoAjeX7gvqEcACyxvbS0vai0XVircyIwo8wxge3l5efTtTqb0NkNfRERMYzW6YPXlVnAYR1U3wV4oLY8UMrqpgHTJN0gaW7pkgJA0m6S7ijb+HzT0YOkkyT1S+pfsWLFuryViIhYi066mN5bW9wA6AN+NYyvPxU4iGowwOslvcb2KtsPAK+VtDMwS9Klth+uN7Y9E5gJ0NfXl0mMIiKGUSdXMR1Re74auJ+qq2htlgG71ZZ3LWV1A8DNtp8B7pO0mCphzGtVsP2gpLuoJiq6tIPXjYiIYdDJOYgT1nPb84CpkvakSgzHAh9sqzMLOA74V0mTqbqclkraFXjU9pOStgXeAvzDesYRERHrYa3nICR9owzW11reVtK5a2tnezVwMjAHuBu42PYCSWdIat1kNwd4VNJC4BrgFNuPAq8CbpZ0O3Ad8Pe271zXNxcREetP9tBd95Jutb3f2sp6ra+vz/39/b0OIyJiTJE033Zf07pOrmLaoHTztDa2HZ1PVRoREWNUJx/0XwRuknRJWX4/cGb3QoqIiNGgk5PU50vq5/k5qN9bvxs6IiLGp07ug3gj1ZwQXy3LW0l6g+2bux5dRET0TCfnIL4G/KK2/ItSFhER41gnCUKuXepk+1lykjoiYtzrJEEslfSnkjYqj08AS7sdWERE9FYnCeKjwJup7oYeAN5ANQprRESMY51cxbScapgMACRtBrwLuGTQRhERMeZ1NNy3pEmSDpd0AXAf8IHuhhUREb025BGEpLdTDbB3OHALcCCwl+1fjkBsERHRQ4MmCEkDwE+oLmn9tO3HJd2X5BARMTEM1cV0KbAzVXfSEZJexvNzU0dExDg3aIKw/UlgT6qxmA4CFgFTJB0jaYuRCS8iInplyHMQ5Qa5a4BrJG1ENRf1ccA/AZO7H17E2Dbr1mV8Yc4iHlz1JDtvsxmnHLY3R+3XPjV7xOjU8R3RZVrQ7wLfLZe6RsQQZt26jNMuu5Mnn1kDwLJVT3LaZdW8V0kSMRZ0dJlrO9tPDncgEePNF+Ysei45tDz5zBq+MGdRjyKKWDfrlSAiYu0eXNX8PWqw8ojRJgkiokt23qa5J3aw8ojRZq0JQtI0SWdLulLS1a3HSAQXMZadctjebLbRpBeUbbbRJE45bO8eRRSxbjo5SX0J8HXgbGDNWupGRNE6EZ2rmGKs6iRBrLadCYIi1sNR++2ShBBjVifnIL4j6WOSdpK0XevR9cgiIqKnOkkQxwOnADcC88ujv5ONS5ouaZGkJZJOHaTOMZIWSlog6cJStq+km0rZHZIyemxExAjrZD6IPddnw5ImATOAQ6gmGponabbthbU6U4HTgANtr5S0Q1n1S+D3bd8jaWdgvqQ5tletTywREbHu1pogyhAbfwy8rRRdC/xzubN6KAcAS2wvLdu5CDgSWFircyIww/ZKeG5yImwvblWw/aCk5cAUIAkiImKEdNLF9DVgf6rxl/6pPO/kpPUuwAO15YFSVjcNmCbpBklzJU1v34ikA4CNgXsb1p0kqV9S/4oVKzoIKSIiOtXJVUyvt/2bteWrJd0+jK8/lWq02F2B6yW9ptWVJGkn4ALgeNvPtje2PROYCdDX15ehyCMihlEnRxBrJP16a0HSXnR2P8QyYLfa8q6lrG4AmG37Gdv3AYupEgaStgL+E/iM7bkdvF5ERAyjThLEKVTDfV8r6TrgauDPO2g3D5gqaU9JGwPHArPb6syiOnpA0mSqLqelpf7lwPm2L+3onURExLDq5CqmH5SrjVrjAyyy/VQH7VZLOhmYA0wCzrW9QNIZQL/t2WXdoZIWUh2VnGL7UUkfojopvr2kD5dNftj2bev6BiMiYv2omhOoYYX0DttXS3pv03rbl3U1snXU19fn/v6Obs+IiIhC0nzbfU3rhjqCeDtVd9IRDesMjKoEERERw2vQBGH7b8rTM8oJ5OdIWq+b5yIiYuzo5CT1fzSU5cRxRMQ4N+gRhKRXAvsAW7edh9gK2LTbgUVERG8NdQ5ib+BdwDa88DzE41RDZERExDg21DmIbwPflvQm2zeNYEwRETEKdDLUxq2S/oSqu+m5riXbH+laVBER0XOdnKS+APg14DDgOqohMx7vZlAREdF7nSSIV9j+LPCE7W8AvwO8obthRUREr3WSIFrzPqyS9BvA1sAOQ9SPiIhxoJNzEDMlbQt8lmqwvS2Av+5qVBER0XOdDNZ3Tnl6HbBXd8OJiIjRYqgb5T41VEPbXxr+cCIiYrQY6ghiy/Jzb+D1PD+XwxHALd0MKiIiem+oG+X+L4Ck64HX2X68LJ9ONdNbRESMY51cxbQj8HRt+elSFhER41gnVzGdD9wi6fKyfBRwXtciioiIUaGTq5jOlHQF8NZSdILtW7sbVkRE9NpQVzFtZfvnkrYD7i+P1rrtbD/W/fAiIqJXhjqCuJBquO/5VFOMtqgs556IiIhxbKirmN5VfmZ60YiICWioLqbXDdXQ9o+GP5yIiBgthupi+uIQ6wy8Y5hjiYiIUWSoLqaDX+rGJU0HvgxMAs6xfVZDnWOA06mSzu22P1jKvw+8EfjvVndXRESMnE7ug6AM8/1qXjij3PlraTMJmAEcAgwA8yTNtr2wVmcqcBpwoO2VkurDiH8B2Bz4ow7fS0REDKO13kkt6W+AfyyPg4G/A97dwbYPAJbYXmr7aeAi4Mi2OicCM2yvBLC9vLXC9g/IzHURET3TyVAbRwO/BfzU9gnAb1JNGrQ2uwAP1JYHSlndNGCapBskzS1dUh2TdJKkfkn9K1asWJemERGxFp0kiCdtPwuslrQVsBzYbZhef0NgKnAQcBxwtqRtOm1se6btPtt9U6ZMGaaQIiICOjsH0V8+tM+mumnuF8BNHbRbxgsTya6lrG4AuNn2M8B9khZTJYx5HWw/IiK6aNAjCEkzJB1o+2O2V9n+OtUJ5+NLV9PazAOmStpT0sbAsTw/p0TLLKqjByRNpupyWroe7yMiIobZUEcQi4G/l7QTcDHw7+sySJ/t1ZJOBuZQXeZ6ru0Fks4A+m3PLusOlbQQWAOcYvtRAEk/BF4JbCFpAPgD23PW4z1GRMR6kO2hK0i7U337PxbYDPh3qmSxuPvhda6vr8/9/f29DiMiYkyRNN92X9O6tZ6ktv1j25+3vR/VieSjgLuHOcaIiBhlOrkPYkNJR0j6JnAFsAh4b9cji4iInhpqsL5DqI4YDgduobrR7STbT4xQbBER0UNDnaQ+jWpOiD9v3ekcERETx1CD9WW01oiICayTO6kjImICSoKIiIhGSRAREdEoCSIiIholQURERKMkiIiIaJQEERERjZIgIiKiURJEREQ0SoKIiIhGSRAREdEoCSIiIholQURER
QUmrJb2c6mjhJqrRNd8E/Ay40/bTkr5q+wyA8iH9LuA7HbzEK4Cf+PmBDxtJ2p9qrJw3UM29cLOk64Cngb8C3mz7EZVJk2rtvkB1NHKC1+/u1FcA76e6k3oe1cByb6EaXPAvqY52PkM1x8VHStfULZL+y/YTtTj2BFa2kiDVHbxftv3NMjxGK3ndRTXwW0xgOYKIseRGquTQShA31ZZvKHUOVjUL2J1U4/nv07Shl+AtVAOlPWH7F8BlwFvLa11i+xEAv3Ck2M9SDeHx0fVMDgD32b7T1aCBC4AflG3dSTX8AlRjM50q6TaqQec2BV7etp2dgBW15ZuAv5T0F1Sjmj5Z4l8DPN0aTysmpiSIGEta5yFeQ/UNdy7VEcSbgRslbQr8E9XMWq+h6mfftMNtLwFeru4Mqz0P2L/9qGIdPVV7/mxt+Vme7wkQ8L7aeYyX2767bbStqZ0AAAFaSURBVDtPUtsnti+kOgp5EviepHfU6m4C/OolxBxjXBJEjCU3UnUZPeZqCO7HgG2oksSNPP/B94iq+TIGvXqonauZuf4F+HLpakHSFEnvb6v6Q+AoSZuXUWrfU8quBt4vafvStp4Mvg+cBfxnl7+RzwE+3jrvImm/hjqLef6IA0l7AUttf4VqtNLXlvLtgUdcG1I6Jp4kiBhL7qS6emluW9nPbD9Srvg5m+roYg7VN/d18VdU3S8LVU0o/13gBeckXE33eh7VKJ83A+fYvtX2AuBM4DpJtwNfamt3SYlttqTN1jGuTn0O2Ai4Q9KCsvwC5XzEvZJeUYqOAe4q3VK/QTVdKcDBVHM5xASW0VwjJhhJ7wH2t/1XQ9S5DDjV9uKRiyxGm1zFFDHB2L681RXWpHSxzUpyiBxBREREo5yDiIiIRkkQERHRKAkiIiIaJUFERESjJIiIiGj0vxTlm8ZMzT1/AAAAAElFTkSuQmCC\n" }, "metadata": { "needs_background": "light" @@ -372,7 +372,7 @@ "metadata": { "kernelspec": { "name": "python3", - "display_name": "Python 3.8.0 64-bit ('blend': conda)", + "display_name": "Python 3.8.0 64-bit", "metadata": { "interpreter": { "hash": "0cfea3304185a9579d09e0953576b57c8581e46e6ebc6dfeb681bc5a511f7544" @@ -389,7 +389,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.0" + "version": "3.8.0-final" } }, "nbformat": 4, diff --git a/notebook/flaml_finetune_transformer.ipynb b/notebook/flaml_finetune_transformer.ipynb index 444df831d6..01718b9cc5 100644 --- a/notebook/flaml_finetune_transformer.ipynb +++ b/notebook/flaml_finetune_transformer.ipynb @@ -11,11 +11,13 @@ }, { "cell_type": "code", - "execution_count": 102, - "metadata": {}, + "execution_count": 1, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "!pip install torch transformers datasets ipywidgets flaml[blendsearch,ray];" + "#!pip install torch transformers datasets ipywidgets flaml[blendsearch,ray];" ] }, { @@ -27,7 +29,7 @@ }, { "cell_type": "code", - "execution_count": 103, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -36,7 +38,7 @@ }, { "cell_type": "code", - "execution_count": 104, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -45,7 +47,7 @@ }, { "cell_type": "code", - "execution_count": 105, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -54,7 +56,7 @@ }, { "cell_type": "code", - "execution_count": 106, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -65,7 +67,7 @@ ] }, "metadata": {}, - "execution_count": 106 + "execution_count": 5 } ], "source": [ @@ -81,7 +83,7 @@ }, { "cell_type": "code", - "execution_count": 107, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -90,7 +92,7 @@ }, { "cell_type": "code", - "execution_count": 108, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -99,14 +101,14 @@ }, { "cell_type": "code", - "execution_count": 109, + "execution_count": 8, "metadata": {}, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ - "Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n" + "Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n" ] } ], @@ -116,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 110, + "execution_count": 9, "metadata": {}, "outputs": [], "source": 
[ @@ -128,16 +130,64 @@ }, { "cell_type": "code", - "execution_count": 111, + "execution_count": 10, "metadata": {}, "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "HBox(children=(FloatProgress(value=0.0, max=9.0), HTML(value='')))", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "ecc66e6795f848e0a41e6cf1ce37bdf2" + } + }, + "metadata": {} + }, { "output_type": "stream", - "name": "stderr", + "name": "stdout", "text": [ - "Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c3dd50f05994d4a5.arrow\n", - "Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-f2290a23c3c6f190.arrow\n", - "Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-6868a7b57fb52895.arrow\n" + "\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": "HBox(children=(FloatProgress(value=0.0, max=2.0), HTML(value='')))", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "2d33fc70b80b403080ad8c0e77ed1891" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": "HBox(children=(FloatProgress(value=0.0, max=2.0), HTML(value='')))", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "d2ab3feb1a354187abb2dded0ead404f" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\n" ] } ], @@ -147,7 +197,7 @@ }, { "cell_type": "code", - "execution_count": 112, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -180,7 +230,7 @@ ] }, "metadata": {}, - "execution_count": 112 + "execution_count": 11 } ], "source": [ @@ -196,7 +246,7 @@ }, { "cell_type": "code", - "execution_count": 113, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -205,7 +255,7 @@ }, { "cell_type": "code", - "execution_count": 114, + "execution_count": 13, "metadata": {}, "outputs": [ { @@ -227,7 +277,7 @@ }, { "cell_type": "code", - "execution_count": 115, + "execution_count": 14, "metadata": {}, "outputs": [ { @@ -350,7 +400,7 @@ ] }, "metadata": {}, - "execution_count": 115 + "execution_count": 14 } ], "source": [ @@ -366,7 +416,7 @@ }, { "cell_type": "code", - "execution_count": 116, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ @@ -375,7 +425,7 @@ }, { "cell_type": "code", - "execution_count": 117, + "execution_count": 16, "metadata": {}, "outputs": [ { @@ -428,7 +478,7 @@ ] }, "metadata": {}, - "execution_count": 117 + "execution_count": 16 } ], "source": [ @@ -437,7 +487,7 @@ }, { "cell_type": "code", - "execution_count": 118, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -457,7 +507,7 @@ }, { "cell_type": "code", - "execution_count": 119, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -467,7 +517,7 @@ }, { "cell_type": "code", - "execution_count": 120, + "execution_count": 19, "metadata": {}, "outputs": [], "source": [ @@ -479,7 +529,7 @@ }, { "cell_type": "code", - "execution_count": 121, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ 
-495,9 +545,42 @@ }, { "cell_type": "code", - "execution_count": 122, + "execution_count": 21, "metadata": {}, - "outputs": [], + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "/home/chiw/.local/lib/python3.8/site-packages/torch/nn/parallel/_functions.py:65: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\n warnings.warn('Was asked to gather along dimension 0, but all '\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": "", + "text/html": "\n
\n \n \n \n [ 2/804 : < :, Epoch 0.00/3]\n
\n \n \n \n \n \n \n \n \n \n
StepTraining Loss

" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "/home/chiw/.local/lib/python3.8/site-packages/torch/nn/parallel/_functions.py:65: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\n warnings.warn('Was asked to gather along dimension 0, but all '\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "TrainOutput(global_step=804, training_loss=0.3209413462017306, metrics={'train_runtime': 115.5328, 'train_samples_per_second': 6.959, 'total_flos': 238363718990580.0, 'epoch': 3.0, 'init_mem_cpu_alloc_delta': 2336600064, 'init_mem_gpu_alloc_delta': 268953088, 'init_mem_cpu_peaked_delta': 257929216, 'init_mem_gpu_peaked_delta': 0, 'train_mem_cpu_alloc_delta': 2381066240, 'train_mem_gpu_alloc_delta': 806788096, 'train_mem_cpu_peaked_delta': 186974208, 'train_mem_gpu_peaked_delta': 550790144})" + ] + }, + "metadata": {}, + "execution_count": 21 + } + ], "source": [ "trainer.train()" ] @@ -518,7 +601,7 @@ }, { "cell_type": "code", - "execution_count": 123, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -587,7 +670,7 @@ }, { "cell_type": "code", - "execution_count": 124, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -604,7 +687,7 @@ }, { "cell_type": "code", - "execution_count": 125, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -631,16 +714,15 @@ }, { "cell_type": "code", - "execution_count": 126, + "execution_count": 25, "metadata": {}, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ - "2021-02-24 13:56:21,166\tINFO services.py:1173 -- View the Ray dashboard at \u001b[1m\u001b[32mhttp://127.0.0.1:8265\u001b[39m\u001b[22m\n", - "2021-02-24 13:56:21,951\tWARNING optuna.py:126 -- You passed a `space` parameter to that contained unresolved search space definitions. should however be instantiated with fully configured search spaces only. To use Ray Tune's automatic search space conversion, pass the space definition as part of the `config` argument to `tune.run()` instead.\n", - "\u001b[32m[I 2021-02-24 13:56:21,955]\u001b[0m A new study created in memory with name: optuna\u001b[0m\n", + "2021-05-07 02:35:57,130\tINFO services.py:1172 -- View the Ray dashboard at \u001b[1m\u001b[32mhttp://127.0.0.1:8265\u001b[39m\u001b[22m\n", + "2021-05-07 02:35:58,044\tWARNING function_runner.py:540 -- Function checkpointing is disabled. This may result in unexpected behavior when using checkpointing features or certain schedulers. To enable, set the train function arguments to be `func(config, checkpoint_dir=None)`.\n", "Tuning started...\n" ] }, @@ -648,7 +730,7 @@ "output_type": "display_data", "data": { "text/plain": "", - "text/html": "== Status ==
Memory usage on this node: 11.7/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 1/infinite (1 RUNNING)

" + "text/html": "== Status ==
Memory usage on this node: 26.0/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 1/infinite (1 RUNNING)

" }, "metadata": {} }, @@ -656,24 +738,32 @@ "output_type": "stream", "name": "stderr", "text": [ - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m {'train_runtime': 37.2833, 'train_samples_per_second': 7.188, 'epoch': 1.0}\n", - "Trial train_distilbert_21b2c490 reported matthews_correlation=0.00 with parameters={'num_train_epochs': 1, 'learning_rate': 5.61151641533451e-06, 'adam_epsilon': 7.969454818643929e-08, 'adam_beta1': 0.9390788489441669, 'adam_beta2': 0.99186521389353}.\n" + "\u001b[2m\u001b[36m(pid=886303)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.0/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 21b2c490 with matthews_correlation=0.0 and parameters={'num_train_epochs': 1, 'learning_rate': 5.61151641533451e-06, 'adam_epsilon': 7.969454818643929e-08, 'adam_beta1': 0.9390788489441669, 'adam_beta2': 0.99186521389353}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 2/infinite (1 PENDING, 1 RUNNING)

" + "text/html": "== Status ==
Memory usage on this node: 30.9/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 2/infinite (1 PENDING, 1 RUNNING)

" }, "metadata": {} }, @@ -681,26 +771,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_21b2c490 completed. Last result: loss=0.5786514282226562,matthews_correlation=0.0\n", - "\u001b[2m\u001b[36m(pid=29589)\u001b[0m {'eval_loss': 0.5786514282226562, 'eval_matthews_correlation': 0.0, 'eval_runtime': 1.8133, 'eval_samples_per_second': 575.184, 'epoch': 1.0}\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m {'train_runtime': 205.6814, 'train_samples_per_second': 8.469, 'epoch': 6.5}\n", - "Trial train_distilbert_21b2c491 reported matthews_correlation=0.51 with parameters={'num_train_epochs': 6.496661243646011, 'learning_rate': 3.1345403715761375e-05, 'adam_epsilon': 1.2428131101359459e-08, 'adam_beta1': 0.9100859688137786, 'adam_beta2': 0.9850788361346603}.\n" + "Trial train_distilbert_a0c303d0 completed. Last result: loss=0.5879864692687988,matthews_correlation=0.0\n", + "\u001b[2m\u001b[36m(pid=886302)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.5/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 21b2c491 with matthews_correlation=0.5093030018169853 and parameters={'num_train_epochs': 6.496661243646011, 'learning_rate': 3.1345403715761375e-05, 'adam_epsilon': 1.2428131101359459e-08, 'adam_beta1': 0.9100859688137786, 'adam_beta2': 0.9850788361346603}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 3/infinite (1 PENDING, 1 RUNNING, 1 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.2/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 3/infinite (1 PENDING, 1 RUNNING, 1 TERMINATED)

" }, "metadata": {} }, @@ -708,26 +805,32 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_21b2c491 completed. Last result: loss=0.9910964965820312,matthews_correlation=0.5093030018169853\n", - "\u001b[2m\u001b[36m(pid=29588)\u001b[0m {'eval_loss': 0.9910964965820312, 'eval_matthews_correlation': 0.5093030018169853, 'eval_runtime': 1.8366, 'eval_samples_per_second': 567.883, 'epoch': 6.5}\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m {'train_runtime': 37.2801, 'train_samples_per_second': 7.189, 'epoch': 1.0}\n", - "Trial train_distilbert_3f0da820 reported matthews_correlation=0.00 with parameters={'num_train_epochs': 1.0, 'learning_rate': 5.265428651017862e-06, 'adam_epsilon': 1e-07, 'adam_beta1': 0.9093950363089345, 'adam_beta2': 0.9937145453421068}.\n" + "Trial train_distilbert_a0c303d1 completed. Last result: loss=0.6030182838439941,matthews_correlation=0.0\n", + "\u001b[2m\u001b[36m(pid=886305)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.7/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 21b2c491 with matthews_correlation=0.5093030018169853 and parameters={'num_train_epochs': 6.496661243646011, 'learning_rate': 3.1345403715761375e-05, 'adam_epsilon': 1.2428131101359459e-08, 'adam_beta1': 0.9100859688137786, 'adam_beta2': 0.9850788361346603}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 4/infinite (1 PENDING, 1 RUNNING, 2 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.4/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 4/infinite (1 PENDING, 1 RUNNING, 2 TERMINATED)

" }, "metadata": {} }, @@ -735,26 +838,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_3f0da820 completed. Last result: loss=0.5775065422058105,matthews_correlation=0.0\n", - "\u001b[2m\u001b[36m(pid=29591)\u001b[0m {'eval_loss': 0.5775065422058105, 'eval_matthews_correlation': 0.0, 'eval_runtime': 1.7547, 'eval_samples_per_second': 594.388, 'epoch': 1.0}\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m {'train_runtime': 197.3016, 'train_samples_per_second': 8.591, 'epoch': 6.32}\n", - "Trial train_distilbert_c1106c22 reported matthews_correlation=0.55 with parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}.\n" + "Trial train_distilbert_c39b2ef0 completed. Last result: loss=0.5865175724029541,matthews_correlation=0.0\n", + "\u001b[2m\u001b[36m(pid=886304)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 15.9/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: c1106c22 with matthews_correlation=0.5451837431775948 and parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 5/infinite (1 PENDING, 1 RUNNING, 3 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.7/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 5/infinite (1 PENDING, 1 RUNNING, 3 TERMINATED)

" }, "metadata": {} }, @@ -762,26 +872,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_c1106c22 completed. Last result: loss=0.8939734101295471,matthews_correlation=0.5451837431775948\n", - "\u001b[2m\u001b[36m(pid=29590)\u001b[0m {'eval_loss': 0.8939734101295471, 'eval_matthews_correlation': 0.5451837431775948, 'eval_runtime': 1.8277, 'eval_samples_per_second': 570.669, 'epoch': 6.32}\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m {'train_runtime': 105.8952, 'train_samples_per_second': 7.847, 'epoch': 3.1}\n", - "Trial train_distilbert_de95f5e6 reported matthews_correlation=0.48 with parameters={'num_train_epochs': 3.097601049860023, 'learning_rate': 3.015866216468612e-05, 'adam_epsilon': 6.092346813998939e-09, 'adam_beta1': 0.9628888910610184, 'adam_beta2': 0.9832186589335725}.\n" + "Trial train_distilbert_f00776e2 completed. Last result: loss=0.5813134908676147,matthews_correlation=0.0\n", + "\u001b[2m\u001b[36m(pid=892770)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.3/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: c1106c22 with matthews_correlation=0.5451837431775948 and parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 6/infinite (1 PENDING, 1 RUNNING, 4 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 32.0/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 6/infinite (1 PENDING, 1 RUNNING, 4 TERMINATED)

" }, "metadata": {} }, @@ -789,27 +906,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_de95f5e6 completed. Last result: loss=0.5720887780189514,matthews_correlation=0.48369222635456827\n", - "\u001b[2m\u001b[36m(pid=8754)\u001b[0m {'eval_loss': 0.5720887780189514, 'eval_matthews_correlation': 0.48369222635456827, 'eval_runtime': 1.8561, 'eval_samples_per_second': 561.936, 'epoch': 3.1}\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m {'train_runtime': 330.1466, 'train_samples_per_second': 8.732, 'epoch': 10.76}\n", - "Trial train_distilbert_5bb0a1fc reported matthews_correlation=0.53 with parameters={'num_train_epochs': 10.755455977982155, 'learning_rate': 5.858103269448852e-05, 'adam_epsilon': 5.045085830072572e-08, 'adam_beta1': 0.845137019185222, 'adam_beta2': 0.9882166289933315}.\n", - "\u001b[2m\u001b[36m(pid=12777)\u001b[0m {'eval_loss': 1.5075323581695557, 'eval_matthews_correlation': 0.5282404248888111, 'eval_runtime': 1.7504, 'eval_samples_per_second': 595.853, 'epoch': 10.76}\n" + "Trial train_distilbert_11ab3900 completed. 
Last result: loss=0.5855756998062134,matthews_correlation=0.0\n", + "\u001b[2m\u001b[36m(pid=897725)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 15.9/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: c1106c22 with matthews_correlation=0.5451837431775948 and parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 7/infinite (1 PENDING, 1 RUNNING, 5 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 30.9/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 7/infinite (1 PENDING, 1 RUNNING, 5 TERMINATED)

" }, "metadata": {} }, @@ -817,25 +940,31 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_5bb0a1fc completed. Last result: loss=1.5075323581695557,matthews_correlation=0.5282404248888111\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m {'train_runtime': 182.3796, 'train_samples_per_second': 8.724, 'epoch': 5.94}\n", - "Trial train_distilbert_a247fb2e reported matthews_correlation=0.54 with parameters={'num_train_epochs': 5.933063389003551, 'learning_rate': 1.845204084769373e-05, 'adam_epsilon': 1.372505378696326e-08, 'adam_beta1': 0.8534841230874768, 'adam_beta2': 0.9858475457825921}.\n" + "Trial train_distilbert_353025b6 completed. Last result: loss=0.5316324830055237,matthews_correlation=0.38889272875750597\n", + "\u001b[2m\u001b[36m(pid=907288)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.4/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: c1106c22 with matthews_correlation=0.5451837431775948 and parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 8/infinite (1 PENDING, 1 RUNNING, 6 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.3/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 8/infinite (1 PENDING, 1 RUNNING, 6 TERMINATED)

" }, "metadata": {} }, @@ -843,26 +972,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_a247fb2e completed. Last result: loss=0.6974263191223145,matthews_correlation=0.5399503104637741\n", - "\u001b[2m\u001b[36m(pid=39770)\u001b[0m {'eval_loss': 0.6974263191223145, 'eval_matthews_correlation': 0.5399503104637741, 'eval_runtime': 1.8585, 'eval_samples_per_second': 561.204, 'epoch': 5.94}\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m {'train_runtime': 189.7562, 'train_samples_per_second': 8.59, 'epoch': 6.08}\n", - "Trial train_distilbert_6e9e8ec2 reported matthews_correlation=0.52 with parameters={'num_train_epochs': 6.078693989748608, 'learning_rate': 1.8357895987910622e-05, 'adam_epsilon': 1.5849146381322022e-08, 'adam_beta1': 0.8904370071918882, 'adam_beta2': 0.9844583428325462}.\n" + "Trial train_distilbert_5728a1de completed. Last result: loss=0.5385054349899292,matthews_correlation=0.2805581766595423\n", + "\u001b[2m\u001b[36m(pid=908756)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 17.1/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: c1106c22 with matthews_correlation=0.5451837431775948 and parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 9/infinite (1 PENDING, 1 RUNNING, 7 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.6/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 9/infinite (1 PENDING, 1 RUNNING, 7 TERMINATED)

" }, "metadata": {} }, @@ -870,28 +1006,31 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_6e9e8ec2 completed. Last result: loss=0.7202959656715393,matthews_correlation=0.5185394246694179\n", - "\u001b[2m\u001b[36m(pid=7123)\u001b[0m {'eval_loss': 0.7202959656715393, 'eval_matthews_correlation': 0.5185394246694179, 'eval_runtime': 1.6051, 'eval_samples_per_second': 649.814, 'epoch': 6.08}\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m {'train_runtime': 329.789, 'train_samples_per_second': 8.448, 'epoch': 10.4}\n", - "Trial train_distilbert_e30fd860 reported matthews_correlation=0.54 with parameters={'num_train_epochs': 10.39182109947885, 'learning_rate': 6.762356226483751e-05, 'adam_epsilon': 5.0195217227379364e-08, 'adam_beta1': 0.8951148565195837, 'adam_beta2': 0.9914274194005184}.\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m {'eval_loss': 1.505250334739685, 'eval_matthews_correlation': 0.5353569722427551, 'eval_runtime': 1.8314, 'eval_samples_per_second': 569.522, 'epoch': 10.4}\n", - "\u001b[2m\u001b[36m(pid=14798)\u001b[0m \n" + "Trial train_distilbert_9394c2e2 completed. 
Last result: loss=0.5391769409179688,matthews_correlation=0.3272948213494272\n", + "\u001b[2m\u001b[36m(pid=912284)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 15.9/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: c1106c22 with matthews_correlation=0.5451837431775948 and parameters={'num_train_epochs': 6.324445967486241, 'learning_rate': 2.9412189965562634e-05, 'adam_epsilon': 2.256452443236495e-08, 'adam_beta1': 0.880402156178546, 'adam_beta2': 0.9869155143904086}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 10/infinite (1 PENDING, 1 RUNNING, 8 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.9/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 10/infinite (1 PENDING, 1 RUNNING, 8 TERMINATED)

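These status tables track the tuning objective, the Matthews correlation coefficient (MCC) of the fine-tuned model on the CoLA validation split. For reference, a minimal standalone sketch of how that metric is computed with scikit-learn; the label arrays below are invented purely for illustration:

    # Matthews correlation coefficient: ranges from -1 to 1, with 0 meaning
    # chance-level agreement between predictions and gold labels.
    from sklearn.metrics import matthews_corrcoef

    y_true = [1, 0, 1, 1, 0, 1]  # hypothetical gold CoLA acceptability labels
    y_pred = [1, 0, 0, 1, 0, 1]  # hypothetical model predictions
    print(matthews_corrcoef(y_true, y_pred))  # ~0.707 for this toy example

An MCC of exactly 0, as reported by several one-epoch trials in the final tables further down, usually means the under-trained model collapsed to predicting a single class.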
" }, "metadata": {} }, @@ -899,25 +1038,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_e30fd860 completed. Last result: loss=1.505250334739685,matthews_correlation=0.5353569722427551\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m {'train_runtime': 259.759, 'train_samples_per_second': 9.078, 'epoch': 8.8}\n", - "Trial train_distilbert_5bddb1ae reported matthews_correlation=0.55 with parameters={'num_train_epochs': 8.797715187430134, 'learning_rate': 2.72412577596775e-05, 'adam_epsilon': 7.4151444539151255e-09, 'adam_beta1': 0.869942964703411, 'adam_beta2': 0.9852670758817403}.\n" + "Trial train_distilbert_b6543fec completed. Last result: loss=0.5275164842605591,matthews_correlation=0.37917684067701946\n", + "\u001b[2m\u001b[36m(pid=914582)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.8/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 5bddb1ae with matthews_correlation=0.5492247863049868 and parameters={'num_train_epochs': 8.797715187430134, 'learning_rate': 2.72412577596775e-05, 'adam_epsilon': 7.4151444539151255e-09, 'adam_beta1': 0.869942964703411, 'adam_beta2': 0.9852670758817403}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 11/infinite (1 PENDING, 1 RUNNING, 9 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.0/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 11/infinite (1 PENDING, 1 RUNNING, 9 TERMINATED)

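Throughout the run, "Resources requested: 4/4 CPUs, 4/4 GPUs" indicates that trials execute one at a time, each reserving all four V100s of the node through Ray; the counter drops to 0/4 once every trial has terminated. A hedged sketch of how such a reservation is typically requested at launch; the resources_per_trial and use_ray arguments are assumptions about this run's setup and do not appear in this hunk:

    import flaml

    # Assumed launch options: each trial occupies the whole 4-CPU / 4-GPU node,
    # which is why the status line alternates between 4/4 and 0/4 GPUs in use.
    analysis = flaml.tune.run(
        train_distilbert,                          # training function from earlier cells
        search_alg=flaml.CFO(space=search_space,   # search space from earlier cells
                             metric=HP_METRIC, mode=MODE),
        resources_per_trial={"cpu": 4, "gpu": 4},  # assumption: full-node trials
        use_ray=True,                              # Ray backend, hence the (pid=...) prefixes
    )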
" }, "metadata": {} }, @@ -925,26 +1072,30 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_5bddb1ae completed. Last result: loss=1.0900800228118896,matthews_correlation=0.5492247863049868\n", - "\u001b[2m\u001b[36m(pid=27867)\u001b[0m {'eval_loss': 1.0900800228118896, 'eval_matthews_correlation': 0.5492247863049868, 'eval_runtime': 1.6198, 'eval_samples_per_second': 643.889, 'epoch': 8.8}\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m {'train_runtime': 251.169, 'train_samples_per_second': 8.544, 'epoch': 8.01}\n", - "Trial train_distilbert_27da6108 reported matthews_correlation=0.55 with parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}.\n" + "Trial train_distilbert_0071f998 completed. Last result: loss=0.5162246823310852,matthews_correlation=0.417156672319181\n", + "\u001b[2m\u001b[36m(pid=918301)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.1/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 12/infinite (1 PENDING, 1 RUNNING, 10 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.2/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 12/infinite (1 PENDING, 1 RUNNING, 10 TERMINATED)

" }, "metadata": {} }, @@ -952,26 +1103,35 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_27da6108 completed. Last result: loss=0.8646725416183472,matthews_correlation=0.550740569901542\n", - "\u001b[2m\u001b[36m(pid=38727)\u001b[0m {'eval_loss': 0.8646725416183472, 'eval_matthews_correlation': 0.550740569901542, 'eval_runtime': 1.7453, 'eval_samples_per_second': 597.588, 'epoch': 8.01}\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m {'train_runtime': 150.7963, 'train_samples_per_second': 8.641, 'epoch': 4.86}\n", - "Trial train_distilbert_ca4167f2 reported matthews_correlation=0.55 with parameters={'num_train_epochs': 4.8609021804212205, 'learning_rate': 3.0765755916918634e-05, 'adam_epsilon': 3.2784085089990583e-09, 'adam_beta1': 0.9001311340399742, 'adam_beta2': 0.9865549219923857}.\n" + "Trial train_distilbert_2f830be6 completed. Last result: loss=0.5516289472579956,matthews_correlation=0.06558874629318973\n", + "\u001b[2m\u001b[36m(pid=920414)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.7/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 13/infinite (1 PENDING, 1 RUNNING, 11 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.7/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 13/infinite (1 PENDING, 1 RUNNING, 11 TERMINATED)

" }, "metadata": {} }, @@ -979,26 +1139,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_ca4167f2 completed. Last result: loss=0.7426601052284241,matthews_correlation=0.5474713423103301\n", - "\u001b[2m\u001b[36m(pid=8698)\u001b[0m {'eval_loss': 0.7426601052284241, 'eval_matthews_correlation': 0.5474713423103301, 'eval_runtime': 1.6955, 'eval_samples_per_second': 615.172, 'epoch': 4.86}\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m {'train_runtime': 168.574, 'train_samples_per_second': 8.56, 'epoch': 5.38}\n", - "Trial train_distilbert_6776ad66 reported matthews_correlation=0.50 with parameters={'num_train_epochs': 5.381515555130151, 'learning_rate': 1.4923436298344364e-05, 'adam_epsilon': 4.718609673277113e-08, 'adam_beta1': 0.8855356638050199, 'adam_beta2': 0.9817714112199931}.\n" + "Trial train_distilbert_7ce03f12 completed. Last result: loss=0.523731529712677,matthews_correlation=0.45354879777314566\n", + "\u001b[2m\u001b[36m(pid=925520)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 15.7/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 14/infinite (1 PENDING, 1 RUNNING, 12 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 32.3/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 14/infinite (1 PENDING, 1 RUNNING, 12 TERMINATED)

" }, "metadata": {} }, @@ -1006,27 +1173,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "\u001b[2m\u001b[36m(pid=26401)\u001b[0m {'eval_loss': 0.6062898635864258, 'eval_matthews_correlation': 0.5039642659976749, 'eval_runtime': 1.8481, 'eval_samples_per_second': 564.358, 'epoch': 5.38}\n", - "Trial train_distilbert_6776ad66 completed. Last result: loss=0.6062898635864258,matthews_correlation=0.5039642659976749\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m {'train_runtime': 267.304, 'train_samples_per_second': 8.694, 'epoch': 8.67}\n", - "Trial train_distilbert_c904a63c reported matthews_correlation=0.54 with parameters={'num_train_epochs': 8.670157213614129, 'learning_rate': 3.589310669581693e-05, 'adam_epsilon': 1e-07, 'adam_beta1': 0.9159421419473668, 'adam_beta2': 0.9870278515925665}.\n", - "\u001b[2m\u001b[36m(pid=36494)\u001b[0m {'eval_loss': 1.15528404712677, 'eval_matthews_correlation': 0.541934635424655, 'eval_runtime': 1.8046, 'eval_samples_per_second': 577.975, 'epoch': 8.67}\n" + "Trial train_distilbert_aaab0508 completed. 
Last result: loss=0.5112878680229187,matthews_correlation=0.4508496945113286\n", + "\u001b[2m\u001b[36m(pid=929827)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.4/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 15/infinite (1 PENDING, 1 RUNNING, 13 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.2/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 15/infinite (1 PENDING, 1 RUNNING, 13 TERMINATED)

" }, "metadata": {} }, @@ -1034,26 +1207,37 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_c904a63c completed. Last result: loss=1.15528404712677,matthews_correlation=0.541934635424655\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m {'train_runtime': 401.1267, 'train_samples_per_second': 8.808, 'epoch': 13.18}\n", - "Trial train_distilbert_34cd23b2 reported matthews_correlation=0.54 with parameters={'num_train_epochs': 13.180325143440442, 'learning_rate': 1.1392631517503339e-05, 'adam_epsilon': 8.551227707433237e-08, 'adam_beta1': 0.8917360114521684, 'adam_beta2': 0.9933954023113967}.\n", - "\u001b[2m\u001b[36m(pid=7128)\u001b[0m {'eval_loss': 0.9118097424507141, 'eval_matthews_correlation': 0.5361146089547957, 'eval_runtime': 1.6269, 'eval_samples_per_second': 641.089, 'epoch': 13.18}\n" + "Trial train_distilbert_14262454 completed. Last result: loss=0.5350601673126221,matthews_correlation=0.40085080763525827\n", + "\u001b[2m\u001b[36m(pid=934238)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.4/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 16/infinite (1 PENDING, 1 RUNNING, 14 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.8/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 16/infinite (1 PENDING, 1 RUNNING, 14 TERMINATED)

" }, "metadata": {} }, @@ -1061,25 +1245,33 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_34cd23b2 completed. Last result: loss=0.9118097424507141,matthews_correlation=0.5361146089547957\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m {'train_runtime': 261.9267, 'train_samples_per_second': 8.548, 'epoch': 8.35}\n", - "Trial train_distilbert_dbc01c60 reported matthews_correlation=0.53 with parameters={'num_train_epochs': 8.351740081197375, 'learning_rate': 4.14474164779562e-05, 'adam_epsilon': 2.5536744573294183e-08, 'adam_beta1': 0.9010345773126118, 'adam_beta2': 0.98213801095907}.\n" + "Trial train_distilbert_6d211fe6 completed. Last result: loss=0.609851062297821,matthews_correlation=0.5268023551875569\n", + "\u001b[2m\u001b[36m(pid=942628)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 16.0/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 17/infinite (1 PENDING, 1 RUNNING, 15 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 31.1/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 17/infinite (1 PENDING, 1 RUNNING, 15 TERMINATED)

" }, "metadata": {} }, @@ -1087,28 +1279,218 @@ "output_type": "stream", "name": "stdout", "text": [ - "Trial train_distilbert_dbc01c60 completed. Last result: loss=1.270609974861145,matthews_correlation=0.5331291095663535\n", - "\u001b[2m\u001b[36m(pid=23493)\u001b[0m {'eval_loss': 1.270609974861145, 'eval_matthews_correlation': 0.5331291095663535, 'eval_runtime': 1.7863, 'eval_samples_per_second': 583.876, 'epoch': 8.35}\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-bec756fc24993464.arrow\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-3b411a778de4d998.arrow\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m Loading cached processed dataset at /home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4/cache-c7231adac87a0159.arrow\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias', 'vocab_projector.weight', 'vocab_projector.bias']\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight', 'classifier.bias']\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m {'train_runtime': 307.947, 'train_samples_per_second': 8.501, 'epoch': 9.77}\n", - "2021-02-24 15:01:18,861\tINFO stopper.py:193 -- Reached timeout of 3600 seconds. Stopping all trials.\n", - "Trial train_distilbert_d1e00f7e reported matthews_correlation=0.50 with parameters={'num_train_epochs': 9.768470529742105, 'learning_rate': 7.278242504625585e-06, 'adam_epsilon': 9.024121328462365e-08, 'adam_beta1': 0.9568651413276459, 'adam_beta2': 0.9898624818542463}.\n", - "\u001b[2m\u001b[36m(pid=33982)\u001b[0m {'eval_loss': 0.6356746554374695, 'eval_matthews_correlation': 0.502884728860933, 'eval_runtime': 1.7441, 'eval_samples_per_second': 598.03, 'epoch': 9.77}\n" + "Trial train_distilbert_c980bae4 completed. 
Last result: loss=0.5422758460044861,matthews_correlation=0.32496815807366203\n", + "\u001b[2m\u001b[36m(pid=945904)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", - "text/html": "== Status ==
Memory usage on this node: 15.9/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 0/4 CPUs, 0/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 18/infinite (18 TERMINATED)

" + "text/html": "== Status ==
Memory usage on this node: 32.2/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 18/infinite (1 PENDING, 1 RUNNING, 16 TERMINATED)

" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Trial train_distilbert_6d0d29d6 completed. Last result: loss=0.9238015413284302,matthews_correlation=0.5494735380761103\n", + "\u001b[2m\u001b[36m(pid=973869)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", + "text/html": "== Status ==
Memory usage on this node: 31.2/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 19/infinite (1 PENDING, 1 RUNNING, 17 TERMINATED)

" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Trial train_distilbert_b16ea82a completed. Last result: loss=0.5334658622741699,matthews_correlation=0.4513069078434825\n", + "\u001b[2m\u001b[36m(pid=978003)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", + "text/html": "== Status ==
Memory usage on this node: 31.2/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 20/infinite (1 PENDING, 1 RUNNING, 18 TERMINATED)

" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Trial train_distilbert_eddf7cc0 completed. Last result: loss=0.9832845330238342,matthews_correlation=0.5699304939602442\n", + "\u001b[2m\u001b[36m(pid=1000417)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", + "text/html": "== Status ==
Memory usage on this node: 31.4/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 4/4 CPUs, 4/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 21/infinite (1 PENDING, 1 RUNNING, 19 TERMINATED)

" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Trial train_distilbert_43008974 completed. Last result: loss=0.8574612736701965,matthews_correlation=0.5200220944545176\n", + "\u001b[2m\u001b[36m(pid=1022436)\u001b[0m Reusing dataset glue (/home/chiw/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n", + " 0%| | 0/9 [00:00", + "text/html": "== Status ==
Memory usage on this node: 32.0/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 0/4 CPUs, 0/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 22/infinite (22 TERMINATED)

" }, "metadata": {} }, @@ -1116,7 +1498,7 @@ "output_type": "display_data", "data": { "text/plain": "", - "text/html": "== Status ==
Memory usage on this node: 15.9/251.8 GiB
Using FIFO scheduling algorithm.
Resources requested: 0/4 CPUs, 0/4 GPUs, 0.0/161.91 GiB heap, 0.0/50.63 GiB objects (0/1.0 accelerator_type:V100)
Current best trial: 27da6108 with matthews_correlation=0.550740569901542 and parameters={'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}
Result logdir: /raid/chiw/FLAML/notebook/logs/train_distilbert_2021-02-24_13-56-21
Number of trials: 18/infinite (18 TERMINATED)
Trial name  status  loc  adam_beta1  adam_beta2  adam_epsilon  learning_rate  num_train_epochs  iter  total time (s)  loss  matthews_correlation
train_distilbert_21b2c490  TERMINATED  0.939079  0.991865  7.96945e-08  5.61152e-06  1  1  46.9698  0.578651  0
train_distilbert_21b2c491  TERMINATED  0.910086  0.985079  1.24281e-08  3.13454e-05  6.49666  1  215.872  0.991096  0.509303
train_distilbert_3f0da820  TERMINATED  0.909395  0.993715  1e-07  5.26543e-06  1  1  47.3068  0.577507  0
train_distilbert_c1106c22  TERMINATED  0.880402  0.986916  2.25645e-08  2.94122e-05  6.32445  1  207.618  0.893973  0.545184
train_distilbert_de95f5e6  TERMINATED  0.962889  0.983219  6.09235e-09  3.01587e-05  3.0976  1  115.872  0.572089  0.483692
train_distilbert_5bb0a1fc  TERMINATED  0.845137  0.988217  5.04509e-08  5.8581e-05  10.7555  1  340.281  1.50753  0.52824
train_distilbert_a247fb2e  TERMINATED  0.853484  0.985848  1.37251e-08  1.8452e-05  5.93306  1  192.779  0.697426  0.53995
train_distilbert_6e9e8ec2  TERMINATED  0.890437  0.984458  1.58491e-08  1.83579e-05  6.07869  1  200.122  0.720296  0.518539
train_distilbert_e30fd860  TERMINATED  0.895115  0.991427  5.01952e-08  6.76236e-05  10.3918  1  339.615  1.50525  0.535357
train_distilbert_5bddb1ae  TERMINATED  0.869943  0.985267  7.41514e-09  2.72413e-05  8.79772  1  269.864  1.09008  0.549225
train_distilbert_27da6108  TERMINATED  0.911674  0.98694  6.69698e-08  1.93183e-05  8.00568  1  261.261  0.864673  0.550741
train_distilbert_ca4167f2  TERMINATED  0.900131  0.986555  3.27841e-09  3.07658e-05  4.8609  1  161.146  0.74266  0.547471
train_distilbert_6776ad66  TERMINATED  0.885536  0.981771  4.71861e-08  1.49234e-05  5.38152  1  178.269  0.60629  0.503964
train_distilbert_c904a63c  TERMINATED  0.915942  0.987028  1e-07  3.58931e-05  8.67016  1  277.56  1.15528  0.541935
train_distilbert_34cd23b2  TERMINATED  0.891736  0.993395  8.55123e-08  1.13926e-05  13.1803  1  410.4  0.91181  0.536115
train_distilbert_dbc01c60  TERMINATED  0.901035  0.982138  2.55367e-08  4.14474e-05  8.35174  1  272.136  1.27061  0.533129
train_distilbert_d1e00f7e  TERMINATED  0.956865  0.989862  9.02412e-08  7.27824e-06  9.76847  1  317.557  0.635675  0.502885
train_distilbert_759d8c04  TERMINATED  0.852308  0.986484  3.65877e-08  2.91155e-05  4.12326


" + "text/html": "== Status ==
Memory usage on this node: 32.0/251.6 GiB
Using FIFO scheduling algorithm.
Resources requested: 0/4 CPUs, 0/4 GPUs, 0.0/150.39 GiB heap, 0.0/47.22 GiB objects (0/1.0 accelerator_type:V100)
Result logdir: /home/chiw/FLAML/notebook/logs/train_distilbert_2021-05-07_02-35-58
Number of trials: 22/infinite (22 TERMINATED)
Trial name  status  loc  adam_beta1  adam_beta2  adam_epsilon  learning_rate  num_train_epochs  iter  total time (s)  loss  matthews_correlation
train_distilbert_a0c303d0  TERMINATED  0.939079  0.991865  7.96945e-08  5.61152e-06  1  1  55.6909  0.587986  0
train_distilbert_a0c303d1  TERMINATED  0.811036  0.997214  2.05111e-09  2.05134e-06  1.44427  1  71.7663  0.603018  0
train_distilbert_c39b2ef0  TERMINATED  0.909395  0.993715  1e-07  5.26543e-06  1  1  53.7619  0.586518  0
train_distilbert_f00776e2  TERMINATED  0.968763  0.990019  4.38943e-08  5.98035e-06  1.02723  1  56.8382  0.581313  0
train_distilbert_11ab3900  TERMINATED  0.962198  0.991838  7.09296e-08  5.06608e-06  1  1  54.0231  0.585576  0
train_distilbert_353025b6  TERMINATED  0.91596  0.991892  8.95426e-08  6.21568e-06  2.15443  1  98.3233  0.531632  0.388893
train_distilbert_5728a1de  TERMINATED  0.926933  0.993146  1e-07  1.00902e-05  1  1  55.3726  0.538505  0.280558
train_distilbert_9394c2e2  TERMINATED  0.928106  0.990614  4.49975e-08  3.45674e-06  2.72935  1  121.388  0.539177  0.327295
train_distilbert_b6543fec  TERMINATED  0.876896  0.992098  1e-07  7.01176e-06  1.59538  1  76.0244  0.527516  0.379177
train_distilbert_0071f998  TERMINATED  0.955024  0.991687  7.39776e-08  5.50998e-06  2.90939  1  126.871  0.516225  0.417157
train_distilbert_2f830be6  TERMINATED  0.886931  0.989628  7.6127e-08  4.37646e-06  1.53338  1  73.8934  0.551629  0.0655887
train_distilbert_7ce03f12  TERMINATED  0.984053  0.993956  8.70144e-08  7.82557e-06  4.08775  1  174.027  0.523732  0.453549
train_distilbert_aaab0508  TERMINATED  0.940707  0.993946  1e-07  8.91979e-06  3.40243  1  146.249  0.511288  0.45085
train_distilbert_14262454  TERMINATED  0.99  0.991696  4.60093e-08  4.83405e-06  3.4954  1  152.008  0.53506  0.400851
train_distilbert_6d211fe6  TERMINATED  0.959277  0.994556  5.40791e-08  1.17333e-05  6.64995  1  271.444  0.609851  0.526802
train_distilbert_c980bae4  TERMINATED  0.99  0.993355  1e-07  5.21929e-06  2.51275  1  111.799  0.542276  0.324968
train_distilbert_6d0d29d6  TERMINATED  0.965773  0.995182  9.9752e-08  1.15549e-05  13.694  1  527.944  0.923802  0.549474
train_distilbert_b16ea82a  TERMINATED  0.952781  0.993931  2.93182e-08  1.19145e-05  3.2293  1  139.844  0.533466  0.451307
train_distilbert_eddf7cc0  TERMINATED  0.99  0.997109  8.13498e-08  1.28515e-05  15.5807  1  614.789  0.983285  0.56993
train_distilbert_43008974  TERMINATED  0.929089  0.993258  1e-07  1.03892e-05  12.0357  1  474.387  0.857461  0.520022
train_distilbert_b3408a4e  TERMINATED  0.99  0.993809  4.67441e-08  1.10418e-05  11.9165  1  474.126  0.828205  0.526164
train_distilbert_cfbfb220  TERMINATED  0.979454  0.9999  1e-07  1.49578e-05  20.3715


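The source change diffed just below this final table moves the search space, metric, mode, and low-cost starting point out of flaml.tune.run and into an explicit flaml.CFO search-algorithm object. Spelled out together with the inspection step that produces the "n_trials=22 ... Best model parameters" summary; train_distilbert, search_space, HP_METRIC, and MODE come from earlier notebook cells, while the num_samples/time_budget_s values and the metric_analysis access are assumptions based on the printed output and Ray's ExperimentAnalysis API:

    import flaml

    analysis = flaml.tune.run(
        train_distilbert,
        search_alg=flaml.CFO(
            space=search_space,
            metric=HP_METRIC,   # judging from the logs: "matthews_correlation"
            mode=MODE,          # judging from the logs: "max"
            low_cost_partial_config={"num_train_epochs": 1}),  # cheap initial point
        report_intermediate_result=False,
        num_samples=-1,         # assumption: no trial cap, hence "22/infinite"
        time_budget_s=3600,     # assumption: wall-clock budget that ends the run
    )

    best_trial = analysis.get_best_trial(HP_METRIC, MODE, "all")
    print("n_trials =", len(analysis.trials))           # 22
    print("best matthews_correlation =",
          best_trial.metric_analysis[HP_METRIC][MODE])  # 0.5699 (trial eddf7cc0)
    print("best config =", best_trial.config)

Note how CFO starts from the cheap one-epoch region (early trials with num_train_epochs near 1 and matthews_correlation 0) before spending the budget on the expensive 12 to 20 epoch trials that reach the best scores; the last row, cfbfb220, has no result columns, presumably because it was still running when the budget expired.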
" }, "metadata": {} }, @@ -1124,7 +1506,7 @@ "output_type": "stream", "name": "stderr", "text": [ - "2021-02-24 15:01:18,957\tINFO tune.py:448 -- Total run time: 3897.00 seconds (3896.97 seconds for the tuning loop).\n" + "2021-05-07 03:42:30,035\tINFO tune.py:450 -- Total run time: 3992.00 seconds (3991.90 seconds for the tuning loop).\n" ] } ], @@ -1138,12 +1520,11 @@ "print(\"Tuning started...\")\n", "analysis = flaml.tune.run(\n", " train_distilbert,\n", - " config=search_space,\n", - " low_cost_partial_config={\n", - " \"num_train_epochs\": 1,\n", - " },\n", - " metric=HP_METRIC,\n", - " mode=MODE,\n", + " search_alg=flaml.CFO(\n", + " space=search_space,\n", + " metric=HP_METRIC,\n", + " mode=MODE,\n", + " low_cost_partial_config={\"num_train_epochs\": 1}),\n", " report_intermediate_result=False,\n", " # uncomment the following if report_intermediate_result = True\n", " # max_resource=max_num_epoch, min_resource=1,\n", @@ -1159,14 +1540,14 @@ }, { "cell_type": "code", - "execution_count": 127, + "execution_count": 26, "metadata": {}, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ - "n_trials=18\ntime=3903.5583679676056\nBest model eval matthews_correlation: 0.5507\nBest model parameters: {'num_train_epochs': 8.005678804316002, 'learning_rate': 1.931832460928058e-05, 'adam_epsilon': 6.696984191794608e-08, 'adam_beta1': 0.9116736888940158, 'adam_beta2': 0.9869397626562693}\n" + "n_trials=22\ntime=3999.769361972809\nBest model eval matthews_correlation: 0.5699\nBest model parameters: {'num_train_epochs': 15.580684188655825, 'learning_rate': 1.2851507818900338e-05, 'adam_epsilon': 8.134982521948352e-08, 'adam_beta1': 0.99, 'adam_beta2': 0.9971094424784387}\n" ] } ], @@ -1194,9 +1575,8 @@ ], "metadata": { "kernelspec": { - "name": "python3", - "display_name": "Python 3", - "language": "python" + "name": "python385jvsc74a57bd031f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6", + "display_name": "Python 3.8.5 64-bit" }, "language_info": { "codemirror_mode": { @@ -1208,7 +1588,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9-final" + "version": "3.8.5" + }, + "metadata": { + "interpreter": { + "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" + } } }, "nbformat": 4, diff --git a/notebook/flaml_lightgbm.ipynb b/notebook/flaml_lightgbm.ipynb index 4e1ee7bca9..deb32db439 100644 --- a/notebook/flaml_lightgbm.ipynb +++ b/notebook/flaml_lightgbm.ipynb @@ -58,7 +58,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": { "slideshow": { "slide_type": "subslide" @@ -69,7 +69,9 @@ { "output_type": "stream", "name": "stdout", - "text": "load dataset from./openml_ds537.pkl\nDataset name:houses\nX_train.shape: (15480, 8), y_train.shape: (15480,);\nX_test.shape: (5160, 8), y_test.shape: (5160,)\n" + "text": [ + "download dataset from openml\nDataset name: houses\nX_train.shape: (15480, 8), y_train.shape: (15480,);\nX_test.shape: (5160, 8), y_test.shape: (5160,)\n" + ] } ], "source": [ @@ -91,7 +93,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 2, "metadata": { "slideshow": { "slide_type": "slide" @@ -107,7 +109,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 3, "metadata": { "slideshow": { "slide_type": "slide" @@ -126,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 4, "metadata": { "slideshow": { "slide_type": "slide" @@ -137,7 +139,80 @@ { "output_type": 
"stream", "name": "stderr", - "text": "[flaml.automl: 04-09 19:57:58] {890} INFO - Evaluation method: cv\n[flaml.automl: 04-09 19:57:58] {606} INFO - Using RepeatedKFold\n[flaml.automl: 04-09 19:57:58] {911} INFO - Minimizing error metric: 1-r2\n[flaml.automl: 04-09 19:57:58] {930} INFO - List of ML learners in AutoML Run: ['lgbm']\n[flaml.automl: 04-09 19:57:58] {994} INFO - iteration 0, current learner lgbm\n[flaml.automl: 04-09 19:57:58] {1147} INFO - at 0.2s,\tbest lgbm's error=0.7383,\tbest lgbm's error=0.7383\n[flaml.automl: 04-09 19:57:58] {994} INFO - iteration 1, current learner lgbm\n[flaml.automl: 04-09 19:57:58] {1147} INFO - at 0.3s,\tbest lgbm's error=0.7383,\tbest lgbm's error=0.7383\n[flaml.automl: 04-09 19:57:58] {994} INFO - iteration 2, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 0.3s,\tbest lgbm's error=0.3888,\tbest lgbm's error=0.3888\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 3, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 0.4s,\tbest lgbm's error=0.3888,\tbest lgbm's error=0.3888\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 4, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 0.6s,\tbest lgbm's error=0.2657,\tbest lgbm's error=0.2657\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 5, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 0.8s,\tbest lgbm's error=0.2256,\tbest lgbm's error=0.2256\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 6, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 0.9s,\tbest lgbm's error=0.2256,\tbest lgbm's error=0.2256\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 7, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 1.1s,\tbest lgbm's error=0.2256,\tbest lgbm's error=0.2256\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 8, current learner lgbm\n[flaml.automl: 04-09 19:57:59] {1147} INFO - at 1.2s,\tbest lgbm's error=0.2256,\tbest lgbm's error=0.2256\n[flaml.automl: 04-09 19:57:59] {994} INFO - iteration 9, current learner lgbm\n[flaml.automl: 04-09 19:58:00] {1147} INFO - at 1.4s,\tbest lgbm's error=0.2256,\tbest lgbm's error=0.2256\n[flaml.automl: 04-09 19:58:00] {994} INFO - iteration 10, current learner lgbm\n[flaml.automl: 04-09 19:58:00] {1147} INFO - at 1.5s,\tbest lgbm's error=0.2256,\tbest lgbm's error=0.2256\n[flaml.automl: 04-09 19:58:00] {994} INFO - iteration 11, current learner lgbm\n[flaml.automl: 04-09 19:58:00] {1147} INFO - at 2.0s,\tbest lgbm's error=0.2099,\tbest lgbm's error=0.2099\n[flaml.automl: 04-09 19:58:00] {994} INFO - iteration 12, current learner lgbm\n[flaml.automl: 04-09 19:58:01] {1147} INFO - at 2.9s,\tbest lgbm's error=0.2099,\tbest lgbm's error=0.2099\n[flaml.automl: 04-09 19:58:01] {994} INFO - iteration 13, current learner lgbm\n[flaml.automl: 04-09 19:58:01] {1147} INFO - at 3.0s,\tbest lgbm's error=0.2099,\tbest lgbm's error=0.2099\n[flaml.automl: 04-09 19:58:01] {994} INFO - iteration 14, current learner lgbm\n[flaml.automl: 04-09 19:58:03] {1147} INFO - at 4.7s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:03] {994} INFO - iteration 15, current learner lgbm\n[flaml.automl: 04-09 19:58:04] {1147} INFO - at 5.3s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:04] {994} INFO - iteration 16, current learner lgbm\n[flaml.automl: 04-09 19:58:13] {1147} INFO - at 14.6s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:13] 
{994} INFO - iteration 17, current learner lgbm\n[flaml.automl: 04-09 19:58:14] {1147} INFO - at 15.4s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:14] {994} INFO - iteration 18, current learner lgbm\n[flaml.automl: 04-09 19:58:18] {1147} INFO - at 20.0s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:18] {994} INFO - iteration 19, current learner lgbm\n[flaml.automl: 04-09 19:58:19] {1147} INFO - at 20.7s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:19] {994} INFO - iteration 20, current learner lgbm\n[flaml.automl: 04-09 19:58:20] {1147} INFO - at 21.9s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:20] {994} INFO - iteration 21, current learner lgbm\n[flaml.automl: 04-09 19:58:20] {1147} INFO - at 22.3s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:20] {994} INFO - iteration 22, current learner lgbm\n[flaml.automl: 04-09 19:58:25] {1147} INFO - at 27.2s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:25] {994} INFO - iteration 23, current learner lgbm\n[flaml.automl: 04-09 19:58:28] {1147} INFO - at 29.6s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:28] {994} INFO - iteration 24, current learner lgbm\n[flaml.automl: 04-09 19:58:29] {1147} INFO - at 30.7s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:29] {994} INFO - iteration 25, current learner lgbm\n[flaml.automl: 04-09 19:58:30] {1147} INFO - at 31.7s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:30] {994} INFO - iteration 26, current learner lgbm\n[flaml.automl: 04-09 19:58:32] {1147} INFO - at 34.1s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:32] {994} INFO - iteration 27, current learner lgbm\n[flaml.automl: 04-09 19:58:33] {1147} INFO - at 34.6s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:33] {994} INFO - iteration 28, current learner lgbm\n[flaml.automl: 04-09 19:58:35] {1147} INFO - at 36.4s,\tbest lgbm's error=0.1644,\tbest lgbm's error=0.1644\n[flaml.automl: 04-09 19:58:35] {994} INFO - iteration 29, current learner lgbm\n[flaml.automl: 04-09 19:58:42] {1147} INFO - at 44.3s,\tbest lgbm's error=0.1604,\tbest lgbm's error=0.1604\n[flaml.automl: 04-09 19:58:42] {994} INFO - iteration 30, current learner lgbm\n[flaml.automl: 04-09 19:58:43] {1147} INFO - at 44.9s,\tbest lgbm's error=0.1604,\tbest lgbm's error=0.1604\n[flaml.automl: 04-09 19:58:43] {994} INFO - iteration 31, current learner lgbm\n[flaml.automl: 04-09 19:59:01] {1147} INFO - at 62.5s,\tbest lgbm's error=0.1604,\tbest lgbm's error=0.1604\n[flaml.automl: 04-09 19:59:01] {994} INFO - iteration 32, current learner lgbm\n[flaml.automl: 04-09 19:59:06] {1147} INFO - at 68.0s,\tbest lgbm's error=0.1604,\tbest lgbm's error=0.1604\n[flaml.automl: 04-09 19:59:06] {994} INFO - iteration 33, current learner lgbm\n[flaml.automl: 04-09 19:59:07] {1147} INFO - at 68.8s,\tbest lgbm's error=0.1604,\tbest lgbm's error=0.1604\n[flaml.automl: 04-09 19:59:07] {994} INFO - iteration 34, current learner lgbm\n[flaml.automl: 04-09 19:59:57] {1147} INFO - at 118.9s,\tbest lgbm's error=0.1604,\tbest lgbm's error=0.1604\n[flaml.automl: 04-09 19:59:57] {1187} INFO - selected model: LGBMRegressor(colsample_bytree=0.7586723794764185,\n learning_rate=0.10418050364992694, max_bin=127,\n 
min_child_samples=21, n_estimators=95, num_leaves=254,\n objective='regression', reg_alpha=0.09228337080759572,\n reg_lambda=0.46673178167010676, subsample=0.9097941662911945)\n[flaml.automl: 04-09 19:59:57] {944} INFO - fit succeeded\n" + "text": [ + "[flaml.automl: 05-01 16:54:10] {890} INFO - Evaluation method: cv\n", + "[flaml.automl: 05-01 16:54:10] {606} INFO - Using RepeatedKFold\n", + "[flaml.automl: 05-01 16:54:10] {911} INFO - Minimizing error metric: 1-r2\n", + "[flaml.automl: 05-01 16:54:10] {929} INFO - List of ML learners in AutoML Run: ['lgbm']\n", + "[flaml.automl: 05-01 16:54:10] {993} INFO - iteration 0, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:10] {1141} INFO - at 0.6s,\tbest lgbm's error=0.7383,\tbest lgbm's error=0.7383\n", + "[flaml.automl: 05-01 16:54:10] {993} INFO - iteration 1, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:10] {1141} INFO - at 0.8s,\tbest lgbm's error=0.7383,\tbest lgbm's error=0.7383\n", + "[flaml.automl: 05-01 16:54:10] {993} INFO - iteration 2, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:11] {1141} INFO - at 1.2s,\tbest lgbm's error=0.5538,\tbest lgbm's error=0.5538\n", + "[flaml.automl: 05-01 16:54:11] {993} INFO - iteration 3, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:11] {1141} INFO - at 1.5s,\tbest lgbm's error=0.3888,\tbest lgbm's error=0.3888\n", + "[flaml.automl: 05-01 16:54:11] {993} INFO - iteration 4, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:11] {1141} INFO - at 1.8s,\tbest lgbm's error=0.3888,\tbest lgbm's error=0.3888\n", + "[flaml.automl: 05-01 16:54:11] {993} INFO - iteration 5, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:12] {1141} INFO - at 2.0s,\tbest lgbm's error=0.3888,\tbest lgbm's error=0.3888\n", + "[flaml.automl: 05-01 16:54:12] {993} INFO - iteration 6, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:12] {1141} INFO - at 2.4s,\tbest lgbm's error=0.3017,\tbest lgbm's error=0.3017\n", + "[flaml.automl: 05-01 16:54:12] {993} INFO - iteration 7, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:12] {1141} INFO - at 2.9s,\tbest lgbm's error=0.2633,\tbest lgbm's error=0.2633\n", + "[flaml.automl: 05-01 16:54:12] {993} INFO - iteration 8, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:13] {1141} INFO - at 3.3s,\tbest lgbm's error=0.2633,\tbest lgbm's error=0.2633\n", + "[flaml.automl: 05-01 16:54:13] {993} INFO - iteration 9, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:13] {1141} INFO - at 3.9s,\tbest lgbm's error=0.2314,\tbest lgbm's error=0.2314\n", + "[flaml.automl: 05-01 16:54:13] {993} INFO - iteration 10, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:14] {1141} INFO - at 4.3s,\tbest lgbm's error=0.2314,\tbest lgbm's error=0.2314\n", + "[flaml.automl: 05-01 16:54:14] {993} INFO - iteration 11, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:14] {1141} INFO - at 4.6s,\tbest lgbm's error=0.2314,\tbest lgbm's error=0.2314\n", + "[flaml.automl: 05-01 16:54:14] {993} INFO - iteration 12, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:15] {1141} INFO - at 5.4s,\tbest lgbm's error=0.1954,\tbest lgbm's error=0.1954\n", + "[flaml.automl: 05-01 16:54:15] {993} INFO - iteration 13, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:16] {1141} INFO - at 6.0s,\tbest lgbm's error=0.1954,\tbest lgbm's error=0.1954\n", + "[flaml.automl: 05-01 16:54:16] {993} INFO - iteration 14, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:17] {1141} INFO - at 7.0s,\tbest lgbm's error=0.1933,\tbest lgbm's 
error=0.1933\n", + "[flaml.automl: 05-01 16:54:17] {993} INFO - iteration 15, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:17] {1141} INFO - at 7.6s,\tbest lgbm's error=0.1933,\tbest lgbm's error=0.1933\n", + "[flaml.automl: 05-01 16:54:17] {993} INFO - iteration 16, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:19] {1141} INFO - at 9.2s,\tbest lgbm's error=0.1799,\tbest lgbm's error=0.1799\n", + "[flaml.automl: 05-01 16:54:19] {993} INFO - iteration 17, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:19] {1141} INFO - at 9.8s,\tbest lgbm's error=0.1799,\tbest lgbm's error=0.1799\n", + "[flaml.automl: 05-01 16:54:19] {993} INFO - iteration 18, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:23] {1141} INFO - at 13.1s,\tbest lgbm's error=0.1799,\tbest lgbm's error=0.1799\n", + "[flaml.automl: 05-01 16:54:23] {993} INFO - iteration 19, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:23] {1141} INFO - at 13.6s,\tbest lgbm's error=0.1799,\tbest lgbm's error=0.1799\n", + "[flaml.automl: 05-01 16:54:23] {993} INFO - iteration 20, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:31] {1141} INFO - at 21.7s,\tbest lgbm's error=0.1799,\tbest lgbm's error=0.1799\n", + "[flaml.automl: 05-01 16:54:31] {993} INFO - iteration 21, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:32] {1141} INFO - at 22.6s,\tbest lgbm's error=0.1799,\tbest lgbm's error=0.1799\n", + "[flaml.automl: 05-01 16:54:32] {993} INFO - iteration 22, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:34] {1141} INFO - at 24.5s,\tbest lgbm's error=0.1722,\tbest lgbm's error=0.1722\n", + "[flaml.automl: 05-01 16:54:34] {993} INFO - iteration 23, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:36] {1141} INFO - at 26.4s,\tbest lgbm's error=0.1714,\tbest lgbm's error=0.1714\n", + "[flaml.automl: 05-01 16:54:36] {993} INFO - iteration 24, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:38] {1141} INFO - at 28.1s,\tbest lgbm's error=0.1714,\tbest lgbm's error=0.1714\n", + "[flaml.automl: 05-01 16:54:38] {993} INFO - iteration 25, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:38] {1141} INFO - at 28.4s,\tbest lgbm's error=0.1714,\tbest lgbm's error=0.1714\n", + "[flaml.automl: 05-01 16:54:38] {993} INFO - iteration 26, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:49] {1141} INFO - at 39.6s,\tbest lgbm's error=0.1582,\tbest lgbm's error=0.1582\n", + "[flaml.automl: 05-01 16:54:49] {993} INFO - iteration 27, current learner lgbm\n", + "[flaml.automl: 05-01 16:54:54] {1141} INFO - at 44.7s,\tbest lgbm's error=0.1582,\tbest lgbm's error=0.1582\n", + "[flaml.automl: 05-01 16:54:54] {993} INFO - iteration 28, current learner lgbm\n", + "[flaml.automl: 05-01 16:55:06] {1141} INFO - at 56.8s,\tbest lgbm's error=0.1582,\tbest lgbm's error=0.1582\n", + "[flaml.automl: 05-01 16:55:06] {993} INFO - iteration 29, current learner lgbm\n", + "[flaml.automl: 05-01 16:55:07] {1141} INFO - at 57.9s,\tbest lgbm's error=0.1582,\tbest lgbm's error=0.1582\n", + "[flaml.automl: 05-01 16:55:07] {993} INFO - iteration 30, current learner lgbm\n", + "[flaml.automl: 05-01 16:56:07] {1141} INFO - at 117.4s,\tbest lgbm's error=0.1582,\tbest lgbm's error=0.1582\n", + "[flaml.automl: 05-01 16:56:07] {1187} INFO - selected model: LGBMRegressor(colsample_bytree=0.7018843176351586,\n", + " learning_rate=0.05528362885527569, max_bin=255,\n", + " min_child_samples=64, n_estimators=266, num_leaves=204,\n", + " objective='regression', reg_alpha=0.005771390107656191,\n", + " 
reg_lambda=62.31073135366825)\n", + "[flaml.automl: 05-01 16:56:07] {944} INFO - fit succeeded\n" + ] } ], "source": [ @@ -158,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 5, "metadata": { "slideshow": { "slide_type": "slide" @@ -169,7 +244,9 @@ { "output_type": "stream", "name": "stdout", - "text": "Best hyperparmeter config:{'n_estimators': 95.0, 'num_leaves': 254.0, 'min_child_samples': 21.0, 'learning_rate': 0.10418050364992694, 'subsample': 0.9097941662911945, 'log_max_bin': 7.0, 'colsample_bytree': 0.7586723794764185, 'reg_alpha': 0.09228337080759572, 'reg_lambda': 0.46673178167010676}\nBest r2 on validation data: 0.8396\nTraining duration of best run: 7.868 s\n" + "text": [ + "Best hyperparmeter config: {'n_estimators': 266.0, 'num_leaves': 204.0, 'min_child_samples': 64.0, 'learning_rate': 0.05528362885527569, 'subsample': 1.0, 'log_max_bin': 8.0, 'colsample_bytree': 0.7018843176351586, 'reg_alpha': 0.005771390107656191, 'reg_lambda': 62.31073135366825}\nBest r2 on validation data: 0.8418\nTraining duration of best run: 11.19 s\n" + ] } ], "source": [ @@ -181,7 +258,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 6, "metadata": { "slideshow": { "slide_type": "slide" @@ -191,10 +268,16 @@ { "output_type": "execute_result", "data": { - "text/plain": "LGBMRegressor(colsample_bytree=0.7586723794764185,\n learning_rate=0.10418050364992694, max_bin=127,\n min_child_samples=21, n_estimators=95, num_leaves=254,\n objective='regression', reg_alpha=0.09228337080759572,\n reg_lambda=0.46673178167010676, subsample=0.9097941662911945)" + "text/plain": [ + "LGBMRegressor(colsample_bytree=0.7018843176351586,\n", + " learning_rate=0.05528362885527569, max_bin=255,\n", + " min_child_samples=64, n_estimators=266, num_leaves=204,\n", + " objective='regression', reg_alpha=0.005771390107656191,\n", + " reg_lambda=62.31073135366825)" + ] }, "metadata": {}, - "execution_count": 9 + "execution_count": 6 } ], "source": [ @@ -203,7 +286,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 7, "metadata": { "slideshow": { "slide_type": "slide" @@ -219,7 +302,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 8, "metadata": { "slideshow": { "slide_type": "slide" @@ -230,7 +313,9 @@ { "output_type": "stream", "name": "stdout", - "text": "Predicted labels[150367.25556214 263353.37798151 136897.76625025 ... 190606.68038356\n 237816.02972335 263063.11183796]\nTrue labels[136900. 241300. 200700. ... 160900. 227300. 265600.]\n" + "text": [ + "Predicted labels [149389.7449446 258417.92579444 137823.39715453 ... 211944.92125371\n 246243.4022559 277524.07243136]\nTrue labels [136900. 241300. 200700. ... 160900. 227300. 
265600.]\n" + ] } ], "source": [ @@ -242,7 +327,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 9, "metadata": { "slideshow": { "slide_type": "slide" @@ -253,7 +338,9 @@ { "output_type": "stream", "name": "stdout", - "text": "r2=0.8500929784828137\nmse=1981546944.5284543\nmae=29485.579651356835\n" + "text": [ + "r2 = 0.8491961402689281\nmse = 1993401806.32529\nmae = 29616.531139250474\n" + ] } ], "source": [ @@ -266,7 +353,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 10, "metadata": { "slideshow": { "slide_type": "subslide" @@ -277,7 +364,9 @@ { "output_type": "stream", "name": "stdout", - "text": "{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 25.0, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'log_max_bin': 10.0, 'colsample_bytree': 0.9684145930669938, 'reg_alpha': 0.001831177697321707, 'reg_lambda': 0.2790165919053839}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 25.0, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'log_max_bin': 10.0, 'colsample_bytree': 0.9684145930669938, 'reg_alpha': 0.001831177697321707, 'reg_lambda': 0.2790165919053839}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 20.0, 'num_leaves': 4.0, 'min_child_samples': 48.0, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'log_max_bin': 10.0, 'colsample_bytree': 0.9534346594834143, 'reg_alpha': 0.002208534076096185, 'reg_lambda': 0.5460627024738886}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 20.0, 'num_leaves': 4.0, 'min_child_samples': 48.0, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'log_max_bin': 10.0, 'colsample_bytree': 0.9534346594834143, 'reg_alpha': 0.002208534076096185, 'reg_lambda': 0.5460627024738886}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 15.0, 'min_child_samples': 42.0, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 15.0, 'min_child_samples': 42.0, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 22.0, 'num_leaves': 44.0, 'min_child_samples': 33.0, 'learning_rate': 0.7277554644304967, 'subsample': 0.8890322269681047, 'log_max_bin': 9.0, 'colsample_bytree': 0.8917187085424868, 'reg_alpha': 0.3477637978466495, 'reg_lambda': 0.24655709710146537}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 22.0, 'num_leaves': 44.0, 
'min_child_samples': 33.0, 'learning_rate': 0.7277554644304967, 'subsample': 0.8890322269681047, 'log_max_bin': 9.0, 'colsample_bytree': 0.8917187085424868, 'reg_alpha': 0.3477637978466495, 'reg_lambda': 0.24655709710146537}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 60.0, 'num_leaves': 72.0, 'min_child_samples': 37.0, 'learning_rate': 0.23811059538783155, 'subsample': 1.0, 'log_max_bin': 8.0, 'colsample_bytree': 0.9162072323824675, 'reg_alpha': 0.7017839907881602, 'reg_lambda': 0.23027329389914142}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 60.0, 'num_leaves': 72.0, 'min_child_samples': 37.0, 'learning_rate': 0.23811059538783155, 'subsample': 1.0, 'log_max_bin': 8.0, 'colsample_bytree': 0.9162072323824675, 'reg_alpha': 0.7017839907881602, 'reg_lambda': 0.23027329389914142}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 95.0, 'num_leaves': 254.0, 'min_child_samples': 21.0, 'learning_rate': 0.10418050364992694, 'subsample': 0.9097941662911945, 'log_max_bin': 7.0, 'colsample_bytree': 0.7586723794764185, 'reg_alpha': 0.09228337080759572, 'reg_lambda': 0.46673178167010676}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 95.0, 'num_leaves': 254.0, 'min_child_samples': 21.0, 'learning_rate': 0.10418050364992694, 'subsample': 0.9097941662911945, 'log_max_bin': 7.0, 'colsample_bytree': 0.7586723794764185, 'reg_alpha': 0.09228337080759572, 'reg_lambda': 0.46673178167010676}}\n" + "text": [ + "{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'num_leaves': 4, 'min_child_samples': 20, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 12.0, 'learning_rate': 0.25912534572860507, 'subsample': 0.9266743941610592, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013933617380144255, 'reg_lambda': 0.18096917948292954}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 12.0, 'learning_rate': 0.25912534572860507, 'subsample': 0.9266743941610592, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013933617380144255, 'reg_lambda': 0.18096917948292954}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 24.0, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'log_max_bin': 10.0, 'colsample_bytree': 0.946138073111236, 'reg_alpha': 0.0018311776973217071, 'reg_lambda': 0.27901659190538414}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'num_leaves': 4.0, 'min_child_samples': 24.0, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'log_max_bin': 10.0, 'colsample_bytree': 0.946138073111236, 'reg_alpha': 0.0018311776973217071, 'reg_lambda': 0.27901659190538414}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 4.0, 'min_child_samples': 36.0, 'learning_rate': 1.0, 'subsample': 
0.8894434216129232, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013605736901132325, 'reg_lambda': 0.1222158118565165}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'num_leaves': 4.0, 'min_child_samples': 36.0, 'learning_rate': 1.0, 'subsample': 0.8894434216129232, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013605736901132325, 'reg_lambda': 0.1222158118565165}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 20.0, 'num_leaves': 4.0, 'min_child_samples': 46.0, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'log_max_bin': 9.0, 'colsample_bytree': 0.8499027725496043, 'reg_alpha': 0.0022085340760961856, 'reg_lambda': 0.546062702473889}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 20.0, 'num_leaves': 4.0, 'min_child_samples': 46.0, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'log_max_bin': 9.0, 'colsample_bytree': 0.8499027725496043, 'reg_alpha': 0.0022085340760961856, 'reg_lambda': 0.546062702473889}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 20.0, 'num_leaves': 11.0, 'min_child_samples': 52.0, 'learning_rate': 1.0, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.7967145599266738, 'reg_alpha': 0.05680749758595097, 'reg_lambda': 2.756357095973371}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 20.0, 'num_leaves': 11.0, 'min_child_samples': 52.0, 'learning_rate': 1.0, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.7967145599266738, 'reg_alpha': 0.05680749758595097, 'reg_lambda': 2.756357095973371}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 37.0, 'num_leaves': 15.0, 'min_child_samples': 93.0, 'learning_rate': 0.6413547778096401, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.6980216487058154, 'reg_alpha': 0.020158745350617662, 'reg_lambda': 0.954042157679914}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 37.0, 'num_leaves': 15.0, 'min_child_samples': 93.0, 'learning_rate': 0.6413547778096401, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.6980216487058154, 'reg_alpha': 0.020158745350617662, 'reg_lambda': 0.954042157679914}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 107.0, 'num_leaves': 8.0, 'min_child_samples': 99.0, 'learning_rate': 0.23511987355535005, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.6531014185931541, 'reg_alpha': 0.006493597884251342, 'reg_lambda': 1.7292368007993142}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 107.0, 'num_leaves': 8.0, 'min_child_samples': 99.0, 'learning_rate': 0.23511987355535005, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.6531014185931541, 'reg_alpha': 0.006493597884251342, 'reg_lambda': 1.7292368007993142}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 186.0, 'num_leaves': 14.0, 'min_child_samples': 50.0, 'learning_rate': 0.39220715578198356, 'subsample': 1.0, 'log_max_bin': 6.0, 'colsample_bytree': 0.5689279468453852, 'reg_alpha': 0.011708252438810487, 'reg_lambda': 0.9012915451024669}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 186.0, 'num_leaves': 14.0, 'min_child_samples': 50.0, 'learning_rate': 0.39220715578198356, 'subsample': 1.0, 'log_max_bin': 6.0, 'colsample_bytree': 0.5689279468453852, 'reg_alpha': 
0.011708252438810487, 'reg_lambda': 0.9012915451024669}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 201.0, 'num_leaves': 29.0, 'min_child_samples': 56.0, 'learning_rate': 0.14827217992006533, 'subsample': 0.9860465287537004, 'log_max_bin': 6.0, 'colsample_bytree': 0.6195974449388929, 'reg_alpha': 0.0009765625, 'reg_lambda': 6.847622848964946}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 201.0, 'num_leaves': 29.0, 'min_child_samples': 56.0, 'learning_rate': 0.14827217992006533, 'subsample': 0.9860465287537004, 'log_max_bin': 6.0, 'colsample_bytree': 0.6195974449388929, 'reg_alpha': 0.0009765625, 'reg_lambda': 6.847622848964946}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 76.0, 'num_leaves': 79.0, 'min_child_samples': 38.0, 'learning_rate': 0.07347198684652415, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.7057884896198299, 'reg_alpha': 0.0009765625, 'reg_lambda': 25.93865918819648}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 76.0, 'num_leaves': 79.0, 'min_child_samples': 38.0, 'learning_rate': 0.07347198684652415, 'subsample': 1.0, 'log_max_bin': 7.0, 'colsample_bytree': 0.7057884896198299, 'reg_alpha': 0.0009765625, 'reg_lambda': 25.93865918819648}}\n{'Current Learner': 'lgbm', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 266.0, 'num_leaves': 204.0, 'min_child_samples': 64.0, 'learning_rate': 0.05528362885527569, 'subsample': 1.0, 'log_max_bin': 8.0, 'colsample_bytree': 0.7018843176351586, 'reg_alpha': 0.005771390107656191, 'reg_lambda': 62.31073135366825}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 266.0, 'num_leaves': 204.0, 'min_child_samples': 64.0, 'learning_rate': 0.05528362885527569, 'subsample': 1.0, 'log_max_bin': 8.0, 'colsample_bytree': 0.7018843176351586, 'reg_alpha': 0.005771390107656191, 'reg_lambda': 62.31073135366825}}\n" + ] } ], "source": [ @@ -291,7 +380,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 11, "metadata": { "slideshow": { "slide_type": "slide" @@ -302,8 +391,8 @@ "output_type": "display_data", "data": { "text/plain": "
[figure repr stripped in extraction]",
- "image/svg+xml": "[garbled SVG figure residue omitted]",
- "image/png": "[base64 PNG figure payload omitted]"
+ "text/plain": [
+ "[figure repr stripped in extraction]"
+ ],
+ "image/svg+xml": "[garbled SVG figure residue omitted]",
+ "image/png": "[base64 PNG figure payload omitted]"
 },
 "metadata": {
 "needs_background": "light"
 }
 }
 ],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 15,
+ "execution_count": 12,
 "metadata": {
 "tags": []
 },
 "outputs": [
 {
 "output_type": "stream",
 "name": "stdout",
- "text": "flaml r2=0.8500929784828137\n"
+ "text": [
+ "flaml r2 = 0.8491961402689281\n"
+ ]
 }
 ],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 16,
+ "execution_count": 13,
 "metadata": {},
 "outputs": [],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 17,
+ "execution_count": 14,
 "metadata": {},
 "outputs": [
 {
 "output_type": "execute_result",
 "data": {
- "text/plain": "LGBMRegressor()"
+ "text/plain": [
+ "LGBMRegressor()"
+ ]
 },
 "metadata": {},
- "execution_count": 17
+ "execution_count": 14
 }
 ],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 18,
+ "execution_count": 15,
 "metadata": {
 "tags": []
 },
 "outputs": [
 {
 "output_type": "stream",
 "name": "stdout",
- "text": "default lgbm r2=0.8296179648694404\n"
+ "text": [
+ "default lgbm r2 = 0.8296179648694404\n"
+ ]
 }
 ],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 39,
+ "execution_count": 16,
 "metadata": {},
 "outputs": [],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 40,
+ "execution_count": 17,
 "metadata": {},
 "outputs": [],
 "source": [
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 41,
+ "execution_count": 18,
 "metadata": {
- "tags": []
+ "tags": [
+ "outputPrepend"
+ ]
 },
 "outputs": [
 {
 "output_type": "stream",
 "name": "stderr",
- "text": "\u001b[32m[I 2021-04-09 19:56:13,788]\u001b[0m A new study created in memory with name: no-name-be796674-63fe-4736-9436-82e0a952f36b\u001b[0m\nfeature_fraction, val_score: 2001137767.143790: 14%|#4 | 1/7 [00:02<00:13, 2.30s/it]\u001b[32m[I 2021-04-09 19:56:16,095]\u001b[0m Trial 0 finished with value: 2001137767.14379 and parameters: {'feature_fraction': 0.7}. Best is trial 0 with value: 2001137767.14379.\u001b[0m\nfeature_fraction, val_score: 2001137767.143790: 29%|##8 | 2/7 [00:04<00:11, 2.24s/it]\u001b[32m[I 2021-04-09 19:56:18,289]\u001b[0m Trial 1 finished with value: 2009099143.533758 and parameters: {'feature_fraction': 0.6}. 
Best is trial 0 with value: 2001137767.14379.\u001b[0m\nfeature_fraction, val_score: 2001137767.143790: 43%|####2 | 3/7 [00:06<00:09, 2.27s/it]\u001b[32m[I 2021-04-09 19:56:20,588]\u001b[0m Trial 2 finished with value: 2001137767.14379 and parameters: {'feature_fraction': 0.8}. Best is trial 0 with value: 2001137767.14379.\u001b[0m\nfeature_fraction, val_score: 2001137767.143790: 57%|#####7 | 4/7 [00:09<00:07, 2.38s/it]\u001b[32m[I 2021-04-09 19:56:23,148]\u001b[0m Trial 3 finished with value: 2017941196.0559783 and parameters: {'feature_fraction': 1.0}. Best is trial 0 with value: 2001137767.14379.\u001b[0m\nfeature_fraction, val_score: 1977065482.707781: 71%|#######1 | 5/7 [00:11<00:04, 2.27s/it]\u001b[32m[I 2021-04-09 19:56:25,222]\u001b[0m Trial 4 finished with value: 1977065482.7077813 and parameters: {'feature_fraction': 0.5}. Best is trial 4 with value: 1977065482.7077813.\u001b[0m\nfeature_fraction, val_score: 1977065482.707781: 71%|#######1 | 5/7 [00:11<00:04, 2.27s/it]" + "text": [ + "n': 0.5}. Best is trial 1 with value: 2012422177.233508.\u001b[0m\n", + "feature_fraction, val_score: 2012422177.233508: 43%|####2 | 3/7 [00:07<00:11, 2.76s/it]\u001b[32m[I 2021-05-01 16:56:19,694]\u001b[0m Trial 2 finished with value: 2070320819.099197 and parameters: {'feature_fraction': 0.6}. Best is trial 1 with value: 2012422177.233508.\u001b[0m\n", + "feature_fraction, val_score: 2012422177.233508: 57%|#####7 | 4/7 [00:10<00:08, 2.75s/it]\u001b[32m[I 2021-05-01 16:56:22,414]\u001b[0m Trial 3 finished with value: 2090738130.975806 and parameters: {'feature_fraction': 0.8999999999999999}. Best is trial 1 with value: 2012422177.233508.\u001b[0m\n", + "feature_fraction, val_score: 2012422177.233508: 71%|#######1 | 5/7 [00:13<00:05, 2.67s/it]\u001b[32m[I 2021-05-01 16:56:24,909]\u001b[0m Trial 4 finished with value: 2041753467.8813415 and parameters: {'feature_fraction': 0.8}. Best is trial 1 with value: 2012422177.233508.\u001b[0m\n", + "feature_fraction, val_score: 2012422177.233508: 86%|########5 | 6/7 [00:15<00:02, 2.68s/it]\u001b[32m[I 2021-05-01 16:56:27,596]\u001b[0m Trial 5 finished with value: 2041753467.8813415 and parameters: {'feature_fraction': 0.7}. Best is trial 1 with value: 2012422177.233508.\u001b[0m\n", + "feature_fraction, val_score: 2012422177.233508: 100%|##########| 7/7 [00:18<00:00, 2.59s/it]\u001b[32m[I 2021-05-01 16:56:29,991]\u001b[0m Trial 6 finished with value: 2268739005.2074604 and parameters: {'feature_fraction': 0.4}. Best is trial 1 with value: 2012422177.233508.\u001b[0m\n", + "feature_fraction, val_score: 2012422177.233508: 100%|##########| 7/7 [00:18<00:00, 2.59s/it]\n", + "num_leaves, val_score: 2012422177.233508: 5%|5 | 1/20 [00:05<01:37, 5.15s/it]\u001b[32m[I 2021-05-01 16:56:35,147]\u001b[0m Trial 7 finished with value: 2101942667.8301136 and parameters: {'num_leaves': 100}. Best is trial 7 with value: 2101942667.8301136.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 10%|# | 2/20 [00:11<01:39, 5.53s/it]\u001b[32m[I 2021-05-01 16:56:41,578]\u001b[0m Trial 8 finished with value: 2116990487.8274357 and parameters: {'num_leaves': 170}. Best is trial 7 with value: 2101942667.8301136.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 15%|#5 | 3/20 [00:14<01:19, 4.65s/it]\u001b[32m[I 2021-05-01 16:56:44,174]\u001b[0m Trial 9 finished with value: 2068285393.500253 and parameters: {'num_leaves': 53}. 
Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 20%|## | 4/20 [00:25<01:48, 6.75s/it]\u001b[32m[I 2021-05-01 16:56:55,835]\u001b[0m Trial 10 finished with value: 2155721300.061022 and parameters: {'num_leaves': 247}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 25%|##5 | 5/20 [00:27<01:20, 5.36s/it]\u001b[32m[I 2021-05-01 16:56:57,939]\u001b[0m Trial 11 finished with value: 2110152521.9026961 and parameters: {'num_leaves': 14}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 30%|### | 6/20 [00:43<01:59, 8.55s/it]\u001b[32m[I 2021-05-01 16:57:13,922]\u001b[0m Trial 12 finished with value: 2155721300.061022 and parameters: {'num_leaves': 247}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 35%|###5 | 7/20 [00:53<01:53, 8.74s/it]\u001b[32m[I 2021-05-01 16:57:23,128]\u001b[0m Trial 13 finished with value: 2170705249.4392734 and parameters: {'num_leaves': 180}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 40%|#### | 8/20 [00:53<01:16, 6.37s/it]\u001b[32m[I 2021-05-01 16:57:23,950]\u001b[0m Trial 14 finished with value: 3322965157.380943 and parameters: {'num_leaves': 2}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 45%|####5 | 9/20 [01:00<01:09, 6.34s/it]\u001b[32m[I 2021-05-01 16:57:30,222]\u001b[0m Trial 15 finished with value: 2078188917.1665275 and parameters: {'num_leaves': 112}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 50%|##### | 10/20 [01:09<01:12, 7.26s/it]\u001b[32m[I 2021-05-01 16:57:39,631]\u001b[0m Trial 16 finished with value: 2149952453.251796 and parameters: {'num_leaves': 194}. Best is trial 9 with value: 2068285393.500253.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 55%|#####5 | 11/20 [01:12<00:53, 6.00s/it]\u001b[32m[I 2021-05-01 16:57:42,688]\u001b[0m Trial 17 finished with value: 2039014776.0863047 and parameters: {'num_leaves': 50}. Best is trial 17 with value: 2039014776.0863047.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 60%|###### | 12/20 [01:16<00:43, 5.39s/it]\u001b[32m[I 2021-05-01 16:57:46,660]\u001b[0m Trial 18 finished with value: 2028177421.7466378 and parameters: {'num_leaves': 70}. Best is trial 18 with value: 2028177421.7466378.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 65%|######5 | 13/20 [01:21<00:36, 5.21s/it]\u001b[32m[I 2021-05-01 16:57:51,453]\u001b[0m Trial 19 finished with value: 2083200978.2816963 and parameters: {'num_leaves': 72}. Best is trial 18 with value: 2028177421.7466378.\u001b[0m\n", + "num_leaves, val_score: 2012422177.233508: 70%|####### | 14/20 [01:29<00:37, 6.19s/it]\u001b[32m[I 2021-05-01 16:57:59,920]\u001b[0m Trial 20 finished with value: 2121588087.918161 and parameters: {'num_leaves': 129}. Best is trial 18 with value: 2028177421.7466378.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 75%|#######5 | 15/20 [01:33<00:27, 5.55s/it]\u001b[32m[I 2021-05-01 16:58:03,962]\u001b[0m Trial 21 finished with value: 1997587162.470951 and parameters: {'num_leaves': 43}. 
Best is trial 21 with value: 1997587162.470951.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 80%|######## | 16/20 [01:36<00:18, 4.74s/it]\u001b[32m[I 2021-05-01 16:58:06,830]\u001b[0m Trial 22 finished with value: 2052604443.4670672 and parameters: {'num_leaves': 28}. Best is trial 21 with value: 1997587162.470951.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 85%|########5 | 17/20 [01:43<00:15, 5.24s/it]\u001b[32m[I 2021-05-01 16:58:13,218]\u001b[0m Trial 23 finished with value: 2043471294.5650334 and parameters: {'num_leaves': 80}. Best is trial 21 with value: 1997587162.470951.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 90%|######### | 18/20 [01:51<00:12, 6.21s/it]\u001b[32m[I 2021-05-01 16:58:21,714]\u001b[0m Trial 24 finished with value: 2134499770.7451386 and parameters: {'num_leaves': 138}. Best is trial 21 with value: 1997587162.470951.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 95%|#########5| 19/20 [01:55<00:05, 5.53s/it]\u001b[32m[I 2021-05-01 16:58:25,660]\u001b[0m Trial 25 finished with value: 2044138761.5237503 and parameters: {'num_leaves': 42}. Best is trial 21 with value: 1997587162.470951.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 100%|##########| 20/20 [02:00<00:00, 5.29s/it]\u001b[32m[I 2021-05-01 16:58:30,376]\u001b[0m Trial 26 finished with value: 2043471294.5650334 and parameters: {'num_leaves': 80}. Best is trial 21 with value: 1997587162.470951.\u001b[0m\n", + "num_leaves, val_score: 1997587162.470951: 100%|##########| 20/20 [02:00<00:00, 6.02s/it]\n", + "bagging, val_score: 1997587162.470951: 10%|# | 1/10 [00:03<00:31, 3.52s/it]\u001b[32m[I 2021-05-01 16:58:33,911]\u001b[0m Trial 27 finished with value: 2013108212.2667012 and parameters: {'bagging_fraction': 0.7107931665183529, 'bagging_freq': 4}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 20%|## | 2/10 [00:06<00:27, 3.49s/it]\u001b[32m[I 2021-05-01 16:58:37,305]\u001b[0m Trial 28 finished with value: 2115850610.4036384 and parameters: {'bagging_fraction': 0.7110669495016676, 'bagging_freq': 4}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 30%|### | 3/10 [00:10<00:25, 3.66s/it]\u001b[32m[I 2021-05-01 16:58:41,369]\u001b[0m Trial 29 finished with value: 2195082524.11466 and parameters: {'bagging_fraction': 0.4223130416728271, 'bagging_freq': 3}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 40%|#### | 4/10 [00:14<00:21, 3.67s/it]\u001b[32m[I 2021-05-01 16:58:45,049]\u001b[0m Trial 30 finished with value: 2040265763.438056 and parameters: {'bagging_fraction': 0.9997992429240515, 'bagging_freq': 7}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 50%|##### | 5/10 [00:18<00:18, 3.73s/it]\u001b[32m[I 2021-05-01 16:58:48,928]\u001b[0m Trial 31 finished with value: 2131241507.3480675 and parameters: {'bagging_fraction': 0.6896585879210911, 'bagging_freq': 6}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 60%|###### | 6/10 [00:21<00:14, 3.62s/it]\u001b[32m[I 2021-05-01 16:58:52,305]\u001b[0m Trial 32 finished with value: 2103907334.0925496 and parameters: {'bagging_fraction': 0.7164061602702391, 'bagging_freq': 1}. 
Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 70%|####### | 7/10 [00:26<00:11, 3.77s/it]\u001b[32m[I 2021-05-01 16:58:56,405]\u001b[0m Trial 33 finished with value: 2036444350.9989514 and parameters: {'bagging_fraction': 0.8892052985573371, 'bagging_freq': 4}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 80%|######## | 8/10 [00:30<00:07, 3.88s/it]\u001b[32m[I 2021-05-01 16:59:00,560]\u001b[0m Trial 34 finished with value: 2225281871.3367276 and parameters: {'bagging_fraction': 0.49661561085854733, 'bagging_freq': 2}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 90%|######### | 9/10 [00:34<00:03, 3.91s/it]\u001b[32m[I 2021-05-01 16:59:04,536]\u001b[0m Trial 35 finished with value: 2143704197.0784042 and parameters: {'bagging_fraction': 0.5655413899704534, 'bagging_freq': 5}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 100%|##########| 10/10 [00:37<00:00, 3.84s/it]\u001b[32m[I 2021-05-01 16:59:08,211]\u001b[0m Trial 36 finished with value: 2110700689.1702607 and parameters: {'bagging_fraction': 0.8700737972459625, 'bagging_freq': 6}. Best is trial 27 with value: 2013108212.2667012.\u001b[0m\n", + "bagging, val_score: 1997587162.470951: 100%|##########| 10/10 [00:37<00:00, 3.78s/it]\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 17%|#6 | 1/6 [00:02<00:13, 2.66s/it]\u001b[32m[I 2021-05-01 16:59:10,871]\u001b[0m Trial 37 finished with value: 2089539253.8077588 and parameters: {'feature_fraction': 0.58}. Best is trial 37 with value: 2089539253.8077588.\u001b[0m\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 33%|###3 | 2/6 [00:05<00:10, 2.62s/it]\u001b[32m[I 2021-05-01 16:59:13,403]\u001b[0m Trial 38 finished with value: 1997587162.470951 and parameters: {'feature_fraction': 0.484}. Best is trial 38 with value: 1997587162.470951.\u001b[0m\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 50%|##### | 3/6 [00:07<00:07, 2.64s/it]\u001b[32m[I 2021-05-01 16:59:16,077]\u001b[0m Trial 39 finished with value: 1997587162.470951 and parameters: {'feature_fraction': 0.516}. Best is trial 38 with value: 1997587162.470951.\u001b[0m\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 67%|######6 | 4/6 [00:10<00:05, 2.67s/it]\u001b[32m[I 2021-05-01 16:59:18,827]\u001b[0m Trial 40 finished with value: 2284254046.781229 and parameters: {'feature_fraction': 0.42}. Best is trial 38 with value: 1997587162.470951.\u001b[0m\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 83%|########3 | 5/6 [00:14<00:02, 2.93s/it]\u001b[32m[I 2021-05-01 16:59:22,357]\u001b[0m Trial 41 finished with value: 1997587162.470951 and parameters: {'feature_fraction': 0.5479999999999999}. Best is trial 38 with value: 1997587162.470951.\u001b[0m\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 100%|##########| 6/6 [00:17<00:00, 3.01s/it]\u001b[32m[I 2021-05-01 16:59:25,574]\u001b[0m Trial 42 finished with value: 1997587162.470951 and parameters: {'feature_fraction': 0.45199999999999996}. 
Best is trial 38 with value: 1997587162.470951.\u001b[0m\n", + "feature_fraction_stage2, val_score: 1997587162.470951: 100%|##########| 6/6 [00:17<00:00, 2.89s/it]\n", + "regularization_factors, val_score: 1997587070.360476: 5%|5 | 1/20 [00:03<01:02, 3.31s/it]\u001b[32m[I 2021-05-01 16:59:28,890]\u001b[0m Trial 43 finished with value: 1997587070.3604763 and parameters: {'lambda_l1': 1.3325881401359536e-06, 'lambda_l2': 2.012676569064997e-05}. Best is trial 43 with value: 1997587070.3604763.\u001b[0m\n", + "regularization_factors, val_score: 1997587070.360476: 10%|# | 2/20 [00:06<00:58, 3.27s/it]\u001b[32m[I 2021-05-01 16:59:32,081]\u001b[0m Trial 44 finished with value: 1997587071.8468173 and parameters: {'lambda_l1': 3.416658142750445e-07, 'lambda_l2': 1.971124403055093e-05}. Best is trial 43 with value: 1997587070.3604763.\u001b[0m\n", + "regularization_factors, val_score: 1997587070.360476: 15%|#5 | 3/20 [00:09<00:55, 3.28s/it]\u001b[32m[I 2021-05-01 16:59:35,368]\u001b[0m Trial 45 finished with value: 1997587102.3779635 and parameters: {'lambda_l1': 3.267399616442553e-07, 'lambda_l2': 1.3087509647016092e-05}. Best is trial 43 with value: 1997587070.3604763.\u001b[0m\n", + "regularization_factors, val_score: 1997587070.360476: 20%|## | 4/20 [00:13<00:52, 3.30s/it]\u001b[32m[I 2021-05-01 16:59:38,720]\u001b[0m Trial 46 finished with value: 1997587075.3923492 and parameters: {'lambda_l1': 2.682703999444416e-07, 'lambda_l2': 1.8913639824313343e-05}. Best is trial 43 with value: 1997587070.3604763.\u001b[0m\n", + "regularization_factors, val_score: 1997587070.360476: 25%|##5 | 5/20 [00:16<00:49, 3.32s/it]\u001b[32m[I 2021-05-01 16:59:42,085]\u001b[0m Trial 47 finished with value: 1997587070.787969 and parameters: {'lambda_l1': 1.4561401674574448e-07, 'lambda_l2': 1.9976154048638757e-05}. Best is trial 43 with value: 1997587070.3604763.\u001b[0m\n", + "regularization_factors, val_score: 1997587070.360476: 30%|### | 6/20 [00:19<00:46, 3.35s/it]\u001b[32m[I 2021-05-01 16:59:45,512]\u001b[0m Trial 48 finished with value: 1997587087.4850538 and parameters: {'lambda_l1': 2.9044467527482266e-07, 'lambda_l2': 1.6280849368362258e-05}. Best is trial 43 with value: 1997587070.3604763.\u001b[0m\n", + "regularization_factors, val_score: 1997587057.813578: 35%|###5 | 7/20 [00:23<00:43, 3.34s/it]\u001b[32m[I 2021-05-01 16:59:48,814]\u001b[0m Trial 49 finished with value: 1997587057.8135784 and parameters: {'lambda_l1': 2.736584478611428e-07, 'lambda_l2': 2.2832344774742773e-05}. Best is trial 49 with value: 1997587057.8135784.\u001b[0m\n", + "regularization_factors, val_score: 1997586977.666038: 40%|#### | 8/20 [00:26<00:39, 3.32s/it]\u001b[32m[I 2021-05-01 16:59:52,105]\u001b[0m Trial 50 finished with value: 1997586977.666038 and parameters: {'lambda_l1': 2.2552818901556212e-07, 'lambda_l2': 4.0459809426159216e-05}. Best is trial 50 with value: 1997586977.666038.\u001b[0m\n", + "regularization_factors, val_score: 1997586977.666038: 45%|####5 | 9/20 [00:29<00:36, 3.29s/it]\u001b[32m[I 2021-05-01 16:59:55,317]\u001b[0m Trial 51 finished with value: 1997587069.0728564 and parameters: {'lambda_l1': 2.269836233537227e-07, 'lambda_l2': 2.0400060529051817e-05}. Best is trial 50 with value: 1997586977.666038.\u001b[0m\n", + "regularization_factors, val_score: 1997586977.666038: 50%|##### | 10/20 [00:33<00:33, 3.32s/it]\u001b[32m[I 2021-05-01 16:59:58,719]\u001b[0m Trial 52 finished with value: 1997587038.1018682 and parameters: {'lambda_l1': 2.1190142795602203e-07, 'lambda_l2': 2.7161997048896454e-05}. 
Best is trial 50 with value: 1997586977.666038.\u001b[0m\n", + "regularization_factors, val_score: 1997586977.666038: 55%|#####5 | 11/20 [00:36<00:30, 3.43s/it]\u001b[32m[I 2021-05-01 17:00:02,410]\u001b[0m Trial 53 finished with value: 1997586992.6494768 and parameters: {'lambda_l1': 2.1728160447318185e-07, 'lambda_l2': 3.710639213958161e-05}. Best is trial 50 with value: 1997586977.666038.\u001b[0m\n", + "regularization_factors, val_score: 1996449931.514239: 60%|###### | 12/20 [00:40<00:27, 3.40s/it]\u001b[32m[I 2021-05-01 17:00:05,742]\u001b[0m Trial 54 finished with value: 1996449931.514239 and parameters: {'lambda_l1': 2.1748994754196613e-07, 'lambda_l2': 0.0001457763270993375}. Best is trial 54 with value: 1996449931.514239.\u001b[0m\n", + "regularization_factors, val_score: 1996449931.514239: 65%|######5 | 13/20 [00:44<00:26, 3.79s/it]\u001b[32m[I 2021-05-01 17:00:10,421]\u001b[0m Trial 55 finished with value: 2067232492.4956243 and parameters: {'lambda_l1': 1.024147819302013e-08, 'lambda_l2': 0.00295027742437926}. Best is trial 54 with value: 1996449931.514239.\u001b[0m\n", + "regularization_factors, val_score: 1996449532.606333: 70%|####### | 14/20 [00:49<00:23, 3.91s/it]\u001b[32m[I 2021-05-01 17:00:14,622]\u001b[0m Trial 56 finished with value: 1996449532.606333 and parameters: {'lambda_l1': 0.0738445887576454, 'lambda_l2': 0.00022249854014829427}. Best is trial 56 with value: 1996449532.606333.\u001b[0m\n", + "regularization_factors, val_score: 1996449532.606333: 75%|#######5 | 15/20 [00:51<00:17, 3.50s/it]\u001b[32m[I 2021-05-01 17:00:17,173]\u001b[0m Trial 57 finished with value: 2030166678.6715233 and parameters: {'lambda_l1': 0.4206025220395843, 'lambda_l2': 0.0013715337299642163}. Best is trial 56 with value: 1996449532.606333.\u001b[0m\n", + "regularization_factors, val_score: 1996449532.606333: 80%|######## | 16/20 [00:54<00:13, 3.26s/it]\u001b[32m[I 2021-05-01 17:00:19,852]\u001b[0m Trial 58 finished with value: 2035151132.9680371 and parameters: {'lambda_l1': 0.04874741366424845, 'lambda_l2': 7.210205334409902}. Best is trial 56 with value: 1996449532.606333.\u001b[0m\n", + "regularization_factors, val_score: 1996449532.606333: 85%|########5 | 17/20 [00:56<00:08, 2.99s/it]\u001b[32m[I 2021-05-01 17:00:22,219]\u001b[0m Trial 59 finished with value: 2030171133.0539286 and parameters: {'lambda_l1': 3.325828713424872e-05, 'lambda_l2': 0.000613610913339345}. Best is trial 56 with value: 1996449532.606333.\u001b[0m\n", + "regularization_factors, val_score: 1996449532.606333: 90%|######### | 18/20 [00:58<00:05, 2.79s/it]\u001b[32m[I 2021-05-01 17:00:24,556]\u001b[0m Trial 60 finished with value: 1997587159.9270165 and parameters: {'lambda_l1': 1.122670575237426e-08, 'lambda_l2': 4.7679572155360673e-07}. Best is trial 56 with value: 1996449532.606333.\u001b[0m\n", + "regularization_factors, val_score: 1996449532.606333: 95%|#########5| 19/20 [01:01<00:02, 2.64s/it]\u001b[32m[I 2021-05-01 17:00:26,854]\u001b[0m Trial 61 finished with value: 1996449770.564637 and parameters: {'lambda_l1': 6.446613270805078e-06, 'lambda_l2': 0.00017942732358506184}. Best is trial 56 with value: 1996449532.606333.\u001b[0m\n", + "regularization_factors, val_score: 1996449418.529521: 100%|##########| 20/20 [01:03<00:00, 2.55s/it]\u001b[32m[I 2021-05-01 17:00:29,173]\u001b[0m Trial 62 finished with value: 1996449418.5295208 and parameters: {'lambda_l1': 8.868453484243689e-06, 'lambda_l2': 0.000252862182277996}. 
Best is trial 62 with value: 1996449418.5295208.\u001b[0m\n", + "regularization_factors, val_score: 1996449418.529521: 100%|##########| 20/20 [01:03<00:00, 3.18s/it]\n", + "min_data_in_leaf, val_score: 1996449418.529521: 20%|## | 1/5 [00:02<00:08, 2.15s/it]\u001b[32m[I 2021-05-01 17:00:31,325]\u001b[0m Trial 63 finished with value: 2035984658.8333156 and parameters: {'min_child_samples': 5}. Best is trial 63 with value: 2035984658.8333156.\u001b[0m\n", + "min_data_in_leaf, val_score: 1996449418.529521: 40%|#### | 2/5 [00:04<00:07, 2.34s/it]\u001b[32m[I 2021-05-01 17:00:34,112]\u001b[0m Trial 64 finished with value: 2047790552.496713 and parameters: {'min_child_samples': 50}. Best is trial 63 with value: 2035984658.8333156.\u001b[0m\n", + "min_data_in_leaf, val_score: 1996449418.529521: 60%|###### | 3/5 [00:07<00:04, 2.27s/it]\u001b[32m[I 2021-05-01 17:00:36,236]\u001b[0m Trial 65 finished with value: 2022941263.9641247 and parameters: {'min_child_samples': 10}. Best is trial 65 with value: 2022941263.9641247.\u001b[0m\n", + "min_data_in_leaf, val_score: 1996449418.529521: 80%|######## | 4/5 [00:09<00:02, 2.31s/it]\u001b[32m[I 2021-05-01 17:00:38,623]\u001b[0m Trial 66 finished with value: 2034136324.3695369 and parameters: {'min_child_samples': 25}. Best is trial 65 with value: 2022941263.9641247.\u001b[0m\n", + "min_data_in_leaf, val_score: 1996449418.529521: 100%|##########| 5/5 [00:12<00:00, 2.60s/it]\u001b[32m[I 2021-05-01 17:00:41,911]\u001b[0m Trial 67 finished with value: 2038947709.9319875 and parameters: {'min_child_samples': 100}. Best is trial 65 with value: 2022941263.9641247.\u001b[0m\n", + "min_data_in_leaf, val_score: 1996449418.529521: 100%|##########| 5/5 [00:12<00:00, 2.55s/it]CPU times: user 4min 10s, sys: 14.6 s, total: 4min 25s\n", + "Wall time: 4min 30s\n", + "\n" + ] } ], "source": [ @@ -461,11 +634,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Optuna LightGBM Tuner r2 = 0.8390948396448961\n" + ] + } + ], "source": [ "y_pred = model.predict(X_test)\n", "from flaml.ml import sklearn_metric_loss_score\n", @@ -537,7 +718,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 21, "metadata": { "tags": [] }, @@ -545,7 +726,81 @@ { "output_type": "stream", "name": "stderr", - "text": "[flaml.automl: 04-09 21:16:36] {890} INFO - Evaluation method: cv\n[flaml.automl: 04-09 21:16:36] {606} INFO - Using RepeatedKFold\n[flaml.automl: 04-09 21:16:36] {911} INFO - Minimizing error metric: 1-r2\n[flaml.automl: 04-09 21:16:36] {930} INFO - List of ML learners in AutoML Run: ['my_lgbm']\n[flaml.automl: 04-09 21:16:36] {994} INFO - iteration 0, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 0.2s,\tbest my_lgbm's error=2.9883,\tbest my_lgbm's error=2.9883\n[flaml.automl: 04-09 21:16:37] {994} INFO - iteration 1, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 0.3s,\tbest my_lgbm's error=2.9883,\tbest my_lgbm's error=2.9883\n[flaml.automl: 04-09 21:16:37] {994} INFO - iteration 2, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 0.4s,\tbest my_lgbm's error=0.4472,\tbest my_lgbm's error=0.4472\n[flaml.automl: 04-09 21:16:37] {994} INFO - iteration 3, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 0.5s,\tbest my_lgbm's error=0.4472,\tbest my_lgbm's error=0.4472\n[flaml.automl: 04-09 21:16:37] {994} INFO - 
iteration 4, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 0.7s,\tbest my_lgbm's error=0.2682,\tbest my_lgbm's error=0.2682\n[flaml.automl: 04-09 21:16:37] {994} INFO - iteration 5, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 0.9s,\tbest my_lgbm's error=0.2682,\tbest my_lgbm's error=0.2682\n[flaml.automl: 04-09 21:16:37] {994} INFO - iteration 6, current learner my_lgbm\n[flaml.automl: 04-09 21:16:37] {1147} INFO - at 1.1s,\tbest my_lgbm's error=0.2682,\tbest my_lgbm's error=0.2682\n[flaml.automl: 04-09 21:16:37] {994} INFO - iteration 7, current learner my_lgbm\n[flaml.automl: 04-09 21:16:38] {1147} INFO - at 1.3s,\tbest my_lgbm's error=0.2256,\tbest my_lgbm's error=0.2256\n[flaml.automl: 04-09 21:16:38] {994} INFO - iteration 8, current learner my_lgbm\n[flaml.automl: 04-09 21:16:38] {1147} INFO - at 1.5s,\tbest my_lgbm's error=0.2256,\tbest my_lgbm's error=0.2256\n[flaml.automl: 04-09 21:16:38] {994} INFO - iteration 9, current learner my_lgbm\n[flaml.automl: 04-09 21:16:38] {1147} INFO - at 1.6s,\tbest my_lgbm's error=0.2256,\tbest my_lgbm's error=0.2256\n[flaml.automl: 04-09 21:16:38] {994} INFO - iteration 10, current learner my_lgbm\n[flaml.automl: 04-09 21:16:38] {1147} INFO - at 1.8s,\tbest my_lgbm's error=0.2256,\tbest my_lgbm's error=0.2256\n[flaml.automl: 04-09 21:16:38] {994} INFO - iteration 11, current learner my_lgbm\n[flaml.automl: 04-09 21:16:39] {1147} INFO - at 2.3s,\tbest my_lgbm's error=0.1866,\tbest my_lgbm's error=0.1866\n[flaml.automl: 04-09 21:16:39] {994} INFO - iteration 12, current learner my_lgbm\n[flaml.automl: 04-09 21:16:39] {1147} INFO - at 2.9s,\tbest my_lgbm's error=0.1866,\tbest my_lgbm's error=0.1866\n[flaml.automl: 04-09 21:16:39] {994} INFO - iteration 13, current learner my_lgbm\n[flaml.automl: 04-09 21:16:39] {1147} INFO - at 3.1s,\tbest my_lgbm's error=0.1866,\tbest my_lgbm's error=0.1866\n[flaml.automl: 04-09 21:16:39] {994} INFO - iteration 14, current learner my_lgbm\n[flaml.automl: 04-09 21:16:41] {1147} INFO - at 5.0s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:41] {994} INFO - iteration 15, current learner my_lgbm\n[flaml.automl: 04-09 21:16:42] {1147} INFO - at 5.6s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:42] {994} INFO - iteration 16, current learner my_lgbm\n[flaml.automl: 04-09 21:16:48] {1147} INFO - at 11.9s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:48] {994} INFO - iteration 17, current learner my_lgbm\n[flaml.automl: 04-09 21:16:49] {1147} INFO - at 13.1s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:49] {994} INFO - iteration 18, current learner my_lgbm\n[flaml.automl: 04-09 21:16:54] {1147} INFO - at 17.7s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:54] {994} INFO - iteration 19, current learner my_lgbm\n[flaml.automl: 04-09 21:16:55] {1147} INFO - at 18.3s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:55] {994} INFO - iteration 20, current learner my_lgbm\n[flaml.automl: 04-09 21:16:57] {1147} INFO - at 20.2s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:57] {994} INFO - iteration 21, current learner my_lgbm\n[flaml.automl: 04-09 21:16:57] {1147} INFO - at 20.5s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:16:57] {994} INFO - 
iteration 22, current learner my_lgbm\n[flaml.automl: 04-09 21:17:00] {1147} INFO - at 23.9s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:00] {994} INFO - iteration 23, current learner my_lgbm\n[flaml.automl: 04-09 21:17:03] {1147} INFO - at 26.3s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:03] {994} INFO - iteration 24, current learner my_lgbm\n[flaml.automl: 04-09 21:17:04] {1147} INFO - at 27.2s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:04] {994} INFO - iteration 25, current learner my_lgbm\n[flaml.automl: 04-09 21:17:06] {1147} INFO - at 29.6s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:06] {994} INFO - iteration 26, current learner my_lgbm\n[flaml.automl: 04-09 21:17:07] {1147} INFO - at 31.0s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:07] {994} INFO - iteration 27, current learner my_lgbm\n[flaml.automl: 04-09 21:17:08] {1147} INFO - at 31.7s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:08] {994} INFO - iteration 28, current learner my_lgbm\n[flaml.automl: 04-09 21:17:10] {1147} INFO - at 33.3s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:10] {994} INFO - iteration 29, current learner my_lgbm\n[flaml.automl: 04-09 21:17:16] {1147} INFO - at 39.7s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:16] {994} INFO - iteration 30, current learner my_lgbm\n[flaml.automl: 04-09 21:17:16] {1147} INFO - at 40.0s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:16] {994} INFO - iteration 31, current learner my_lgbm\n[flaml.automl: 04-09 21:17:20] {1147} INFO - at 44.1s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:20] {994} INFO - iteration 32, current learner my_lgbm\n[flaml.automl: 04-09 21:17:22] {1147} INFO - at 45.4s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:22] {994} INFO - iteration 33, current learner my_lgbm\n[flaml.automl: 04-09 21:17:22] {1147} INFO - at 45.6s,\tbest my_lgbm's error=0.1639,\tbest my_lgbm's error=0.1639\n[flaml.automl: 04-09 21:17:22] {994} INFO - iteration 34, current learner my_lgbm\n[flaml.automl: 04-09 21:17:35] {1147} INFO - at 59.1s,\tbest my_lgbm's error=0.1610,\tbest my_lgbm's error=0.1610\n[flaml.automl: 04-09 21:17:35] {994} INFO - iteration 35, current learner my_lgbm\n[flaml.automl: 04-09 21:17:59] {1147} INFO - at 82.8s,\tbest my_lgbm's error=0.1610,\tbest my_lgbm's error=0.1610\n[flaml.automl: 04-09 21:17:59] {994} INFO - iteration 36, current learner my_lgbm\n[flaml.automl: 04-09 21:18:06] {1147} INFO - at 89.3s,\tbest my_lgbm's error=0.1610,\tbest my_lgbm's error=0.1610\n[flaml.automl: 04-09 21:18:06] {994} INFO - iteration 37, current learner my_lgbm\n[flaml.automl: 04-09 21:18:12] {1147} INFO - at 95.7s,\tbest my_lgbm's error=0.1610,\tbest my_lgbm's error=0.1610\n[flaml.automl: 04-09 21:18:12] {994} INFO - iteration 38, current learner my_lgbm\n[flaml.automl: 04-09 21:18:20] {1147} INFO - at 103.6s,\tbest my_lgbm's error=0.1610,\tbest my_lgbm's error=0.1610\n[flaml.automl: 04-09 21:18:20] {994} INFO - iteration 39, current learner my_lgbm\n[flaml.automl: 04-09 21:18:36] {1147} INFO - at 119.4s,\tbest my_lgbm's error=0.1610,\tbest my_lgbm's error=0.1610\n[flaml.automl: 04-09 21:18:36] 
{1187} INFO - selected model: LGBMRegressor(colsample_bytree=0.613734331916688,\n learning_rate=0.06283686776885493, max_bin=1023,\n min_child_samples=81, n_estimators=287, num_leaves=247,\n objective=,\n reg_alpha=0.006495889833184046, reg_lambda=0.005049036990045567,\n subsample=0.7669214501226506)\n[flaml.automl: 04-09 21:18:36] {944} INFO - fit succeeded\n" + "text": [ + "[flaml.automl: 05-01 17:00:42] {890} INFO - Evaluation method: cv\n", + "[flaml.automl: 05-01 17:00:42] {606} INFO - Using RepeatedKFold\n", + "[flaml.automl: 05-01 17:00:42] {911} INFO - Minimizing error metric: 1-r2\n", + "[flaml.automl: 05-01 17:00:42] {929} INFO - List of ML learners in AutoML Run: ['my_lgbm']\n", + "[flaml.automl: 05-01 17:00:42] {993} INFO - iteration 0, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:42] {1141} INFO - at 0.3s,\tbest my_lgbm's error=2.9883,\tbest my_lgbm's error=2.9883\n", + "[flaml.automl: 05-01 17:00:42] {993} INFO - iteration 1, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:43] {1141} INFO - at 0.4s,\tbest my_lgbm's error=2.9883,\tbest my_lgbm's error=2.9883\n", + "[flaml.automl: 05-01 17:00:43] {993} INFO - iteration 2, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:43] {1141} INFO - at 0.7s,\tbest my_lgbm's error=1.7530,\tbest my_lgbm's error=1.7530\n", + "[flaml.automl: 05-01 17:00:43] {993} INFO - iteration 3, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:43] {1141} INFO - at 0.9s,\tbest my_lgbm's error=0.4472,\tbest my_lgbm's error=0.4472\n", + "[flaml.automl: 05-01 17:00:43] {993} INFO - iteration 4, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:43] {1141} INFO - at 1.1s,\tbest my_lgbm's error=0.4472,\tbest my_lgbm's error=0.4472\n", + "[flaml.automl: 05-01 17:00:43] {993} INFO - iteration 5, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:43] {1141} INFO - at 1.3s,\tbest my_lgbm's error=0.4472,\tbest my_lgbm's error=0.4472\n", + "[flaml.automl: 05-01 17:00:43] {993} INFO - iteration 6, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:44] {1141} INFO - at 1.6s,\tbest my_lgbm's error=0.3146,\tbest my_lgbm's error=0.3146\n", + "[flaml.automl: 05-01 17:00:44] {993} INFO - iteration 7, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:44] {1141} INFO - at 2.0s,\tbest my_lgbm's error=0.2722,\tbest my_lgbm's error=0.2722\n", + "[flaml.automl: 05-01 17:00:44] {993} INFO - iteration 8, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:44] {1141} INFO - at 2.3s,\tbest my_lgbm's error=0.2722,\tbest my_lgbm's error=0.2722\n", + "[flaml.automl: 05-01 17:00:44] {993} INFO - iteration 9, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:45] {1141} INFO - at 2.7s,\tbest my_lgbm's error=0.2060,\tbest my_lgbm's error=0.2060\n", + "[flaml.automl: 05-01 17:00:45] {993} INFO - iteration 10, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:45] {1141} INFO - at 3.0s,\tbest my_lgbm's error=0.2060,\tbest my_lgbm's error=0.2060\n", + "[flaml.automl: 05-01 17:00:45] {993} INFO - iteration 11, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:45] {1141} INFO - at 3.2s,\tbest my_lgbm's error=0.2060,\tbest my_lgbm's error=0.2060\n", + "[flaml.automl: 05-01 17:00:45] {993} INFO - iteration 12, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:46] {1141} INFO - at 3.9s,\tbest my_lgbm's error=0.1864,\tbest my_lgbm's error=0.1864\n", + "[flaml.automl: 05-01 17:00:46] {993} INFO - iteration 13, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:46] {1141} INFO - at 
4.4s,\tbest my_lgbm's error=0.1864,\tbest my_lgbm's error=0.1864\n", + "[flaml.automl: 05-01 17:00:46] {993} INFO - iteration 14, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:47] {1141} INFO - at 5.2s,\tbest my_lgbm's error=0.1864,\tbest my_lgbm's error=0.1864\n", + "[flaml.automl: 05-01 17:00:47] {993} INFO - iteration 15, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:48] {1141} INFO - at 5.6s,\tbest my_lgbm's error=0.1864,\tbest my_lgbm's error=0.1864\n", + "[flaml.automl: 05-01 17:00:48] {993} INFO - iteration 16, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:49] {1141} INFO - at 6.4s,\tbest my_lgbm's error=0.1856,\tbest my_lgbm's error=0.1856\n", + "[flaml.automl: 05-01 17:00:49] {993} INFO - iteration 17, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:49] {1141} INFO - at 6.8s,\tbest my_lgbm's error=0.1856,\tbest my_lgbm's error=0.1856\n", + "[flaml.automl: 05-01 17:00:49] {993} INFO - iteration 18, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:51] {1141} INFO - at 8.7s,\tbest my_lgbm's error=0.1724,\tbest my_lgbm's error=0.1724\n", + "[flaml.automl: 05-01 17:00:51] {993} INFO - iteration 19, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:00:51] {1141} INFO - at 9.3s,\tbest my_lgbm's error=0.1724,\tbest my_lgbm's error=0.1724\n", + "[flaml.automl: 05-01 17:00:51] {993} INFO - iteration 20, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:01] {1141} INFO - at 18.5s,\tbest my_lgbm's error=0.1724,\tbest my_lgbm's error=0.1724\n", + "[flaml.automl: 05-01 17:01:01] {993} INFO - iteration 21, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:02] {1141} INFO - at 19.5s,\tbest my_lgbm's error=0.1724,\tbest my_lgbm's error=0.1724\n", + "[flaml.automl: 05-01 17:01:02] {993} INFO - iteration 22, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:06] {1141} INFO - at 24.2s,\tbest my_lgbm's error=0.1696,\tbest my_lgbm's error=0.1696\n", + "[flaml.automl: 05-01 17:01:06] {993} INFO - iteration 23, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:08] {1141} INFO - at 25.8s,\tbest my_lgbm's error=0.1696,\tbest my_lgbm's error=0.1696\n", + "[flaml.automl: 05-01 17:01:08] {993} INFO - iteration 24, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:14] {1141} INFO - at 32.0s,\tbest my_lgbm's error=0.1696,\tbest my_lgbm's error=0.1696\n", + "[flaml.automl: 05-01 17:01:14] {993} INFO - iteration 25, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:15] {1141} INFO - at 32.9s,\tbest my_lgbm's error=0.1696,\tbest my_lgbm's error=0.1696\n", + "[flaml.automl: 05-01 17:01:15] {993} INFO - iteration 26, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:33] {1141} INFO - at 50.7s,\tbest my_lgbm's error=0.1607,\tbest my_lgbm's error=0.1607\n", + "[flaml.automl: 05-01 17:01:33] {993} INFO - iteration 27, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:01:45] {1141} INFO - at 62.6s,\tbest my_lgbm's error=0.1607,\tbest my_lgbm's error=0.1607\n", + "[flaml.automl: 05-01 17:01:45] {993} INFO - iteration 28, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:02:08] {1141} INFO - at 86.1s,\tbest my_lgbm's error=0.1607,\tbest my_lgbm's error=0.1607\n", + "[flaml.automl: 05-01 17:02:08] {993} INFO - iteration 29, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:02:13] {1141} INFO - at 91.0s,\tbest my_lgbm's error=0.1607,\tbest my_lgbm's error=0.1607\n", + "[flaml.automl: 05-01 17:02:13] {993} INFO - iteration 30, current learner my_lgbm\n", + "[flaml.automl: 05-01 17:02:32] {1141} 
INFO - at 109.7s,\tbest my_lgbm's error=0.1607,\tbest my_lgbm's error=0.1607\n", + "[flaml.automl: 05-01 17:02:32] {1187} INFO - selected model: LGBMRegressor(colsample_bytree=0.6261496118517905,\n", + " learning_rate=0.08869510109538115, max_bin=127,\n", + " min_child_samples=79, n_estimators=493, num_leaves=282,\n", + " objective=,\n", + " reg_alpha=0.023427326819484437, reg_lambda=3.676068046341948,\n", + " subsample=0.9152991332236934)\n", + "[flaml.automl: 05-01 17:02:32] {944} INFO - fit succeeded\n" + ] } ], "source": [ @@ -563,7 +818,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 22, "metadata": { "tags": [] }, @@ -571,7 +826,17 @@ { "output_type": "stream", "name": "stdout", - "text": "Best hyperparmeter config:{'n_estimators': 287.0, 'num_leaves': 247.0, 'min_child_samples': 81.0, 'learning_rate': 0.06283686776885493, 'subsample': 0.7669214501226506, 'log_max_bin': 10.0, 'colsample_bytree': 0.613734331916688, 'reg_alpha': 0.006495889833184046, 'reg_lambda': 0.005049036990045567}\nBest r2 on validation data: 0.839\nTraining duration of best run: 13.51 s\nPredicted labels[136183.28410995 260302.1656523 136575.03214257 ... 213737.94780122\n 248465.64921701 275744.71459095]\nTrue labels[136900. 241300. 200700. ... 160900. 227300. 265600.]\nr2=0.8449104679441721\nmse=2050051993.9844227\nmae=30061.65329294407\n" + "text": [ + "Best hyperparmeter config: {'n_estimators': 493.0, 'num_leaves': 282.0, 'min_child_samples': 79.0, 'learning_rate': 0.08869510109538115, 'subsample': 0.9152991332236934, 'log_max_bin': 7.0, 'colsample_bytree': 0.6261496118517905, 'reg_alpha': 0.023427326819484437, 'reg_lambda': 3.676068046341948}\n", + "Best r2 on validation data: 0.8393\n", + "Training duration of best run: 17.8 s\n", + "Predicted labels [145117.75593607 248133.51648268 134326.11799226 ... 197406.17210771\n", + " 245758.5509811 267784.87515589]\n", + "True labels [136900. 241300. 200700. ... 160900. 227300. 
265600.]\n", + "r2 = 0.8467399586261989\n", + "mse = 2025868859.4366\n", + "mae = 29981.366220545584\n" + ] } ], "source": [ @@ -592,8 +857,8 @@ ], "metadata": { "kernelspec": { - "name": "python37764bitbsconda5b158f6acec0414d8c5c2401992dd9e1", - "display_name": "Python 3.7.7 64-bit ('bs': conda)", + "name": "python3", + "display_name": "Python 3.8.0 64-bit", "metadata": { "interpreter": { "hash": "0cfea3304185a9579d09e0953576b57c8581e46e6ebc6dfeb681bc5a511f7544" @@ -610,7 +875,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7-final" + "version": "3.8.0-final" } }, "nbformat": 4, diff --git a/notebook/flaml_xgboost.ipynb b/notebook/flaml_xgboost.ipynb index 300102af75..4ff75da9d2 100644 --- a/notebook/flaml_xgboost.ipynb +++ b/notebook/flaml_xgboost.ipynb @@ -69,7 +69,9 @@ { "output_type": "stream", "name": "stdout", - "text": "load dataset from./openml_ds537.pkl\nDataset name:houses\nX_train.shape: (15480, 8), y_train.shape: (15480,);\nX_test.shape: (5160, 8), y_test.shape: (5160,)\n" + "text": [ + "load dataset from ./openml_ds537.pkl\nDataset name: houses\nX_train.shape: (15480, 8), y_train.shape: (15480,);\nX_test.shape: (5160, 8), y_test.shape: (5160,)\n" + ] } ], "source": [ @@ -136,7 +138,64 @@ { "output_type": "stream", "name": "stderr", - "text": "[flaml.automl: 04-10 11:34:26] {890} INFO - Evaluation method: cv\n[flaml.automl: 04-10 11:34:26] {606} INFO - Using RepeatedKFold\n[flaml.automl: 04-10 11:34:26] {911} INFO - Minimizing error metric: 1-r2\n[flaml.automl: 04-10 11:34:26] {930} INFO - List of ML learners in AutoML Run: ['xgboost']\n[flaml.automl: 04-10 11:34:26] {994} INFO - iteration 0, current learner xgboost\n[flaml.automl: 04-10 11:34:26] {1147} INFO - at 0.2s,\tbest xgboost's error=2.1267,\tbest xgboost's error=2.1267\n[flaml.automl: 04-10 11:34:26] {994} INFO - iteration 1, current learner xgboost\n[flaml.automl: 04-10 11:34:27] {1147} INFO - at 0.4s,\tbest xgboost's error=2.1267,\tbest xgboost's error=2.1267\n[flaml.automl: 04-10 11:34:27] {994} INFO - iteration 2, current learner xgboost\n[flaml.automl: 04-10 11:34:27] {1147} INFO - at 0.6s,\tbest xgboost's error=0.3799,\tbest xgboost's error=0.3799\n[flaml.automl: 04-10 11:34:27] {994} INFO - iteration 3, current learner xgboost\n[flaml.automl: 04-10 11:34:27] {1147} INFO - at 0.8s,\tbest xgboost's error=0.3799,\tbest xgboost's error=0.3799\n[flaml.automl: 04-10 11:34:27] {994} INFO - iteration 4, current learner xgboost\n[flaml.automl: 04-10 11:34:27] {1147} INFO - at 1.2s,\tbest xgboost's error=0.2664,\tbest xgboost's error=0.2664\n[flaml.automl: 04-10 11:34:27] {994} INFO - iteration 5, current learner xgboost\n[flaml.automl: 04-10 11:34:28] {1147} INFO - at 1.6s,\tbest xgboost's error=0.2281,\tbest xgboost's error=0.2281\n[flaml.automl: 04-10 11:34:28] {994} INFO - iteration 6, current learner xgboost\n[flaml.automl: 04-10 11:34:28] {1147} INFO - at 1.8s,\tbest xgboost's error=0.2281,\tbest xgboost's error=0.2281\n[flaml.automl: 04-10 11:34:28] {994} INFO - iteration 7, current learner xgboost\n[flaml.automl: 04-10 11:34:28] {1147} INFO - at 2.1s,\tbest xgboost's error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:28] {994} INFO - iteration 8, current learner xgboost\n[flaml.automl: 04-10 11:34:28] {1147} INFO - at 2.3s,\tbest xgboost's error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:28] {994} INFO - iteration 9, current learner xgboost\n[flaml.automl: 04-10 11:34:29] {1147} INFO - at 2.5s,\tbest xgboost's 
error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:29] {994} INFO - iteration 10, current learner xgboost\n[flaml.automl: 04-10 11:34:29] {1147} INFO - at 2.8s,\tbest xgboost's error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:29] {994} INFO - iteration 11, current learner xgboost\n[flaml.automl: 04-10 11:34:30] {1147} INFO - at 3.5s,\tbest xgboost's error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:30] {994} INFO - iteration 12, current learner xgboost\n[flaml.automl: 04-10 11:34:30] {1147} INFO - at 3.7s,\tbest xgboost's error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:30] {994} INFO - iteration 13, current learner xgboost\n[flaml.automl: 04-10 11:34:30] {1147} INFO - at 3.9s,\tbest xgboost's error=0.2234,\tbest xgboost's error=0.2234\n[flaml.automl: 04-10 11:34:30] {994} INFO - iteration 14, current learner xgboost\n[flaml.automl: 04-10 11:34:31] {1147} INFO - at 4.6s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:31] {994} INFO - iteration 15, current learner xgboost\n[flaml.automl: 04-10 11:34:31] {1147} INFO - at 4.9s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:31] {994} INFO - iteration 16, current learner xgboost\n[flaml.automl: 04-10 11:34:33] {1147} INFO - at 6.4s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:33] {994} INFO - iteration 17, current learner xgboost\n[flaml.automl: 04-10 11:34:34] {1147} INFO - at 7.4s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:34] {994} INFO - iteration 18, current learner xgboost\n[flaml.automl: 04-10 11:34:35] {1147} INFO - at 9.3s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:35] {994} INFO - iteration 19, current learner xgboost\n[flaml.automl: 04-10 11:34:36] {1147} INFO - at 9.7s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:36] {994} INFO - iteration 20, current learner xgboost\n[flaml.automl: 04-10 11:34:36] {1147} INFO - at 10.3s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:36] {994} INFO - iteration 21, current learner xgboost\n[flaml.automl: 04-10 11:34:37] {1147} INFO - at 10.5s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:37] {994} INFO - iteration 22, current learner xgboost\n[flaml.automl: 04-10 11:34:38] {1147} INFO - at 11.5s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:38] {994} INFO - iteration 23, current learner xgboost\n[flaml.automl: 04-10 11:34:39] {1147} INFO - at 12.4s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:39] {994} INFO - iteration 24, current learner xgboost\n[flaml.automl: 04-10 11:34:39] {1147} INFO - at 12.8s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:39] {994} INFO - iteration 25, current learner xgboost\n[flaml.automl: 04-10 11:34:40] {1147} INFO - at 13.7s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:40] {994} INFO - iteration 26, current learner xgboost\n[flaml.automl: 04-10 11:34:40] {1147} INFO - at 14.3s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:40] {994} INFO - iteration 27, current learner xgboost\n[flaml.automl: 04-10 11:34:41] {1147} INFO - at 14.7s,\tbest xgboost's 
error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:41] {994} INFO - iteration 28, current learner xgboost\n[flaml.automl: 04-10 11:34:41] {1147} INFO - at 15.2s,\tbest xgboost's error=0.1820,\tbest xgboost's error=0.1820\n[flaml.automl: 04-10 11:34:41] {994} INFO - iteration 29, current learner xgboost\n[flaml.automl: 04-10 11:34:43] {1147} INFO - at 17.4s,\tbest xgboost's error=0.1812,\tbest xgboost's error=0.1812\n[flaml.automl: 04-10 11:34:43] {994} INFO - iteration 30, current learner xgboost\n[flaml.automl: 04-10 11:34:44] {1147} INFO - at 17.8s,\tbest xgboost's error=0.1812,\tbest xgboost's error=0.1812\n[flaml.automl: 04-10 11:34:44] {994} INFO - iteration 31, current learner xgboost\n[flaml.automl: 04-10 11:34:49] {1147} INFO - at 22.8s,\tbest xgboost's error=0.1712,\tbest xgboost's error=0.1712\n[flaml.automl: 04-10 11:34:49] {994} INFO - iteration 32, current learner xgboost\n[flaml.automl: 04-10 11:34:52] {1147} INFO - at 26.2s,\tbest xgboost's error=0.1712,\tbest xgboost's error=0.1712\n[flaml.automl: 04-10 11:34:52] {994} INFO - iteration 33, current learner xgboost\n[flaml.automl: 04-10 11:34:53] {1147} INFO - at 26.9s,\tbest xgboost's error=0.1712,\tbest xgboost's error=0.1712\n[flaml.automl: 04-10 11:34:53] {994} INFO - iteration 34, current learner xgboost\n[flaml.automl: 04-10 11:35:07] {1147} INFO - at 41.0s,\tbest xgboost's error=0.1712,\tbest xgboost's error=0.1712\n[flaml.automl: 04-10 11:35:07] {994} INFO - iteration 35, current learner xgboost\n[flaml.automl: 04-10 11:35:16] {1147} INFO - at 50.1s,\tbest xgboost's error=0.1712,\tbest xgboost's error=0.1712\n[flaml.automl: 04-10 11:35:16] {994} INFO - iteration 36, current learner xgboost\n[flaml.automl: 04-10 11:35:19] {1147} INFO - at 52.9s,\tbest xgboost's error=0.1708,\tbest xgboost's error=0.1708\n[flaml.automl: 04-10 11:35:19] {994} INFO - iteration 37, current learner xgboost\n[flaml.automl: 04-10 11:35:21] {1147} INFO - at 55.1s,\tbest xgboost's error=0.1708,\tbest xgboost's error=0.1708\n[flaml.automl: 04-10 11:35:21] {994} INFO - iteration 38, current learner xgboost\n[flaml.automl: 04-10 11:35:24] {1147} INFO - at 57.9s,\tbest xgboost's error=0.1708,\tbest xgboost's error=0.1708\n[flaml.automl: 04-10 11:35:24] {1187} INFO - selected model: \n[flaml.automl: 04-10 11:35:24] {944} INFO - fit succeeded\n" + "text": [ + "[flaml.automl: 05-02 07:56:06] {890} INFO - Evaluation method: cv\n", + "[flaml.automl: 05-02 07:56:06] {606} INFO - Using RepeatedKFold\n", + "[flaml.automl: 05-02 07:56:06] {911} INFO - Minimizing error metric: 1-r2\n", + "[flaml.automl: 05-02 07:56:06] {929} INFO - List of ML learners in AutoML Run: ['xgboost']\n", + "[flaml.automl: 05-02 07:56:06] {993} INFO - iteration 0, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:07] {1141} INFO - at 0.5s,\tbest xgboost's error=2.1267,\tbest xgboost's error=2.1267\n", + "[flaml.automl: 05-02 07:56:07] {993} INFO - iteration 1, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:07] {1141} INFO - at 0.8s,\tbest xgboost's error=2.1267,\tbest xgboost's error=2.1267\n", + "[flaml.automl: 05-02 07:56:07] {993} INFO - iteration 2, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:07] {1141} INFO - at 1.1s,\tbest xgboost's error=0.8485,\tbest xgboost's error=0.8485\n", + "[flaml.automl: 05-02 07:56:07] {993} INFO - iteration 3, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:08] {1141} INFO - at 1.3s,\tbest xgboost's error=0.3799,\tbest xgboost's error=0.3799\n", + "[flaml.automl: 05-02 07:56:08] {993} 
INFO - iteration 4, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:08] {1141} INFO - at 1.5s,\tbest xgboost's error=0.3799,\tbest xgboost's error=0.3799\n", + "[flaml.automl: 05-02 07:56:08] {993} INFO - iteration 5, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:08] {1141} INFO - at 1.7s,\tbest xgboost's error=0.3799,\tbest xgboost's error=0.3799\n", + "[flaml.automl: 05-02 07:56:08] {993} INFO - iteration 6, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:08] {1141} INFO - at 2.1s,\tbest xgboost's error=0.2992,\tbest xgboost's error=0.2992\n", + "[flaml.automl: 05-02 07:56:08] {993} INFO - iteration 7, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:09] {1141} INFO - at 2.5s,\tbest xgboost's error=0.2992,\tbest xgboost's error=0.2992\n", + "[flaml.automl: 05-02 07:56:09] {993} INFO - iteration 8, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:09] {1141} INFO - at 2.7s,\tbest xgboost's error=0.2992,\tbest xgboost's error=0.2992\n", + "[flaml.automl: 05-02 07:56:09] {993} INFO - iteration 9, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:09] {1141} INFO - at 3.1s,\tbest xgboost's error=0.2513,\tbest xgboost's error=0.2513\n", + "[flaml.automl: 05-02 07:56:09] {993} INFO - iteration 10, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:10] {1141} INFO - at 3.4s,\tbest xgboost's error=0.2513,\tbest xgboost's error=0.2513\n", + "[flaml.automl: 05-02 07:56:10] {993} INFO - iteration 11, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:10] {1141} INFO - at 3.7s,\tbest xgboost's error=0.2513,\tbest xgboost's error=0.2513\n", + "[flaml.automl: 05-02 07:56:10] {993} INFO - iteration 12, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:10] {1141} INFO - at 4.0s,\tbest xgboost's error=0.2113,\tbest xgboost's error=0.2113\n", + "[flaml.automl: 05-02 07:56:10] {993} INFO - iteration 13, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:11] {1141} INFO - at 4.3s,\tbest xgboost's error=0.2113,\tbest xgboost's error=0.2113\n", + "[flaml.automl: 05-02 07:56:11] {993} INFO - iteration 14, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:11] {1141} INFO - at 5.1s,\tbest xgboost's error=0.2090,\tbest xgboost's error=0.2090\n", + "[flaml.automl: 05-02 07:56:11] {993} INFO - iteration 15, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:12] {1141} INFO - at 5.9s,\tbest xgboost's error=0.2090,\tbest xgboost's error=0.2090\n", + "[flaml.automl: 05-02 07:56:12] {993} INFO - iteration 16, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:13] {1141} INFO - at 7.3s,\tbest xgboost's error=0.1919,\tbest xgboost's error=0.1919\n", + "[flaml.automl: 05-02 07:56:13] {993} INFO - iteration 17, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:14] {1141} INFO - at 7.8s,\tbest xgboost's error=0.1919,\tbest xgboost's error=0.1919\n", + "[flaml.automl: 05-02 07:56:14] {993} INFO - iteration 18, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:19] {1141} INFO - at 13.2s,\tbest xgboost's error=0.1797,\tbest xgboost's error=0.1797\n", + "[flaml.automl: 05-02 07:56:19] {993} INFO - iteration 19, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:21] {1141} INFO - at 15.2s,\tbest xgboost's error=0.1797,\tbest xgboost's error=0.1797\n", + "[flaml.automl: 05-02 07:56:21] {993} INFO - iteration 20, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:40] {1141} INFO - at 33.4s,\tbest xgboost's error=0.1797,\tbest xgboost's error=0.1797\n", + "[flaml.automl: 05-02 07:56:40] {993} INFO 
- iteration 21, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:42] {1141} INFO - at 36.3s,\tbest xgboost's error=0.1797,\tbest xgboost's error=0.1797\n", + "[flaml.automl: 05-02 07:56:42] {993} INFO - iteration 22, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:49] {1141} INFO - at 43.0s,\tbest xgboost's error=0.1782,\tbest xgboost's error=0.1782\n", + "[flaml.automl: 05-02 07:56:49] {993} INFO - iteration 23, current learner xgboost\n", + "[flaml.automl: 05-02 07:56:53] {1141} INFO - at 46.8s,\tbest xgboost's error=0.1782,\tbest xgboost's error=0.1782\n", + "[flaml.automl: 05-02 07:56:53] {993} INFO - iteration 24, current learner xgboost\n", + "[flaml.automl: 05-02 07:57:05] {1141} INFO - at 58.7s,\tbest xgboost's error=0.1782,\tbest xgboost's error=0.1782\n", + "[flaml.automl: 05-02 07:57:05] {1187} INFO - selected model: \n", + "[flaml.automl: 05-02 07:57:05] {944} INFO - fit succeeded\n" + ] } ], "source": [ @@ -168,7 +227,9 @@ { "output_type": "stream", "name": "stdout", - "text": "Best hyperparmeter config:{'n_estimators': 102.0, 'max_leaves': 65.0, 'min_child_weight': 128.0, 'learning_rate': 0.11244600616797214, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.869583478411346, 'reg_alpha': 0.007099803569534422, 'reg_lambda': 3.0138982869145257}\nBest r2 on validation data: 0.8292\nTraining duration of best run: 2.752 s\n" + "text": [ + "Best hyperparmeter config: {'n_estimators': 222.0, 'max_leaves': 62.0, 'min_child_weight': 7.5054716192185795, 'learning_rate': 0.04623175582706431, 'subsample': 0.8756054034199897, 'colsample_bylevel': 0.44768367042684304, 'colsample_bytree': 0.7352307811741962, 'reg_alpha': 0.0009765625, 'reg_lambda': 0.6207832675443758}\nBest r2 on validation data: 0.8218\nTraining duration of best run: 6.742 s\n" + ] } ], "source": [ @@ -190,7 +251,9 @@ { "output_type": "execute_result", "data": { - "text/plain": "" + "text/plain": [ + "" + ] }, "metadata": {}, "execution_count": 6 @@ -229,7 +292,9 @@ { "output_type": "stream", "name": "stdout", - "text": "Predicted labels[139985.8 258609.5 147321.94 ... 245122.38 243654.11 271184.34]\nTrue labels[136900. 241300. 200700. ... 160900. 227300. 265600.]\n" + "text": [ + "Predicted labels [146973.44 249425.19 153984.38 ... 231542.02 240381.16 264131.75]\nTrue labels [136900. 241300. 200700. ... 160900. 227300. 
265600.]\n" + ] } ], "source": [ @@ -252,7 +317,9 @@ { "output_type": "stream", "name": "stdout", - "text": "r2=0.8324355914726785\nmse=2214951230.2261615\nmae=31314.021581274224\n" + "text": [ + "r2 = 0.831184063859627\nmse = 2231494556.1660414\nmae = 31958.22036624879\n" + ] } ], "source": [ @@ -276,7 +343,9 @@ { "output_type": "stream", "name": "stdout", - "text": "{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 1, 'learning_rate': 0.1, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 1, 'learning_rate': 0.1, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 1.8630223791107017, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9684145930669938, 'reg_alpha': 0.001831177697321707, 'reg_lambda': 0.2790165919053839}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 1.8630223791107017, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9684145930669938, 'reg_alpha': 0.001831177697321707, 'reg_lambda': 0.2790165919053839}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 20.0, 'max_leaves': 4.0, 'min_child_weight': 12.035965728320889, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9534346594834143, 'reg_alpha': 0.002208534076096185, 'reg_lambda': 0.5460627024738886}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 20.0, 'max_leaves': 4.0, 'min_child_weight': 12.035965728320889, 'learning_rate': 1.0, 'subsample': 0.9814787163243813, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9534346594834143, 'reg_alpha': 0.002208534076096185, 'reg_lambda': 0.5460627024738886}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 15.0, 'min_child_weight': 8.517629386811183, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 15.0, 'min_child_weight': 8.517629386811183, 'learning_rate': 0.4743416464891248, 'subsample': 0.9233328006239466, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473814}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 41.0, 'min_child_weight': 36.70427494512747, 'learning_rate': 0.829848065653668, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9462273188918029, 'reg_alpha': 0.03855312356344618, 'reg_lambda': 0.11780549418358223}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 41.0, 'min_child_weight': 36.70427494512747, 'learning_rate': 0.829848065653668, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 
0.9462273188918029, 'reg_alpha': 0.03855312356344618, 'reg_lambda': 0.11780549418358223}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 19.0, 'max_leaves': 67.0, 'min_child_weight': 50.99082180813152, 'learning_rate': 0.2715137524235054, 'subsample': 1.0, 'colsample_bylevel': 0.9209151683339658, 'colsample_bytree': 0.9707158427317836, 'reg_alpha': 0.07779983160764462, 'reg_lambda': 0.11002505912010285}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 19.0, 'max_leaves': 67.0, 'min_child_weight': 50.99082180813152, 'learning_rate': 0.2715137524235054, 'subsample': 1.0, 'colsample_bylevel': 0.9209151683339658, 'colsample_bytree': 0.9707158427317836, 'reg_alpha': 0.07779983160764462, 'reg_lambda': 0.11002505912010285}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 30.0, 'max_leaves': 236.0, 'min_child_weight': 9.853369895127964, 'learning_rate': 0.11879538341958183, 'subsample': 0.9097941662911945, 'colsample_bylevel': 0.8226978679658046, 'colsample_bytree': 0.8131809898257346, 'reg_alpha': 0.010230542165764517, 'reg_lambda': 0.2230054167461401}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 30.0, 'max_leaves': 236.0, 'min_child_weight': 9.853369895127964, 'learning_rate': 0.11879538341958183, 'subsample': 0.9097941662911945, 'colsample_bylevel': 0.8226978679658046, 'colsample_bytree': 0.8131809898257346, 'reg_alpha': 0.010230542165764517, 'reg_lambda': 0.2230054167461401}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 116.0, 'max_leaves': 181.0, 'min_child_weight': 121.26153395918108, 'learning_rate': 0.10509781099106284, 'subsample': 0.9632459632197474, 'colsample_bylevel': 0.8924959880815224, 'colsample_bytree': 0.9632899031790462, 'reg_alpha': 0.005131047214558587, 'reg_lambda': 0.5508272328115283}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 116.0, 'max_leaves': 181.0, 'min_child_weight': 121.26153395918108, 'learning_rate': 0.10509781099106284, 'subsample': 0.9632459632197474, 'colsample_bylevel': 0.8924959880815224, 'colsample_bytree': 0.9632899031790462, 'reg_alpha': 0.005131047214558587, 'reg_lambda': 0.5508272328115283}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 102.0, 'max_leaves': 65.0, 'min_child_weight': 128.0, 'learning_rate': 0.11244600616797214, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.869583478411346, 'reg_alpha': 0.007099803569534422, 'reg_lambda': 3.0138982869145257}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 102.0, 'max_leaves': 65.0, 'min_child_weight': 128.0, 'learning_rate': 0.11244600616797214, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.869583478411346, 'reg_alpha': 0.007099803569534422, 'reg_lambda': 3.0138982869145257}}\n" + "text": [ + "{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 1, 'learning_rate': 0.1, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 1, 'learning_rate': 0.1, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0009765625, 'reg_lambda': 1.0}}\n{'Current Learner': 'xgboost', 'Current 
Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 0.2620811530815948, 'learning_rate': 0.25912534572860507, 'subsample': 0.9266743941610592, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013933617380144255, 'reg_lambda': 0.18096917948292954}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 0.2620811530815948, 'learning_rate': 0.25912534572860507, 'subsample': 0.9266743941610592, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013933617380144255, 'reg_lambda': 0.18096917948292954}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 1.8630223791107017, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.946138073111236, 'reg_alpha': 0.0018311776973217071, 'reg_lambda': 0.27901659190538414}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 1.8630223791107017, 'learning_rate': 1.0, 'subsample': 0.8513627344387318, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.946138073111236, 'reg_alpha': 0.0018311776973217071, 'reg_lambda': 0.27901659190538414}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 4.0, 'min_child_weight': 5.909231502320304, 'learning_rate': 1.0, 'subsample': 0.8894434216129232, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013605736901132325, 'reg_lambda': 0.1222158118565165}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 4.0, 'min_child_weight': 5.909231502320304, 'learning_rate': 1.0, 'subsample': 0.8894434216129232, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 0.0013605736901132325, 'reg_lambda': 0.1222158118565165}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 11.0, 'min_child_weight': 8.517629386811171, 'learning_rate': 1.0, 'subsample': 0.9233328006239466, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9468117873770695, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473819}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 11.0, 'min_child_weight': 8.517629386811171, 'learning_rate': 1.0, 'subsample': 0.9233328006239466, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.9468117873770695, 'reg_alpha': 0.034996420228767956, 'reg_lambda': 0.6169079461473819}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 20.0, 'max_leaves': 15.0, 'min_child_weight': 43.62419686983011, 'learning_rate': 0.6413547778096401, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.8481188761562112, 'reg_alpha': 0.01241885232679939, 'reg_lambda': 0.21352682817916652}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 20.0, 'max_leaves': 15.0, 'min_child_weight': 43.62419686983011, 'learning_rate': 0.6413547778096401, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 0.8481188761562112, 'reg_alpha': 0.01241885232679939, 'reg_lambda': 0.21352682817916652}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 58.0, 'max_leaves': 8.0, 'min_child_weight': 51.84874392377363, 'learning_rate': 0.23511987355535005, 
'subsample': 1.0, 'colsample_bylevel': 0.8182737361783602, 'colsample_bytree': 0.8031986460435498, 'reg_alpha': 0.00400039941928546, 'reg_lambda': 0.3870252968100477}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 58.0, 'max_leaves': 8.0, 'min_child_weight': 51.84874392377363, 'learning_rate': 0.23511987355535005, 'subsample': 1.0, 'colsample_bylevel': 0.8182737361783602, 'colsample_bytree': 0.8031986460435498, 'reg_alpha': 0.00400039941928546, 'reg_lambda': 0.3870252968100477}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 101.0, 'max_leaves': 14.0, 'min_child_weight': 7.444058088783045, 'learning_rate': 0.39220715578198356, 'subsample': 1.0, 'colsample_bylevel': 0.6274332478496758, 'colsample_bytree': 0.7190251742957809, 'reg_alpha': 0.007212902167942765, 'reg_lambda': 0.20172056689658158}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 101.0, 'max_leaves': 14.0, 'min_child_weight': 7.444058088783045, 'learning_rate': 0.39220715578198356, 'subsample': 1.0, 'colsample_bylevel': 0.6274332478496758, 'colsample_bytree': 0.7190251742957809, 'reg_alpha': 0.007212902167942765, 'reg_lambda': 0.20172056689658158}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 205.0, 'max_leaves': 30.0, 'min_child_weight': 5.450621032615104, 'learning_rate': 0.12229148765139466, 'subsample': 0.8895588746662894, 'colsample_bylevel': 0.47518959001130784, 'colsample_bytree': 0.6845612830806885, 'reg_alpha': 0.01126059820390593, 'reg_lambda': 0.08170816686602438}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 205.0, 'max_leaves': 30.0, 'min_child_weight': 5.450621032615104, 'learning_rate': 0.12229148765139466, 'subsample': 0.8895588746662894, 'colsample_bylevel': 0.47518959001130784, 'colsample_bytree': 0.6845612830806885, 'reg_alpha': 0.01126059820390593, 'reg_lambda': 0.08170816686602438}}\n{'Current Learner': 'xgboost', 'Current Sample': 15480, 'Current Hyper-parameters': {'n_estimators': 222.0, 'max_leaves': 62.0, 'min_child_weight': 7.5054716192185795, 'learning_rate': 0.04623175582706431, 'subsample': 0.8756054034199897, 'colsample_bylevel': 0.44768367042684304, 'colsample_bytree': 0.7352307811741962, 'reg_alpha': 0.0009765625, 'reg_lambda': 0.6207832675443758}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 222.0, 'max_leaves': 62.0, 'min_child_weight': 7.5054716192185795, 'learning_rate': 0.04623175582706431, 'subsample': 0.8756054034199897, 'colsample_bylevel': 0.44768367042684304, 'colsample_bytree': 0.7352307811741962, 'reg_alpha': 0.0009765625, 'reg_lambda': 0.6207832675443758}}\n" + ] } ], "source": [ @@ -301,8 +370,8 @@ "output_type": "display_data", "data": { "text/plain": "
", - "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAEWCAYAAABIVsEJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3de5wcVZ338c+XIYGwCkkgYgi5ucRIEE10xMXLChFMcJVERQR33YhgVlfcXXmMBFFwcXmesOyK+HrwEhEBBblELlGDkbsuF2EwgVwwEgJChkDCJYgQCUl++0edCZWmu9OpmZ7umf6+X69+ddWpU1W/mnT616dO1SlFBGZmZjtqp0YHYGZmfZMTiJmZFeIEYmZmhTiBmJlZIU4gZmZWiBOImZkV4gRiVgeS3i1pRaPjMKsnJxDrdyQ9LOmwRsYQEb+JiPH12r6kKZJ+Lek5Sesk3SrpyHrtz6wcJxCzAiS1NXDfRwFXAhcD+wJ7A6cBHyywLUny94AV4g+OtQxJO0maLelBSU9JukLS0NzyKyU9LunZ9Ov+gNyyCyV9R9ICSc8Dh6aWzhcl3ZfWuVzSrqn+IZJW59avWDct/5KkNZIek3SCpJC0X5ljEPAN4OsRcX5EPBsRWyLi1oj4dKrzNUk/zq0zJm1v5zR/i6QzJd0GvADMktRRsp8vSJqfpneR9F+SHpH0hKTvShrUzX8O6wecQKyVfB6YDrwH2Ad4Bjgvt/w6YBzwGuB3wCUl638cOBN4NfA/qexoYCowFngT8Mkq+y9bV9JU4CTgMGA/4JAq2xgPjATmValTi08AM8mO5bvAeEnjcss/DlyapucArwcmpvhGkLV4rMU5gVgr+QxwakSsjogXga8BR3X9Mo+ICyLiudyyN0vaI7f+tRFxW/rF/5dU9q2IeCwingZ+RvYlW0mlukcDP4yIZRHxQtp3JXum9zW1HnQFF6b9bYqIZ4FrgWMBUiJ5AzA/tXhmAl+IiKcj4jng/wLHdHP/1g84gVgrGQ1cLWm9pPXA/cBmYG9JbZLmpNNbfwIeTuvslVv/0TLbfDw3/QLwqir7r1R3n5Jtl9tPl6fS+/AqdWpRuo9LSQmErPVxTUpmw4DdgHtyf7dfpnJrcU4g1koeBY6IiMG5164R0Un2pTmN7DTSHsCYtI5y69dr6Oo1ZJ3hXUZWqbuC7Dg+UqXO82Rf+l1eW6ZO6bFcDwyTNJEskXSdvnoS2AAckPub7RER1RKltQgnEOuvBkjaNffamexc/5mSRgNIGiZpWqr/auBFsl/4u5GdpuktVwDHSdpf0m7AVytVjOz5CycBX5V0nKTd08UB75I0N1VbDPytpFHpFNwp2wsgIl4iu7LrbGAoWUIhIrYA3wfOkfQaAEkjJE0pfLTWbziBWH+1gOyXc9fra8C5wHzgV5KeA+4E3p7qXwz8EegElqdlvSIirgO+BdwMrMzt+8UK9ecBHwM+BTwGPAH8B1k/BhFxPXA5cB9wD/DzGkO5lKwFdmVEbMqVn9wVVzq9dwNZZ761OPmBUmbNRdL+wFJgl5IvcrOm4haIWROQ9KF0v8UQ4CzgZ04e1uycQMyawz8Ba4EHya4M+2xjwzHbPp/CMjOzQtwCMTOzQnZudAC9aa+99ooxY8Y0Ogwzsz7lnnvueTIiXnHzaEslkDFjxtDR0bH9imZmtpWkP5Yr9yksMzMrxAnEzMwKcQIxM7NCnEDMzKwQJxAzMyukpa7CMuuOaxZ1cvbCFTy2fgP7DB7ErCnjmT5pRKPDMquo3p9ZJ5Am4C+m2jTy73TNok5OuWoJG17aDEDn+g2cctUSAP9bWVPqjc9sSw1l0t7eHs12H0jpPzLAoAFt/L8PH+gvppxG/53eOecmOtdveEX5wLadmDRqcN33b7ajFj2yno2bt7yifMTgQdw2e/IObUvSPRHRXlruFkiDnb1wxTZfigAbXtrMl+bdx0/ueqRBUTWfcv8ZevPvVC55AGX/g5o1g0qfzccqfJaLcAJpsEr/mP5i2lalv0dv/Z0Gtu1U8dfc5f90cK/EYLYjKrWa9xk8qMf24QTSYPsMHlT2H9lfTNuq9J+ht/5OlU6hzZriB/NZc5o1ZXzdP7NOIN2U79jdY9AAJFj/wks1d/L2xj9yf9Dov1PXv6MvdrC+ojc+s+5E74Zyv0rzau3kvWZRJ1+adx8bN29hhL+YKvLVamaNUakTvaEJRNJU4FygDTg/IuaULD8HODTN7ga8JiIGp2WbgSVp2SMRceT29tfTCaTSaZW8Wq/SWb7mT0wYvrtPW5lZ02m6q7AktQHnAYcDq4G7Jc2Pi
OVddSLiC7n6nwcm5TaxISIm9la85dRyNUOtnbwThu/OtIn+NW1mfUcj+0AOAlZGxCoASZcB04DlFeofC5zeS7Ft1zWLOtlJYvN2WnDuDDez/qqRY2GNAB7Nza9OZa8gaTQwFrgpV7yrpA5Jd0qaXmknkmameh3r1q3ribi39n1sL3m4M9zM+rO+MpjiMcC8iMj3Vo9O5+Q+DnxT0l+XWzEi5kZEe0S0Dxv2iicyFlLu5j8AAUN2G4DIWh6+m9zM+rNGnsLqBEbm5vdNZeUcA3wuXxARnel9laRbyPpHHuz5MF+pWt/HotPe1xshmJk1XCNbIHcD4ySNlTSQLEnML60k6Q3AEOCOXNkQSbuk6b2Ad1K576THVbqTsyfv8DQza3YNSyARsQk4EVgI3A9cERHLJJ0hKX9J7jHAZbHt9cb7Ax2S7gVuBubkr96qt1lTxjNoQNs2Ze7vMLNW4xsJC/LNf2bWKpruPpC+bvqkEVtHgfVlumbWivrKVVhmZtZknEDMzKwQJxAzMyvEfSA1KjcSrJlZK3MLpAZdQ5d0rt9A8PLD6Z/884uNDs3MrGGcQGpQ6bnlq9Y936CIzMwazwmkBpWGLgnwEOxm1rKcQGpQaYiSEYMH8fG3j+rlaMzMmoMTSA08dImZ2Sv5KqwadA1R4qFLzMxe5gRSIw9dYma2LZ/CMjOzQpxAzMysECcQMzMrxAnEzMwKaWgCkTRV0gpJKyXNLrP8k5LWSVqcXifkls2Q9EB6zejdyM3MrGFXYUlqA84DDgdWA3dLml/m0bSXR8SJJesOBU4H2sluCL8nrftML4RuZmY0tgVyELAyIlZFxEbgMmBajetOAa6PiKdT0rgemFqnOM3MrIxGJpARwKO5+dWprNRHJN0naZ6kkTu4rpmZ1Umzd6L/DBgTEW8ia2VctKMbkDRTUoekjnXr1vV4gGZmraqRCaQTGJmb3zeVbRURT0VE10M3zgfeWuu6uW3MjYj2iGgfNmxYjwRuZmaNTSB3A+MkjZU0EDgGmJ+vIGl4bvZI4P40vRB4n6QhkoYA70tlZmbWSxp2FVZEbJJ0ItkXfxtwQUQsk3QG0BER84F/kXQksAl4GvhkWvdpSV8nS0IAZ0TE071+EGZmLayhgylGxAJgQUnZabnpU4BTKqx7AXBBXQM0M7OKmr0T3czMmpQTiJmZFeIEYmZmhTiBmJlZIU4gZmZWiBOImZkV4gRiZmaFOIGYmVkhTiBmZlaIE4iZmRXiBGJmZoU4gZiZWSFOIGZmVogTiJmZFeIEYmZmhTiBmJlZIU4gZmZWSEMTiKSpklZIWilpdpnlJ0laLuk+STdKGp1btlnS4vSaX7qumZnVV8MeaSupDTgPOBxYDdwtaX5ELM9VWwS0R8QLkj4L/CfwsbRsQ0RM7NWgzcxsq0a2QA4CVkbEqojYCFwGTMtXiIibI+KFNHsnsG8vx2hmZhU0MoGMAB7Nza9OZZUcD1yXm99VUoekOyVNr7SSpJmpXse6deu6F7GZmW3VsFNYO0LSPwDtwHtyxaMjolPS64CbJC2JiAdL142IucBcgPb29uiVgM3MWkAjWyCdwMjc/L6pbBuSDgNOBY6MiBe7yiOiM72vAm4BJtUzWDMz21YjE8jdwDhJYyUNBI4BtrmaStIk4HtkyWNtrnyIpF3S9F7AO4F857uZmdVZw05hRcQmSScCC4E24IKIWCbpDKAjIuYDZwOvAq6UBPBIRBwJ7A98T9IWsiQ4p+TqLTMzq7OG9oFExAJgQUnZabnpwyqsdztwYH2jMzOzanwnupmZFeIEYmZmhTiBmJlZIU4gZmZWiBOImZkVUjWBSNpd0l+XKX9T/UIyM7O+oGICkXQ08Hvgp5KWSXpbbvGF9Q7MzMyaW7UWyJeBt6Yh048DfiTpQ2mZ6h6ZmZk1tWo3ErZFxBqAiLhL0qHAzyWNBDwooZlZi6vWAnku3/+RkskhZM/sOKDOcZmZWZOr1gL5LCWnqiLiOUlTgaPrGpWZmTW9ii2QiLgXeEjSzSXlL0XEJXWPzMzMmlrVy3gjYjOwRdIevRSPmZn1EbWMxvtnYImk64Hnuwoj4l/qFpWZmTW9WhLIVellZma21XYTSERc1BuBmJlZ39LQsbAkTZW0QtJKSbPLLN9F0uVp+W8ljcktOyWVr5A0pTfjNjOzBiYQSW3AecARwATgWEkTSqodDzwTEfsB5wBnpXUnkD1D/QBgKvDttD0zM+sljWyBHASsjIhVEbERuIzsJsW8aUDXKbR5wHuVPRx9GnBZRLwYEQ8BK9P2zMysl2y3D0TS64FZwOh8/YiY3M19jwAezc2vBt5eqU5EbJL0LLBnKr+zZN0RFeKfCcwEGDVqVDdDNjOzLrVchXUl8F3g+8Dm+obT8yJiLjAXoL293WN4mZn1kFoSyKaI+E4d9t0JjMzN75vKytVZLWlnYA/gqRrXNTOzOqqlD+Rnkv5Z0nBJQ7tePbDvu4FxksZKGkjWKT6/pM58YEaaPgq4KSIilR+TrtIaC4wD7uqBmMzMrEa1tEC6vsBn5coCeF13dpz6NE4EFgJtwAURsUzSGUBHRMwHfkD2HJKVwNNkSYZU7wpgObAJ+FwadsXMzHpJLTcSjq3XziNiAbCgpOy03PRfgI9WWPdM4Mx6xWZmZtXVchXWALKh3f82Fd0CfC8iXqpjXGZm1uRqOYX1HWAA8O00/4lUdkK9gjIzs+ZXSwJ5W0S8OTd/k6R76xWQmZn1DbVchbU5/2hbSa+jD94PYmZmPauWFsgs4GZJq8gecTsaOK6uUZmZWdOr5SqsGyWNA8anohUR8WJ9wzIzs2ZXMYFImhwRN0n6cMmi/SQREX7IlJlZC6vWAnkPcBPwwTLLAj+l0MyspVVMIBFxepo8Iw2ZvlUaPsTMzFpYLVdh/bRM2byeDsTMzPqWan0gbyB74t8eJf0guwO71jswMzNrbtX6QMYDHwAGs20/yHPAp+sZlJmZNb9qfSDXAtdKOjgi7ujFmMzMrA+o5UbCRZI+R3Y6a+upq4j4VN2iMjOzpldLJ/qPgNcCU4BbyZ7+91w9gzIzs+ZXSwLZLyK+CjwfERcBfwe8vb5hmZlZs6slgXQ992O9pDeSPZf8Nd3ZaXos7vWSHkjvQ8rUmSjpDknLJN0n6WO5ZRdKekjS4vSa2J14zMxsx9WSQOamL/ivkj2LfDnwn93c72zgxogYB9yY5ku9APxjRBwATAW+KWlwbvmsiJiYXou7GY+Zme2gWgZTPD9N3ko3n4OeMw04JE1fRPaUw5NL9vuH3PRjktYCw4D1PRSDmZl1Q7UbCU+qtmJEfKMb+907Itak6ceBvatVlnQQMBB4MFd8pqTTSC2YSiMES5oJzAQYNWpUN0I2M7O8ai2QV6f38cDbyE5fQXZT4V3b27CkG8iu3ip1an4mIkJSVNnOcLIrwWZExJZUfApZ4hkIzCVrvZxRbv2ImJvq0N7eXnE/Zma2Y6rdSPjvAJJ+DbwlIp5L818DfrG9DUfEYZWWSXpC0vCIWJMSxNoK9XZP+zo1Iu7Mbbur9fKipB8C
X9xePGZm1rNq6UTfG9iYm9/Idk451WA+MCNNzwCuLa0gaSBwNXBxRMwrWTY8vQuYDiztZjxmZraDarkT/WLgLklXp/npwIXd3O8c4ApJxwN/BI4GkNQOfCYiTkhlfwvsKemTab1PpiuuLpE0jOwRu4uBz3QzHjMz20G1XIV1pqTrgHenouMiYlF3dhoRTwHvLVPeAZyQpn8M/LjC+pO7s38zM+u+aldh7R4Rf5I0FHg4vbqWDY2Ip+sfnpmZNatqLZBLyYZzv4fsEbZdlOZ76p4QMzPrg6pdhfWB9O7H15qZ2StUO4X1lmorRsTvej4cMzPrK6qdwvrvKssCcEe2mVkLq3YK69DeDMTMzPqWWu4DIQ3jPoFtn0h4cb2CMjOz5rfdBCLpdLKRcycAC4AjgP8hu8HQzMxaVC1DmRxFdtPf4xFxHPBmsodKmZlZC6slgWxIo+BuSoMbrgVG1jcsMzNrdrX0gXSkJwF+n+ymwj8Dd9Q1KjMza3rV7gM5D7g0Iv45FX1X0i+B3SPivl6JzszMmla1FsgfgP9KQ6dfAfyku4MomplZ/1GxDyQizo2Ig4H3AE8BF0j6vaTTJb2+1yI0M7OmtN1O9Ij4Y0ScFRGTgGPJngdyf90jMzOzprbdBCJpZ0kflHQJcB2wAvhw3SMzM7OmVq0T/XCyFsf7gbuAy4CZEfF8d3eanjFyOTCG7DkjR0fEM2XqbQaWpNlHIuLIVD42xbMn2ZVhn4iIjaXrm5lZ/VRrgZwC3A7sHxFHRsSlPZE8ktnAjRExDrgxzZezISImpteRufKzgHMiYj/gGeD4HorLzMxqVK0TfXJEnF+uZdADpgEXpemLyPpVaiJJZCMBzyuyvpmZ9Yxa7kSvh70jYk2afhzYu0K9XSV1SLpTUleS2BNYHxGb0vxqYEQdYzUzszJqGo23CEk3AK8ts+jU/ExEhKQoUw9gdER0SnodcJOkJcCzOxjHTGAmwKhRo3ZkVTMzq6JuCSQiDqu0TNITkoZHxJp0o+LaCtvoTO+rJN0CTAJ+CgyWtHNqhewLdFaJYy4wF6C9vb1SojIzsx3UqFNY84EZaXoGcG1pBUlDJO2SpvcC3gksj4gAbiYbJbji+mZmVl+NSiBzgMMlPQAcluaR1C7p/FRnf7KBHO8lSxhzImJ5WnYycJKklWR9Ij/o1ejNzKx+p7CqiYinyJ4xUlreAZyQpm8HDqyw/irgoHrGaGZm1TWqBWJmZn2cE4iZmRXiBGJmZoU4gZiZWSFOIGZmVogTiJmZFeIEYmZmhTiBmJlZIU4gZmZWiBOImZkV4gRiZmaFOIGYmVkhTiBmZlaIE4iZmRXiBGJmZoU4gZiZWSFOIGZmVkhDEoikoZKul/RAeh9Sps6hkhbnXn+RND0tu1DSQ7llE3v/KMzMWlujWiCzgRsjYhxwY5rfRkTcHBETI2IiMBl4AfhVrsqsruURsbhXojYzs60alUCmARel6YuA6dupfxRwXUS8UNeozMysZo1KIHtHxJo0/Tiw93bqHwP8pKTsTEn3STpH0i6VVpQ0U1KHpI5169Z1I2QzM8urWwKRdIOkpWVe0/L1IiKAqLKd4cCBwMJc8SnAG4C3AUOBkyutHxFzI6I9ItqHDRvWnUMyM7Ocneu14Yg4rNIySU9IGh4Ra1KCWFtlU0cDV0fES7ltd7VeXpT0Q+CLPRK0mZnVrFGnsOYDM9L0DODaKnWPpeT0VUo6SBJZ/8nSOsRoZmZVNCqBzAEOl/QAcFiaR1K7pPO7KkkaA4wEbi1Z/xJJS4AlwF7Af/RCzGZmllO3U1jVRMRTwHvLlHcAJ+TmHwZGlKk3uZ7xmZnZ9vlOdDMzK8QJxMzMCnECMTOzQpxAzMysECcQMzMrxAnEzMwKcQIxM7NCnEDMzKwQJxAzMyvECcTMzApxAjEzs0KcQMzMrBAnEDMzK8QJxMzMCnECMTOzQpxAzMyskIYkEEkflbRM0hZJ7VXqTZW0QtJKSbNz5WMl/TaVXy5pYO9EbmZmXRrVAlkKfBj4daUKktqA84AjgAnAsZImpMVnAedExH7AM8Dx9Q3XzMxKNSSBRMT9EbFiO9UOAlZGxKqI2AhcBkyTJGAyMC/VuwiYXr9ozcysnGbuAxkBPJqbX53K9gTWR8SmkvKyJM2U1CGpY926dXUL1sys1excrw1LugF4bZlFp0bEtfXab6mImAvMBWhvb4/e2q+ZWX9XtwQSEYd1cxOdwMjc/L6p7ClgsKSdUyukq9zMzHpRM5/CuhsYl664GggcA8yPiABuBo5K9WYAvdaiMTOzTKMu4/2QpNXAwcAvJC1M5ftIWgCQWhcnAguB+4ErImJZ2sTJwEmSVpL1ifygt4/BzKzV1e0UVjURcTVwdZnyx4D35+YXAAvK1FtFdpWWmZk1SDOfwjIzsybmBGJmZoU4gZiZWSFOIGZmVkhDOtH7kmsWdXL2whU8tn4DA9p2YuTQQY0OycysKbgFUsU1izo55aoldK7fQAAbN2/hoSef55pFvm/RzMwJpIqzF65gw0ubtynbElm5mVmrcwKp4rH1G3ao3MyslTiBVLHP4PL9HZXKzcxaiRNIFbOmjGfQgLZtygYNaGPWlPENisjMrHn4Kqwqpk/KHjPSdRXWPoMHMWvK+K3lZmatzAlkO6ZPGuGEYWZWhk9hmZlZIU4gZmZWiBOImZkV4gRiZmaFOIGYmVkhyh4x3hokrQP+WGDVvYAnezicZtQKx9kKxwg+zv6m0cc5OiKGlRa2VAIpSlJHRLQ3Oo56a4XjbIVjBB9nf9Osx+lTWGZmVogTiJmZFeIEUpu5jQ6gl7TCcbbCMYKPs79pyuN0H4iZmRXiFoiZmRXiBGJmZoU4gVQhaaqkFZJWSprd6Hh6iqQLJK2VtDRXNlTS9ZIeSO9DGhljT5A0UtLNkpZLWibpX1N5vzpWSbtKukvSvek4/z2Vj5X02/T5vVzSwEbH2l2S2iQtkvTzNN8fj/FhSUskLZbUkcqa8jPrBFKBpDbgPOAIYAJwrKQJjY2qx1wITC0pmw3cGBHjgBvTfF+3Cfg/ETEB+Bvgc+nfsL8d64vA5Ih4MzARmCrpb4CzgHMiYj/gGeD4BsbYU/4VuD833x+PEeDQiJiYu/ejKT+zTiCVHQSsjIhVEbERuAyY1uCYekRE/Bp4uqR4GnBRmr4ImN6rQdVBRKyJiN+l6efIvnhG0M+ONTJ/TrMD0iuAycC8VN7nj1PSvsDfAeenedHPjrGKpvzMOoFUNgJ4NDe/OpX1V3tHxJo0/TiwdyOD6WmSxgCTgN/SD481ndpZDKwFrgceBNZHxKZUpT98fr8JfAnYkub3pP8dI2TJ/1eS7pE0M5U15WfWTyS0V4iIkNRvru+W9Crgp8C/RcSfsh+umf5yrBGxGZgoaTBwNfCGBofUoyR9AFgbEfdIOqTR8dTZuyKiU9JrgOsl/T6/sJk+s26BVNYJjMzN75vK+qsnJA0HSO9rGxxPj5A0gCx5XBIRV6XifnmsABGxHrgZOBgYLKnrR2Jf//y+EzhS0sNkp5MnA+fSv44RgIj
oTO9ryX4MHESTfmadQCq7GxiXrvIYCBwDzG9wTPU0H5iRpmcA1zYwlh6RzpH/ALg/Ir6RW9SvjlXSsNTyQNIg4HCy/p6bgaNStT59nBFxSkTsGxFjyP4v3hQRf08/OkYASX8l6dVd08D7gKU06WfWd6JXIen9ZOdd24ALIuLMBofUIyT9BDiEbIjoJ4DTgWuAK4BRZEPeHx0RpR3tfYqkdwG/AZbw8nnzL5P1g/SbY5X0JrKO1TayH4VXRMQZkl5H9mt9KLAI+IeIeLFxkfaMdArrixHxgf52jOl4rk6zOwOXRsSZkvakCT+zTiBmZlaIT2GZmVkhTiBmZlaIE4iZmRXiBGJmZoU4gZiZWSFOINZvSDpH0r/l5hdKOj83/9+STqqy/oWSjkrTt0hqL1NngKQ5aVTU30m6Q9IRadnDkvYqEPfW/VZYfl4amXW5pA1perGkoyQt6LoHpCdJGt414m2F5QMl/Tp3E5+1ICcQ609uA94BIGknsvtcDsgtfwdwezf38XVgOPDGiHgL2aB2r+7mNquKiM9FxETg/cCDaZTWiRExLyLen+4+72knAd+vEtNGslFhP1aHfVsf4QRi/cntZEN4QJY4lgLPSRoiaRdgf+B3kk6TdLekpZLmKj84VhWSdgM+DXy+62a1iHgiIq4oU/ektP2lJa2if5R0n7Jnd/yozHpfTy2SthpjeljSXpLGSPp9WvcPki6RdJik21Jr6aBU/6+UPQ/mLmXP1ag0wvRHgF+mdQ5I9Ren2MelOtcAf19LnNY/uflp/UZEPCZpk6RRZK2NO8hGZz0YeBZYEhEbJf3/iDgDIH2JfwD4WQ272A94JCL+VK2SpLcCxwFvBwT8VtKtwEbgK8A7IuJJSUNL1jubrDVzXBS7w3c/4KPAp8iG4vk48C7gSLI78KcDp5INA/KpdOrrLkk3RMTzuTjGAs/k7uj+DHBuRFyShvXpSm5LgbcViNP6CbdArL+5nSx5dCWQO3Lzt6U6hyp7it0SskH5Dii3oW54F3B1RDyfntNxFfDutK8rI+JJgJKhKL4K7BERnymYPAAeioglEbEFWEb2AKIgG8plTKrzPmC2sqHfbwF2JRseI284sC43fwfwZUknA6MjYkOKfzOwsWvsJms9TiDW33T1gxxI9gv5TrIWyDuA2yXtCnwbOCoiDiQ7z79rjdteCYyStHuPR521GN5a2irZQfkxoLbk5rfw8tkGAR/J9aOMioj8E/4ANpD7m0TEpWStmA3AAkmTc3V3Af7SjZitD3MCsf7mdrJTUk9HxOb0K38wWRK5nZe/GJ9U9pyQilc/lYqIF8hG9z03ncrpGgn3oyVVfwNMl7RbGlH1Q6nsJuCjaWA8SpLFL4E5wC/q/It+IfD5rn4fSZPK1PkDL7dYugb4WxUR3yIbBfZNqXxP4MmIeKmO8VoTcwKx/mYJ2dVXd5aUPRsRT6Yrlr5P1jpZSPbLf0d8hez0znJJS4GfA9v0iaTH6F4I3EU28u/5EbEoIpYBZwK3SroX+EbJelem2OanYdnr4etkj7y9T9KyNL+N1B/yoKT9UtHRwNJ02uuNwMWp/FDgF3WK0/oAj8ZrZq8g6UPAWyPiK1XqXAXMjog/9F5k1kx8FZaZvUJEXN11qq2cdArvGieP1uYWiJmZFeI+EDMzK8QJxMzMCuwiJxQAAAAYSURBVHECMTOzQpxAzMysECcQMzMr5H8BrogncqXvOWkAAAAASUVORK5CYII=\n" + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZAAAAEWCAYAAABIVsEJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3de5wcVZ338c+XIYGwCiEkYsiFwEOMBNFERljECyBIcJVERQR23Yhg1BXdlccIEQUXl2fDsivi68FLRAQU5BIhRA1mgQC6AsJAIrlgJASETAKESxBhTEjy2z/qTKw03Z1OzfR0z8z3/Xr1q6tOnar6TSXdvz51qk4pIjAzM9teOzQ6ADMz652cQMzMrBAnEDMzK8QJxMzMCnECMTOzQpxAzMysECcQszqQ9E5Jyxsdh1k9OYFYnyPpMUlHNTKGiPh1RIyr1/YlHSPpV5JelLRW0p2SjqvX/szKcQIxK0BSSwP3fTxwPXAlMBLYEzgH+ECBbUmSvwesEP/HsX5D0g6SzpL0iKRnJV0naUhu+fWSnpT0Qvp1f0Bu2eWSviNpnqSXgCNSS+eLkh5M61wraedU/3BJq3LrV6ybln9J0hpJqyWdJikk7VfmbxDwDeDrEXFpRLwQEZsj4s6I+GSq8zVJP86tMyZtb8c0f4ek8yX9BngZmC6prWQ/X5A0N03vJOk/JT0u6SlJ35U0qIv/HNYHOIFYf/I5YArwbmAv4Hngktzym4GxwOuAB4CrStY/GTgfeC3wP6nsBGASsA/wZuDjVfZftq6kScAZwFHAfsDhVbYxDhgFzK5SpxYfA6aR/S3fBcZJGptbfjJwdZqeCbwBmJDiG0HW4rF+zgnE+pNPA2dHxKqIWA98DTi+85d5RFwWES/mlr1F0m659W+KiN+kX/x/SWXfiojVEfEc8DOyL9lKKtU9AfhhRCyNiJfTvivZI72vqfWPruDytL+NEfECcBNwEkBKJG8E5qYWzzTgCxHxXES8CPw/4MQu7t/6ACcQ60/2Bm6UtE7SOuAhYBOwp6QWSTPT6a0/AY+ldYbm1n+izDafzE2/DLymyv4r1d2rZNvl9tPp2fQ+vEqdWpTu42pSAiFrfcxJyWwYsAtwf+64/TKVWz/nBGL9yRPAsRExOPfaOSLayb40J5OdRtoNGJPWUW79eg1dvYasM7zTqCp1l5P9HR+uUuclsi/9Tq8vU6f0b7kFGCZpAlki6Tx99QzQARyQO2a7RUS1RGn9hBOI9VUDJO2ce+1Idq7/fEl7A0gaJmlyqv9aYD3ZL/xdyE7T9JTrgFMk7S9pF+CrlSpG9vyFM4CvSjpF0q7p4oB3SJqVqi0C3iVpdDoFN2NbAUTEK2RXdl0IDCFLKETEZuD7wEWSXgcgaYSkYwr/tdZnOIFYXzWP7Jdz5+trwMXAXOC/Jb0I3AMckupfCfwRaAeWpWU9IiJuBr4F3A6syO17fYX6s4GPAp8AVgNPAf9G1o9BRNwCXAs8CNwP/LzGUK4ma4FdHxEbc+VndsaVTu/dStaZb/2c/EAps+YiaX9gCbBTyRe5WVNxC8SsCUj6YLrfYnfgAuBnTh7W7JxAzJrDp4CngUfIrgz7TGPDMds2n8IyM7NC3AIxM7NCdmx0AD1p6NChMWbMmEaHYWbWq9x///3PRMSrbh7tVwlkzJgxtLW1bbuimZltIemP5cp9CsvMzApxAjEzs0KcQMzMrBAnEDMzK8QJxMzMCulXV2FZc5uzsJ0L5y9n9boO9ho8iOnHjGPKxBGNDsus16r3Z8oJxJrCnIXtzLhhMR2vbAKgfV0HM25YDOAkYlZAT3ymnED6uN7yq/7C+cu3/Efv1PHKJr40+0F+cu/jDYrKrPda+Pg6NmzavFVZxyubuHD+cieQ/qRoEuhNv+pXr+soW176ATCz2lT67FT6rBXhBNIEqiWIriSB3vSrfkDLDmX/w48YPIhrP3VoAyIy690Om7mA9jLJYq/Bg7ptH04gDbatBNGVJFDuPw8056/6UUMG8egzL7E5Nzj0oAEtTD/GD74zK2L6MeO2+m6B7v9MOYE02LYSRFeSwMBe9qu+t/TXmPUGnZ+dPnsVlqRJZM+pbgEujYiZJcsvAo5Is7sAr4uIwWnZJmBxWvZ4RBzXM1F3r22d++9KEiht3UBz/6qfMnGEE4ZZN6r3Z6phCURSC3AJcDSwCrhP0tyIWNZZJyK+kKv/OWBibhMdETGhp+LtDuV+Ye81eFDZVkZnguhKEuiJXyBm1n81sgVyMLAiIlYCSLoGmAwsq1D/JODcHoqt21Xq6/jwQSP46f3tFRNEV5OAf9WbWb00MoGMAJ7Iza8CDilXUdLewD7AglzxzpLagI3AzIiYU69Au0Olvo7r7lvFqCGDWLn2JYKs5VGaIJwEzKwZ9ZZO9BOB2RGR/wbeOyLaJe0LLJC0OCIeKV1R0jRgGsDo0aN7JtoyqvV1DH3NTgx9zU5MnjCCkw9pXIxmZtujkQmkHRiVmx+Zyso5EfhsviAi2tP7Skl3kPWPvCqBRMQsYBZAa2trlC7vKdvq6zAz620aORrvfcBYSftIGkiWJOaWVpL0RmB34O5c2e6SdkrTQ4HDqNx30mPmLGznsJkL2OesX3DYzAXMWfjXfDj9mHEMGtCyVf1mviLKzGxbGtYCiYiNkk4H5pNdxntZRCyVdB7QFhGdyeRE4JqIyLce9ge+J2kzWRKcmb96qxG2dUNgZx/Gl2Y/yIZNm8v2dZiZ9Sba+nu5b2ttbY22tra6bLvSsAEDW3Zg4ujBW+aXrfkT44fv6tNWZtZrSLo/IlpLy/1AqW5S62CA44fvyuQJbnWYWe/XW67CanruJDez/sYtkG7iTnIz62/cAukm7iQ3s/7GCaQbTZk4YssQ6z5tZWZ9nRNIQR563Mz6OyeQAqrd82Fm1l+4E72Aag+BWrbmTw2KysysZzmBFFDtng/f52Fm/YVPYRXgez7MzNwCKcT3fJiZuQVSiO/5MDNzAinM93yYWX/nU1hmZlaIE4iZmRXiBGJmZoU4gZiZWSENTSCSJklaLmmFpLPKLP+4pLWSFqXXabllUyU9nF5TezZyMzNr2FVYklqAS4CjgVXAfZLmlnm2+bURcXrJukOAc4FWIID707rP90DoZmZGY1sgBwMrImJlRGwArgEm17juMcAtEfFcShq3AJPqFKeZmZXRyAQyAngiN78qlZX6sKQHJc2WNGo710XSNEltktrWrl3bHXGbmRnN34n+M2BMRLyZrJVxxfZuICJmRURrRLQOGzas2wM0M+uvGplA2oFRufmRqWyLiHg2Itan2UuBg2pd18zM6quRCeQ+YKykfSQNBE4E5uYrSBqemz0OeChNzwfeK2l3SbsD701lZmbWQxp2FVZEbJR0OtkXfwtwWUQslXQe0BYRc4HPSzoO2Ag8B3w8rfucpK+TJSGA8yLiuR7/I8zM+rGGDqYYEfOAeSVl5+SmZwAzKqx7GXBZXQM0M7OKmr0T3czMmpQTiJmZFeIEYmZmhTiBmJlZIU4gZmZWiBOImZ
kV4gRiZmaFOIGYmVkhTiBmZlaIE4iZmRXiBGJmZoU4gZiZWSFOIGZmVogTiJmZFeIEYmZmhTiBmJlZIQ1NIJImSVouaYWks8osP0PSMkkPSrpN0t65ZZskLUqvuaXrmplZfTXsiYSSWoBLgKOBVcB9kuZGxLJctYVAa0S8LOkzwH8AH03LOiJiQo8GbWZmWzSyBXIwsCIiVkbEBuAaYHK+QkTcHhEvp9l7gJE9HKOZmVXQyAQyAngiN78qlVVyKnBzbn5nSW2S7pE0pdJKkqalem1r167tWsRmZrZFw05hbQ9J/wC0Au/OFe8dEe2S9gUWSFocEY+UrhsRs4BZAK2trdEjAZuZ9QONbIG0A6Ny8yNT2VYkHQWcDRwXEes7yyOiPb2vBO4AJtYzWDMz21ojE8h9wFhJ+0gaCJwIbHU1laSJwPfIksfTufLdJe2UpocChwH5znczM6uzhp3CioiNkk4H5gMtwGURsVTSeUBbRMwFLgReA1wvCeDxiDgO2B/4nqTNZElwZsnVW2ZmVmcN7QOJiHnAvJKyc3LTR1VY7y7gwPpGZ2Zm1fhOdDMzK6RXXIXVTOYsbOfC+ctZva6DAS07MGrIoEaHZGbWEG6BbIc5C9uZccNi2td1EMCGTZt59JmXmLPwVRePmZn1eU4g2+HC+cvpeGXTVmWbIys3M+tvnEC2w+p1HdtVbmbWlzmBbIe9Bpfv76hUbmbWl1VNIJJ2lfR/ypS/uX4hNa/px4xj0ICWrcoGDWhh+jHjGhSRmVnjVEwgkk4Afg/8VNJSSW/LLb683oE1oykTR/DvHzqQgS3ZYRsxeBD//qEDmTKx2hiQZmZ9U7XLeL8MHBQRayQdDPxI0oyIuBFQz4TXfKZMHMFP7n0cgGs/dWiDozEza5xqCaQlItYARMS9ko4Afi5pFOBRbc3M+rlqfSAv5vs/UjI5nOyhTwfUOS4zM2ty1Vogn6HkVFVEvChpEnBCXaMyM7OmV7EFEhG/Ax6VdHtJ+SsRcVXdIzMzs6ZW9TLeiNgEbJa0Ww/FY2ZmvUQtgyn+GVgs6Rbgpc7CiPh83aIyM7OmV0sCuSG9zMzMtthmAomIK+q189QhfzHZEwkvjYiZJct3Aq4EDgKeBT4aEY+lZTOAU4FNwOcjYn694jQzs1dr2FhYklqAS4BjgfHASZLGl1Q7FXg+IvYDLgIuSOuOJ3uG+gHAJODbaXtmZtZDGjmY4sHAiohYGREbgGvI7jHJmwx0toBmA+9R9nD0ycA1EbE+Ih4FVqTtmZlZD2lkAhkBPJGbX5XKytaJiI3AC8AeNa5rZmZ1tM0+EElvAKYDe+frR8SRdYyr20iaBkwDGD16dIOjMTPrO2q5Cut64LvA98k6rLtLOzAqNz8ylZWrs0rSjsBuZJ3ptawLQETMAmYBtLa2egwvM7NuUksC2RgR36nDvu8Dxkrah+zL/0Tg5JI6c4GpwN3A8cCCiAhJc4GrJX0D2AsYC9xbhxjNzKyCWhLIzyT9E3AjsL6zMCKe68qOI2KjpNOB+WSX8V4WEUslnQe0RcRc4Adkw8ivAJ4jSzKketcBy4CNwGfTXfNmZtZDakkgU9P79FxZAPt2decRMQ+YV1J2Tm76L8BHKqx7PnB+V2MwM7NiarmRcJ+eCMTMzHqXWq7CGkA2tPu7UtEdwPci4pU6xmVmZk2ullNY3wEGAN9O8x9LZafVKygzM2t+tSSQt0XEW3LzCyT9rl4BmZlZ71DLneib8o+2lbQv3Xs/iJmZ9UK1tECmA7dLWkn2iNu9gVPqGpWZmTW9Wq7Cuk3SWGBcKloeEeurrWNmZn1fxQQi6ciIWCDpQyWL9pNERPghU2Zm/Vi1Fsi7gQXAB8osC/yUQjOzfq1iAomIc9PkeemZG1uk8avMzKwfq+UqrJ+WKZvd3YGYmVnvUq0P5I1kj4zdraQfZFdg53oHZmZmza1aH8g44P3AYLbuB3kR+GQ9gzIzs+ZXrQ/kJuAmSYdGxN09GJOZmfUCtdxIuFDSZ8lOZ205dRURn6hbVGZm1vRq6UT/EfB64BjgTrLHx75Yz6DMzKz51ZJA9ouIrwIvRcQVwN8Bh9Q3LDMza3a1JJDO536sk/QmYDfgdV3ZqaQhkm6R9HB6371MnQmS7pa0VNKDkj6aW3a5pEclLUqvCV2Jx8zMtl8tCWRW+oL/KjCX7Dnk/9HF/Z4F3BYRY4Hb0nypl4F/jIgDgEnANyUNzi2fHhET0mtRF+MxM7PtVMtgipemyTvphuegJ5OBw9P0FWRPOTyzZL9/yE2vlvQ0MAxY100xmJlZF1S7kfCMaitGxDe6sN89I2JNmn4S2LNaZUkHAwOBR3LF50s6h9SCqTRCsKRpwDSA0aNHdyFkMzPLq9YCeW16Hwe8jez0FWQ3Fd67rQ1LupXs6q1SZ+dnIiIkRZXtDCe7EmxqRGxOxTPIEs9AYBZZ6+W8cutHxKxUh9bW1or7MTOz7VPtRsJ/BZD0K+CtEfFimv8a8IttbTgijqq0TNJTkoZHxJqUIJ6uUG/XtK+zI+Ke3LY7Wy/rJf0Q+OK24jEzs+5VSyf6nsCG3PwGtnHKqQZzgalpeipwU2kFSQOBG4ErI2J2ybLh6V3AFGBJF+MxM7PtVMud6FcC90q6Mc1PAS7v4n5nAtdJOhX4I3ACgKRW4NMRcVoqexewh6SPp/U+nq64ukrSMLJH7C4CPt3FeMzMbDvVchXW+ZJuBt6Zik6JiIVd2WlEPAu8p0x5G3Bamv4x8OMK6x/Zlf2bmVnXVbsKa9eI+JOkIcBj6dW5bEhEPFf/8MzMrFlVa4FcTTac+/1kj7DtpDTfXfeEmJlZL1TtKqz3p3c/vtbMzF6l2imst1ZbMSIe6P5wzMyst6h2Cuu/qiwLwB3ZZmb9WLVTWEf0ZCBmZta71HIfCGkY9/Fs/UTCK+sVlJmZNb9tJhBJ55KNnDsemAccC/wP2Q2GZmbWT9UylMnxZDf9PRkRpwBvIXuolJmZ9WO1JJCONAruxjS44dPAqPqGZWZmza6WPpC29CTA75PdVPhn4O66RmVmZk2v2n0glwBXR8Q/paLvSvolsGtEPNgj0ZmZWdOq1gL5A/Cfaej064CfdHUQRTMz6zsq9oFExMURcSjwbuBZ4DJJv5d0rqQ39FiEZmbWlLbZiR4Rf4yICyJiInAS2fNAHqp7ZGZm1tS2mUAk7SjpA5KuAm4GlgMfqntkZmbW1Kp1oh9N1uJ4H3AvcA0wLSJe6upO0zNGrgXGkD1n5ISIeL5MvU3A4jT7eEQcl8r3SfHsQXZl2MciYkPp+mZmVj/VWiAzgLuA/SPiuIi4ujuSR3IWcFtEjAVuS/PldETEhPQ6Lld+AXBRROwHPA+c2k1xmZlZjap1oh8ZEZeWaxl0g8nAFWn6CrJ+lZpIEtlIwLOLrG9mZt2jljvR62HPiFiTpp8E9qxQb2dJbZLukdSZJPYA1kXExjS/ChhRaUeSpqVttK1du7ZbgjczsxpH4y1C0q3A68ssOjs/ExEhKcrUA9g7Itol7QsskLQYeGF74oiIWcAsgNbW1kr7M
TOz7VS3BBIRR1VaJukpScMjYk26UfHpCttoT+8rJd0BTAR+CgyWtGNqhYwE2rv9DzAzs6oadQprLjA1TU8FbiqtIGl3STul6aHAYcCyiAjgdrJRgiuub2Zm9dWoBDITOFrSw8BRaR5JrZIuTXX2JxvI8XdkCWNmRCxLy84EzpC0gqxP5Ac9Gr2ZmdXvFFY1EfEs2TNGSsvbgNPS9F3AgRXWXwkcXM8Yzcysuka1QMzMrJdzAjEzs0KcQMzMrBAnEDMzK8QJxMzMCnECMTOzQpxAzMysECcQMzMrxAnEzMwKcQIxM7NCnEDMzKwQJxAzMyvECcTMzApxAjEzs0KcQMzMrBAnEDMzK8QJxMzMCmlIApE0RNItkh5O77uXqXOEpEW5118kTUnLLpf0aG7ZhJ7/K8zM+rdGtUDOAm6LiLHAbWl+KxFxe0RMiIgJwJHAy8B/56pM71weEYt6JGozM9uiUQlkMnBFmr4CmLKN+scDN0fEy3WNyszMataoBLJnRKxJ008Ce26j/onAT0rKzpf0oKSLJO1UaUVJ0yS1SWpbu3ZtF0I2M7O8uiUQSbdKWlLmNTlfLyICiCrbGQ4cCMzPFc8A3gi8DRgCnFlp/YiYFRGtEdE6bNiwrvxJZmaWs2O9NhwRR1VaJukpScMjYk1KEE9X2dQJwI0R8Upu252tl/WSfgh8sVuCNjOzmjXqFNZcYGqangrcVKXuSZScvkpJB0ki6z9ZUocYzcysikYlkJnA0ZIeBo5K80hqlXRpZyVJY4BRwJ0l618laTGwGBgK/FsPxGxmZjl1O4VVTUQ8C7ynTHkbcFpu/jFgRJl6R9YzPjMz2zbfiW5mZoU4gZiZWSFOIGZmVogTiJmZFeIEYmZmhTiBmJlZIU4gZmZWiBOImZkV4gRiZmaFOIGYmVkhTiBmZlaIE4iZmRXiBGJmZoU4gZiZWSFOIGZmVogTiJmZFdKQBCLpI5KWStosqbVKvUmSlktaIemsXPk+kn6byq+VNLBnIjczs06NaoEsAT4E/KpSBUktwCXAscB44CRJ49PiC4CLImI/4Hng1PqGa2ZmpRqSQCLioYhYvo1qBwMrImJlRGwArgEmSxJwJDA71bsCmFK/aM3MrJxm7gMZATyRm1+VyvYA1kXExpLysiRNk9QmqW3t2rV1C9bMrL/ZsV4blnQr8Poyi86OiJvqtd9SETELmAXQ2toaPbVfM7O+rm4JJCKO6uIm2oFRufmRqexZYLCkHVMrpLPczMx6UDOfwroPGJuuuBoInAjMjYgAbgeOT/WmAj3WojEzs0yjLuP9oKRVwKHALyTNT+V7SZoHkFoXpwPzgYeA6yJiadrEmcAZklaQ9Yn8oKf/BjOz/q5up7CqiYgbgRvLlK8G3pebnwfMK1NvJdlVWmZm1iDNfArLzMyamBOImZkV4gRiZmaFOIGYmVkhDelE703mLGznwvnLWb2ug70GD2L6MeMaHZKZWVNwC6SKOQvbmXHDYtrXdRBA+7oOZtywmGf+vL7RoZmZNZwTSBUXzl9OxyubtirreGUTK9e+1KCIzMyahxNIFavXdZQtD2DyhIrjN5qZ9QtOIFXsNXhQ2fIRgwdx8iGjezgaM7Pm4gRSxfRjxjFoQMtWZYMGtLgj3cwMX4VV1ZSJ2Wmq0quwOsvNzPozJ5BtmDJxhBOGmVkZPoVlZmaFOIGYmVkhTiBmZlaIE4iZmRXiBGJmZoUoe8R4/yBpLfDHGqsPBZ6pYzi9nY9PdT4+lfnYVNeMx2fviBhWWtivEsj2kNQWEa2NjqNZ+fhU5+NTmY9Ndb3p+PgUlpmZFeIEYmZmhTiBVDar0QE0OR+f6nx8KvOxqa7XHB/3gZiZWSFugZiZWSFOIGZmVogTSBmSJklaLmmFpLMaHU+jSbpM0tOSluTKhki6RdLD6X33RsbYKJJGSbpd0jJJSyX9cyr38QEk7SzpXkm/S8fnX1P5PpJ+mz5j10oa2OhYG0VSi6SFkn6e5nvNsXECKSGpBbgEOBYYD5wkaXxjo2q4y4FJJWVnAbdFxFjgtjTfH20E/m9EjAf+Fvhs+v/i45NZDxwZEW8BJgCTJP0tcAFwUUTsBzwPnNrAGBvtn4GHcvO95tg4gbzawcCKiFgZERuAa4DJDY6poSLiV8BzJcWTgSvS9BXAlB4NqklExJqIeCBNv0j2RTACHx8AIvPnNDsgvQI4Epidyvvt8ZE0Evg74NI0L3rRsXECebURwBO5+VWpzLa2Z0SsSdNPAns2MphmIGkMMBH4LT4+W6RTNIuAp4FbgEeAdRGxMVXpz5+xbwJfAjan+T3oRcfGCcS6LLJrwfv19eCSXgP8FPiXiPhTfll/Pz4RsSkiJgAjyVr4b2xwSE1B0vuBpyPi/kbHUpQfaftq7cCo3PzIVGZbe0rS8IhYI2k42a/LfknSALLkcVVE3JCKfXxKRMQ6SbcDhwKDJe2Yfmn318/YYcBxkt4H7AzsClxMLzo2boG82n3A2HQlxEDgRGBug2NqRnOBqWl6KnBTA2NpmHTO+gfAQxHxjdwiHx9A0jBJg9P0IOBosn6i24HjU7V+eXwiYkZEjIyIMWTfMwsi4u/pRcfGd6KXkX4RfBNoAS6LiPMbHFJDSfoJcDjZMNNPAecCc4DrgNFkQ+SfEBGlHe19nqR3AL8GFvPX89hfJusH8fGR3kzWEdxC9oP1uog4T9K+ZBeoDAEWAv8QEesbF2ljSToc+GJEvL83HRsnEDMzK8SnsMzMrBAnEDMzK8QJxMzMCnECMTOzQpxAzMysECcQ6zMkXSTpX3Lz8yVdmpv/L0lnVFn/cknHp+k7JLWWqTNA0sw0yu4Dku6WdGxa9pikoQXi3rLfCssvkbQojfjbkaYXSTpe0rzO+yy6k6ThnaPDVlg+UNKvJPlm5H7MCcT6kt8AbweQtAPZfSsH5Ja/Hbiri/v4OjAceFNEvJVsoLvXdnGbVUXEZ9NQIO8DHomICek1OyLeFxHr6rDbM4DvV4lpA9kowx+tw76tl3ACsb7kLrJhMiBLHEuAFyXtLmknYH/gAUnnSLpP0hJJs9Ld5NskaRfgk8DnOm/sioinIuK6MnXPSNtfUtIq+kdJD6bnY/yozHpfTy2SlhpjekzSUEljJP0+rfsHSVdJOkrSb1Jr6eBU/2+UPd/l3vQMikojTX8Y+GVa54BUf1GKfWyqMwf4+1ritL7JzU/rMyJitaSNkkaTtTbuJhvJ9FDgBWBxRGyQ9P8j4jyA9CX+fuBnNexiP+Dx0sESS0k6CDgFOAQQ8FtJdwIbgK8Ab4+IZyQNKVnvQrLWzClR7A7f/YCPAJ8gG5LnZOAdwHFkd8dPAc4mGzLjE+nU172Sbo2Il3Jx7AM8n7v7+dPAxRFxVRrepzO5LQHeViBO6yPcArG+5i6y5NGZQO7Ozf8m1TlC2RPfFpM9e+GAchvqgncAN0bES+lZGDcA70z7uj4ingEoGdrkq8BuEfHpgskD4NGIWBwRm4GlZA+0CrJhVsakOu8FzkrDq99BNojf6JLtDAfW5ubvBr4s6Uxg74joSPFvAjZI
quspPGteTiDW13T2gxxI9gv5HrIWyNuBuyTtDHwbOD4iDiQ7z79zjdteAYyWtGu3R521GA4qbZVsp/x4SZtz85v569kGAR/O9aOMjoj80/AAOsgdk4i4mqwV0wHMk3Rkru5OwF+6ELP1Yk4g1tfcRXZK6rn0HIrngMFkSeQu/vrF+Ex6hkfFq59KRcTLZCPvXpxO5XSONvuRkqq/BqZI2kXS3wAfTGULgI9I2iOtm08WvwRmAr+o8y/6+cDnOvt9JE0sU+cP/LXFQhrcb2VEfItsZNg3p/I9gGci4pU6xmtNzAnE+prFZFdf3VNS9kJEPJOuWPo+WetkPtkv/+3xFbLTO8skLQF+DpQ+QOoBsufI30s2Ku+lEbEwIpYC5wN3Svod8I2S9a5Psc1NQ5/Xw9fJHiv7oKSlaX4rqT/kEUn7paITgCXptNebgCtT+RHAL+oUpxyuXE4AAABaSURBVPUCHo3XzF5F0geBgyLiK1Xq3ACcFRF/6LnIrJn4Kiwze5WIuLHzVFs56RTeHCeP/s0tEDMzK8R9IGZmVogTiJmZFeIEYmZmhTiBmJlZIU4gZmZWyP8CyVmOlb1ghpoAAAAASUVORK5CYII=\n" }, "metadata": { "needs_background": "light" @@ -340,7 +409,9 @@ { "output_type": "stream", "name": "stdout", - "text": "flaml (60s) r2=0.8324355914726785\n" + "text": [ + "flaml (60s) r2 = 0.831184063859627\n" + ] } ], "source": [ @@ -372,7 +443,16 @@ { "output_type": "execute_result", "data": { - "text/plain": "XGBRegressor(base_score=0.5, booster='gbtree', colsample_bylevel=1,\n colsample_bynode=1, colsample_bytree=1, gamma=0, gpu_id=-1,\n importance_type='gain', interaction_constraints='',\n learning_rate=0.300000012, max_delta_step=0, max_depth=6,\n min_child_weight=1, missing=nan, monotone_constraints='()',\n n_estimators=100, n_jobs=0, num_parallel_tree=1, random_state=0,\n reg_alpha=0, reg_lambda=1, scale_pos_weight=1, subsample=1,\n tree_method='exact', validate_parameters=1, verbosity=None)" + "text/plain": [ + "XGBRegressor(base_score=0.5, booster='gbtree', colsample_bylevel=1,\n", + " colsample_bynode=1, colsample_bytree=1, gamma=0, gpu_id=-1,\n", + " importance_type='gain', interaction_constraints='',\n", + " learning_rate=0.300000012, max_delta_step=0, max_depth=6,\n", + " min_child_weight=1, missing=nan, monotone_constraints='()',\n", + " n_estimators=100, n_jobs=0, num_parallel_tree=1, random_state=0,\n", + " reg_alpha=0, reg_lambda=1, scale_pos_weight=1, subsample=1,\n", + " tree_method='exact', validate_parameters=1, verbosity=None)" + ] }, "metadata": {}, "execution_count": 14 @@ -392,7 +472,9 @@ { "output_type": "stream", "name": "stdout", - "text": "default xgboost r2=0.8265451174596482\n" + "text": [ + "default xgboost r2 = 0.8265451174596482\n" + ] } ], "source": [ @@ -411,17 +493,162 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 16, "metadata": { - "tags": [ - "outputPrepend" - ] + "tags": [] }, "outputs": [ { "output_type": "stream", "name": "stderr", - "text": "NFO - Using RepeatedKFold\n[flaml.automl: 04-10 12:00:32] {911} INFO - Minimizing error metric: 1-r2\n[flaml.automl: 04-10 12:00:32] {930} INFO - List of ML learners in AutoML Run: ['my_xgb1', 'my_xgb2']\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 0, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.1s,\tbest my_xgb1's error=53750617.1059,\tbest my_xgb1's error=53750617.1059\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 1, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.1s,\tbest my_xgb1's error=260718.5183,\tbest my_xgb1's error=260718.5183\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 2, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.1s,\tbest my_xgb1's error=260718.5183,\tbest my_xgb1's error=260718.5183\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 3, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.2s,\tbest my_xgb1's error=2075.8695,\tbest my_xgb1's error=2075.8695\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 4, 
current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.2s,\tbest my_xgb2's error=4.1611,\tbest my_xgb2's error=4.1611\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 5, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.2s,\tbest my_xgb1's error=2075.8695,\tbest my_xgb2's error=4.1611\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 6, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.3s,\tbest my_xgb1's error=289.1186,\tbest my_xgb2's error=4.1611\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 7, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.3s,\tbest my_xgb1's error=289.1186,\tbest my_xgb2's error=4.1611\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 8, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.3s,\tbest my_xgb1's error=289.1186,\tbest my_xgb2's error=4.1611\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 9, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.4s,\tbest my_xgb2's error=4.1611,\tbest my_xgb2's error=4.1611\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 10, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.4s,\tbest my_xgb2's error=4.1603,\tbest my_xgb2's error=4.1603\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 11, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.4s,\tbest my_xgb2's error=4.1603,\tbest my_xgb2's error=4.1603\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 12, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.5s,\tbest my_xgb1's error=7.0764,\tbest my_xgb2's error=4.1603\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 13, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.6s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 14, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.6s,\tbest my_xgb1's error=7.0764,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 15, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.7s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 16, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.7s,\tbest my_xgb1's error=7.0764,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 17, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.8s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 18, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.8s,\tbest my_xgb1's error=7.0764,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 19, current learner my_xgb2\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.9s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 20, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.9s,\tbest my_xgb1's error=3.8735,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 21, current learner my_xgb1\n[flaml.automl: 04-10 12:00:32] {1147} INFO - at 0.9s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:32] {994} INFO - iteration 22, 
current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.0s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 23, current learner my_xgb2\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.1s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 24, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.1s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 25, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.1s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 26, current learner my_xgb2\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.2s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 27, current learner my_xgb2\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.3s,\tbest my_xgb2's error=0.3612,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 28, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 29, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 30, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.3612\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 31, current learner my_xgb2\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.6s,\tbest my_xgb2's error=0.2475,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 32, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 33, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.7s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 34, current learner my_xgb2\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.7s,\tbest my_xgb2's error=0.2475,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 35, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.8s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 36, current learner my_xgb2\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.9s,\tbest my_xgb2's error=0.2475,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 37, current learner my_xgb1\n[flaml.automl: 04-10 12:00:33] {1147} INFO - at 1.9s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.2475\n[flaml.automl: 04-10 12:00:33] {994} INFO - iteration 38, current learner my_xgb2\n[flaml.automl: 04-10 12:00:34] {1147} INFO - at 2.2s,\tbest my_xgb2's error=0.1950,\tbest my_xgb2's error=0.1950\n[flaml.automl: 04-10 12:00:34] {994} INFO - iteration 39, current learner my_xgb2\n[flaml.automl: 04-10 12:00:34] {1147} INFO - at 2.3s,\tbest my_xgb2's error=0.1950,\tbest my_xgb2's error=0.1950\n[flaml.automl: 04-10 12:00:34] {994} INFO - iteration 40, current 
learner my_xgb1\n[flaml.automl: 04-10 12:00:34] {1147} INFO - at 2.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1950\n[flaml.automl: 04-10 12:00:34] {994} INFO - iteration 41, current learner my_xgb2\n[flaml.automl: 04-10 12:00:35] {1147} INFO - at 4.0s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:35] {994} INFO - iteration 42, current learner my_xgb2\n[flaml.automl: 04-10 12:00:39] {1147} INFO - at 7.1s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:39] {994} INFO - iteration 43, current learner my_xgb1\n[flaml.automl: 04-10 12:00:39] {1147} INFO - at 7.2s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:39] {994} INFO - iteration 44, current learner my_xgb1\n[flaml.automl: 04-10 12:00:39] {1147} INFO - at 7.2s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:39] {994} INFO - iteration 45, current learner my_xgb1\n[flaml.automl: 04-10 12:00:39] {1147} INFO - at 7.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:39] {994} INFO - iteration 46, current learner my_xgb1\n[flaml.automl: 04-10 12:00:39] {1147} INFO - at 7.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:39] {994} INFO - iteration 47, current learner my_xgb1\n[flaml.automl: 04-10 12:00:39] {1147} INFO - at 7.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:39] {994} INFO - iteration 48, current learner my_xgb2\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.7s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 49, current learner my_xgb1\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.7s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 50, current learner my_xgb1\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.8s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 51, current learner my_xgb1\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.8s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 52, current learner my_xgb1\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.8s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 53, current learner my_xgb1\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.9s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 54, current learner my_xgb1\n[flaml.automl: 04-10 12:00:44] {1147} INFO - at 12.9s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:44] {994} INFO - iteration 55, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.0s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 56, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.0s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 57, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.0s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 58, 
current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.1s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 59, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.1s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 60, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.1s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 61, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.2s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 62, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.2s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 63, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.2s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 64, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 65, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 66, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 67, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 68, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 69, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 70, current learner my_xgb2\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.6s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 71, current learner my_xgb1\n[flaml.automl: 04-10 12:00:45] {1147} INFO - at 13.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:45] {994} INFO - iteration 72, current learner my_xgb2\n[flaml.automl: 04-10 12:00:47] {1147} INFO - at 15.4s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:47] {994} INFO - iteration 73, current learner my_xgb1\n[flaml.automl: 04-10 12:00:47] {1147} INFO - at 15.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:47] {994} INFO - iteration 74, current learner my_xgb1\n[flaml.automl: 04-10 12:00:47] {1147} INFO - at 15.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:47] {994} INFO - iteration 75, current learner my_xgb1\n[flaml.automl: 04-10 12:00:47] {1147} INFO - at 15.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:47] {994} INFO - 
iteration 76, current learner my_xgb1\n[flaml.automl: 04-10 12:00:47] {1147} INFO - at 15.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:47] {994} INFO - iteration 77, current learner my_xgb1\n[flaml.automl: 04-10 12:00:47] {1147} INFO - at 15.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:47] {994} INFO - iteration 78, current learner my_xgb2\n[flaml.automl: 04-10 12:00:48] {1147} INFO - at 16.3s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:48] {994} INFO - iteration 79, current learner my_xgb1\n[flaml.automl: 04-10 12:00:48] {1147} INFO - at 16.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:48] {994} INFO - iteration 80, current learner my_xgb1\n[flaml.automl: 04-10 12:00:48] {1147} INFO - at 16.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:48] {994} INFO - iteration 81, current learner my_xgb1\n[flaml.automl: 04-10 12:00:48] {1147} INFO - at 16.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:48] {994} INFO - iteration 82, current learner my_xgb2\n[flaml.automl: 04-10 12:00:49] {1147} INFO - at 17.5s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:49] {994} INFO - iteration 83, current learner my_xgb1\n[flaml.automl: 04-10 12:00:49] {1147} INFO - at 17.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:49] {994} INFO - iteration 84, current learner my_xgb1\n[flaml.automl: 04-10 12:00:49] {1147} INFO - at 17.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:49] {994} INFO - iteration 85, current learner my_xgb1\n[flaml.automl: 04-10 12:00:49] {1147} INFO - at 17.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:49] {994} INFO - iteration 86, current learner my_xgb1\n[flaml.automl: 04-10 12:00:49] {1147} INFO - at 17.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:49] {994} INFO - iteration 87, current learner my_xgb2\n[flaml.automl: 04-10 12:00:52] {1147} INFO - at 20.8s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:52] {994} INFO - iteration 88, current learner my_xgb1\n[flaml.automl: 04-10 12:00:52] {1147} INFO - at 20.8s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:52] {994} INFO - iteration 89, current learner my_xgb2\n[flaml.automl: 04-10 12:00:53] {1147} INFO - at 21.5s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:53] {994} INFO - iteration 90, current learner my_xgb1\n[flaml.automl: 04-10 12:00:53] {1147} INFO - at 21.5s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:53] {994} INFO - iteration 91, current learner my_xgb1\n[flaml.automl: 04-10 12:00:53] {1147} INFO - at 21.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:53] {994} INFO - iteration 92, current learner my_xgb1\n[flaml.automl: 04-10 12:00:53] {1147} INFO - at 21.6s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:53] {994} INFO - iteration 93, current learner my_xgb2\n[flaml.automl: 04-10 12:00:57] {1147} INFO - at 25.2s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:57] 
{994} INFO - iteration 94, current learner my_xgb1\n[flaml.automl: 04-10 12:00:57] {1147} INFO - at 25.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:57] {994} INFO - iteration 95, current learner my_xgb1\n[flaml.automl: 04-10 12:00:57] {1147} INFO - at 25.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:57] {994} INFO - iteration 96, current learner my_xgb1\n[flaml.automl: 04-10 12:00:57] {1147} INFO - at 25.3s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:57] {994} INFO - iteration 97, current learner my_xgb1\n[flaml.automl: 04-10 12:00:57] {1147} INFO - at 25.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:00:57] {994} INFO - iteration 98, current learner my_xgb2\n[flaml.automl: 04-10 12:00:59] {1147} INFO - at 27.8s,\tbest my_xgb2's error=0.1711,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:01:01] {1165} INFO - retrain my_xgb2 for 1.6s\n[flaml.automl: 04-10 12:01:01] {994} INFO - iteration 99, current learner my_xgb1\n[flaml.automl: 04-10 12:01:01] {1147} INFO - at 29.4s,\tbest my_xgb1's error=1.0036,\tbest my_xgb2's error=0.1711\n[flaml.automl: 04-10 12:01:03] {1165} INFO - retrain my_xgb1 for 1.6s\n[flaml.automl: 04-10 12:01:03] {1187} INFO - selected model: \n[flaml.automl: 04-10 12:01:03] {944} INFO - fit succeeded\n" + "text": [ + "[flaml.automl: 05-02 07:57:08] {890} INFO - Evaluation method: holdout\n", + "[flaml.automl: 05-02 07:57:08] {606} INFO - Using RepeatedKFold\n", + "[flaml.automl: 05-02 07:57:08] {911} INFO - Minimizing error metric: 1-r2\n", + "[flaml.automl: 05-02 07:57:08] {929} INFO - List of ML learners in AutoML Run: ['my_xgb1', 'my_xgb2']\n", + "[flaml.automl: 05-02 07:57:08] {993} INFO - iteration 0, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:08] {1141} INFO - at 0.1s,\tbest my_xgb1's error=53750617.1059,\tbest my_xgb1's error=53750617.1059\n", + "[flaml.automl: 05-02 07:57:08] {993} INFO - iteration 1, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:08] {1141} INFO - at 0.1s,\tbest my_xgb1's error=260718.5183,\tbest my_xgb1's error=260718.5183\n", + "[flaml.automl: 05-02 07:57:08] {993} INFO - iteration 2, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:08] {1141} INFO - at 0.2s,\tbest my_xgb1's error=260718.5183,\tbest my_xgb1's error=260718.5183\n", + "[flaml.automl: 05-02 07:57:08] {993} INFO - iteration 3, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:08] {1141} INFO - at 0.2s,\tbest my_xgb1's error=260718.5183,\tbest my_xgb1's error=260718.5183\n", + "[flaml.automl: 05-02 07:57:08] {993} INFO - iteration 4, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.3s,\tbest my_xgb2's error=4.1611,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 5, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.3s,\tbest my_xgb1's error=40726.5769,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 6, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.4s,\tbest my_xgb1's error=1918.9637,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 7, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.4s,\tbest my_xgb1's error=1918.9637,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 8, 
current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.5s,\tbest my_xgb1's error=1918.9637,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 9, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.5s,\tbest my_xgb1's error=1918.9637,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 10, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.6s,\tbest my_xgb2's error=4.1611,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 11, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.6s,\tbest my_xgb2's error=4.1611,\tbest my_xgb2's error=4.1611\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 12, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.7s,\tbest my_xgb2's error=4.1603,\tbest my_xgb2's error=4.1603\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 13, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.7s,\tbest my_xgb2's error=4.1603,\tbest my_xgb2's error=4.1603\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 14, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.8s,\tbest my_xgb1's error=93.9115,\tbest my_xgb2's error=4.1603\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 15, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.8s,\tbest my_xgb2's error=4.1603,\tbest my_xgb2's error=4.1603\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 16, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 0.9s,\tbest my_xgb1's error=93.9115,\tbest my_xgb2's error=4.1603\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 17, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 1.0s,\tbest my_xgb2's error=3.8476,\tbest my_xgb2's error=3.8476\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 18, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 1.0s,\tbest my_xgb1's error=12.3445,\tbest my_xgb2's error=3.8476\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 19, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 1.1s,\tbest my_xgb1's error=12.3445,\tbest my_xgb2's error=3.8476\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 20, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 1.1s,\tbest my_xgb1's error=12.3445,\tbest my_xgb2's error=3.8476\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 21, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:09] {1141} INFO - at 1.2s,\tbest my_xgb1's error=4.1558,\tbest my_xgb2's error=3.8476\n", + "[flaml.automl: 05-02 07:57:09] {993} INFO - iteration 22, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.3s,\tbest my_xgb2's error=0.3645,\tbest my_xgb2's error=0.3645\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 23, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.4s,\tbest my_xgb2's error=0.3645,\tbest my_xgb2's error=0.3645\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 24, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.5s,\tbest my_xgb1's error=2.4948,\tbest my_xgb2's error=0.3645\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - 
iteration 25, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.6s,\tbest my_xgb2's error=0.3139,\tbest my_xgb2's error=0.3139\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 26, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.7s,\tbest my_xgb2's error=0.3139,\tbest my_xgb2's error=0.3139\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 27, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.8s,\tbest my_xgb2's error=0.3139,\tbest my_xgb2's error=0.3139\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 28, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.8s,\tbest my_xgb1's error=2.4948,\tbest my_xgb2's error=0.3139\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 29, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 1.9s,\tbest my_xgb1's error=2.4948,\tbest my_xgb2's error=0.3139\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 30, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 2.0s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.3139\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 31, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 2.1s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 32, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 2.1s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 33, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:10] {1141} INFO - at 2.2s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:10] {993} INFO - iteration 34, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:11] {1141} INFO - at 2.5s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:11] {993} INFO - iteration 35, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:11] {1141} INFO - at 2.5s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:11] {993} INFO - iteration 36, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:11] {1141} INFO - at 2.7s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:11] {993} INFO - iteration 37, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:11] {1141} INFO - at 2.8s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:11] {993} INFO - iteration 38, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:11] {1141} INFO - at 2.9s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:11] {993} INFO - iteration 39, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:11] {1141} INFO - at 3.2s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:11] {993} INFO - iteration 40, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:12] {1141} INFO - at 3.3s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:12] {993} INFO - iteration 41, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:12] {1141} INFO - at 3.4s,\tbest my_xgb2's error=0.2254,\tbest my_xgb2's error=0.2254\n", + "[flaml.automl: 05-02 07:57:12] {993} INFO - 
iteration 42, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:12] {1141} INFO - at 3.8s,\tbest my_xgb2's error=0.1900,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:12] {993} INFO - iteration 43, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:12] {1141} INFO - at 4.1s,\tbest my_xgb2's error=0.1900,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:12] {993} INFO - iteration 44, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:12] {1141} INFO - at 4.2s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:12] {993} INFO - iteration 45, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 4.5s,\tbest my_xgb2's error=0.1900,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 46, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 4.5s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 47, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 4.7s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 48, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 4.7s,\tbest my_xgb2's error=0.1900,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 49, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 4.8s,\tbest my_xgb1's error=1.4151,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 50, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 4.9s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 51, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 5.1s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 52, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:13] {1141} INFO - at 5.2s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:13] {993} INFO - iteration 53, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:14] {1141} INFO - at 6.1s,\tbest my_xgb2's error=0.1900,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:14] {993} INFO - iteration 54, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:14] {1141} INFO - at 6.2s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:14] {993} INFO - iteration 55, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:15] {1141} INFO - at 6.5s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:15] {993} INFO - iteration 56, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:15] {1141} INFO - at 6.6s,\tbest my_xgb2's error=0.1900,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:15] {993} INFO - iteration 57, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:15] {1141} INFO - at 6.7s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1900\n", + "[flaml.automl: 05-02 07:57:15] {993} INFO - iteration 58, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:17] {1141} INFO - at 8.3s,\tbest my_xgb2's error=0.1865,\tbest my_xgb2's error=0.1865\n", + "[flaml.automl: 05-02 07:57:17] {993} INFO - 
iteration 59, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:17] {1141} INFO - at 8.4s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1865\n", + "[flaml.automl: 05-02 07:57:17] {993} INFO - iteration 60, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:17] {1141} INFO - at 9.2s,\tbest my_xgb2's error=0.1790,\tbest my_xgb2's error=0.1790\n", + "[flaml.automl: 05-02 07:57:17] {993} INFO - iteration 61, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:19] {1141} INFO - at 10.8s,\tbest my_xgb2's error=0.1790,\tbest my_xgb2's error=0.1790\n", + "[flaml.automl: 05-02 07:57:19] {993} INFO - iteration 62, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:19] {1141} INFO - at 11.0s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1790\n", + "[flaml.automl: 05-02 07:57:19] {993} INFO - iteration 63, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:19] {1141} INFO - at 11.2s,\tbest my_xgb2's error=0.1790,\tbest my_xgb2's error=0.1790\n", + "[flaml.automl: 05-02 07:57:19] {993} INFO - iteration 64, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:25] {1141} INFO - at 16.4s,\tbest my_xgb2's error=0.1707,\tbest my_xgb2's error=0.1707\n", + "[flaml.automl: 05-02 07:57:25] {993} INFO - iteration 65, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:25] {1141} INFO - at 16.5s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1707\n", + "[flaml.automl: 05-02 07:57:25] {993} INFO - iteration 66, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:25] {1141} INFO - at 17.0s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1707\n", + "[flaml.automl: 05-02 07:57:25] {993} INFO - iteration 67, current learner my_xgb2\n", + "[flaml.automl: 05-02 07:57:28] {1141} INFO - at 19.3s,\tbest my_xgb2's error=0.1707,\tbest my_xgb2's error=0.1707\n", + "[flaml.automl: 05-02 07:57:33] {1164} INFO - retrain my_xgb2 for 5.8s\n", + "[flaml.automl: 05-02 07:57:33] {993} INFO - iteration 68, current learner my_xgb1\n", + "[flaml.automl: 05-02 07:57:33] {1141} INFO - at 25.1s,\tbest my_xgb1's error=1.0011,\tbest my_xgb2's error=0.1707\n", + "[flaml.automl: 05-02 07:57:39] {1164} INFO - retrain my_xgb1 for 5.4s\n", + "[flaml.automl: 05-02 07:57:39] {1187} INFO - selected model: \n", + "[flaml.automl: 05-02 07:57:39] {944} INFO - fit succeeded\n" + ] } ], "source": [ @@ -471,7 +698,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 17, "metadata": { "tags": [] }, @@ -479,7 +706,16 @@ { "output_type": "stream", "name": "stdout", - "text": "Best hyperparmeter config:{'n_estimators': 308.0, 'max_leaves': 106.0, 'min_child_weight': 23.491005464979025, 'learning_rate': 0.12163628930660118, 'subsample': 1.0, 'colsample_bylevel': 0.8088347023339904, 'colsample_bytree': 1.0, 'reg_alpha': 0.0017299741585977452, 'reg_lambda': 0.2417100801313181}\nBest r2 on validation data: 0.8289\nTraining duration of best run: 1.589 s\nPredicted labels[149762.39 238168. 150789.95 ... 180301.3 205223.98 262682.8 ]\nTrue labels[136900. 241300. 200700. ... 160900. 227300. 
265600.]\nr2=0.8390991924308188\nmse=2126868377.371629\nmae=29728.29839662064\n" + "text": [ + "Best hyperparmeter config: {'n_estimators': 810.0, 'max_leaves': 148.0, 'min_child_weight': 79.26376930785436, 'learning_rate': 0.05793074143079168, 'subsample': 1.0, 'colsample_bylevel': 0.8662229421401874, 'colsample_bytree': 0.7851677398738949, 'reg_alpha': 0.00738292823760415, 'reg_lambda': 1.2202619267865558}\n", + "Best r2 on validation data: 0.8293\n", + "Training duration of best run: 5.254 s\n", + "Predicted labels [141967.81 241199.84 138144. ... 194218.06 226530.98 261889.88]\n", + "True labels [136900. 241300. 200700. ... 160900. 227300. 265600.]\n", + "r2 = 0.842873933929261\n", + "mse = 2076971932.1823325\n", + "mae = 29342.21334408309\n" + ] } ], "source": [ @@ -507,8 +743,8 @@ ], "metadata": { "kernelspec": { - "name": "python37764bitbsconda5b158f6acec0414d8c5c2401992dd9e1", - "display_name": "Python 3.7.7 64-bit ('bs': conda)", + "name": "python3", + "display_name": "Python 3.8.0 64-bit", "metadata": { "interpreter": { "hash": "0cfea3304185a9579d09e0953576b57c8581e46e6ebc6dfeb681bc5a511f7544" @@ -525,7 +761,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7-final" + "version": "3.8.0-final" } }, "nbformat": 4,
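Note: the hunks above record FLAML tuning two user-defined XGBoost learners (my_xgb1, my_xgb2) and then reporting the best configuration and the held-out r2/mse/mae. A minimal Python sketch of that workflow follows. It is not a verbatim copy of the notebook cells: it assumes MyXGB1/MyXGB2 (the custom learner classes defined earlier in the notebook) and the X_train/y_train/X_test/y_test split are already in scope, and the time_budget value is illustrative.

    # Sketch of the tuning-and-evaluation flow whose output the hunks above record.
    # Assumes MyXGB1/MyXGB2 and the train/test split are defined earlier in the notebook.
    from flaml import AutoML
    from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error

    automl = AutoML()
    # Register the two custom learners that appear in the log as my_xgb1/my_xgb2.
    automl.add_learner(learner_name='my_xgb1', learner_class=MyXGB1)
    automl.add_learner(learner_name='my_xgb2', learner_class=MyXGB2)

    # Restrict the search to the registered learners and optimize r2
    # (the log's "Minimizing error metric: 1-r2"). time_budget is illustrative.
    automl.fit(X_train=X_train, y_train=y_train, task='regression', metric='r2',
               time_budget=30, estimator_list=['my_xgb1', 'my_xgb2'])

    # Spelling of 'hyperparmeter' mirrors the notebook's recorded output.
    print('Best hyperparmeter config:', automl.best_config)
    print('Best r2 on validation data: {0:.4g}'.format(1 - automl.best_loss))

    # Held-out metrics, like the r2/mse/mae lines in the new stdout output.
    y_pred = automl.predict(X_test)
    print('r2 =', r2_score(y_test, y_pred))
    print('mse =', mean_squared_error(y_test, y_pred))
    print('mae =', mean_absolute_error(y_test, y_pred))

Because estimator_list is limited to the two registered names, only my_xgb1 and my_xgb2 appear in the iteration log above.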