From fdab49fb4474853f3cbf82054fcae786918b04cb Mon Sep 17 00:00:00 2001
From: Anipik
Date: Thu, 24 May 2018 13:43:22 +0530
Subject: [PATCH 1/2] Test Enabled

---
 test/Microsoft.ML.Predictor.Tests/TestPredictors.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
index 4a93d3468d..34f62f448c 100644
--- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
+++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
@@ -461,7 +461,7 @@ public void FastTreeBinaryClassificationNoOpGroupIdTest()
             Done();
         }
 
-        [Fact(Skip = "Need CoreTLC specific baseline update")]
+        [Fact]
         [TestCategory("Binary")]
         [TestCategory("FastTree")]
         public void FastTreeHighMinDocsTest()

From f2926fa26de71766cd34ee6b6e158e352ba188e7 Mon Sep 17 00:00:00 2001
From: Anipik
Date: Thu, 24 May 2018 13:43:44 +0530
Subject: [PATCH 2/2] Zbaseline files added

---
 ...ighMinDocs-TrainTest-breast-cancer-out.txt |  55 ++
 ...HighMinDocs-TrainTest-breast-cancer-rp.txt |   4 +
 ...reeHighMinDocs-TrainTest-breast-cancer.txt | 700 ++++++++++++++++++
 ...ighMinDocs-TrainTest-breast-cancer-out.txt |  55 ++
 ...HighMinDocs-TrainTest-breast-cancer-rp.txt |   4 +
 ...reeHighMinDocs-TrainTest-breast-cancer.txt | 700 ++++++++++++++++++
 6 files changed, 1518 insertions(+)
 create mode 100644 ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt
 create mode 100644 ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt
 create mode 100644 ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt
 create mode 100644 ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt
 create mode 100644 ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt
 create mode 100644 ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt

diff --git a/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt b/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt
new file mode 100644
index 0000000000..7e76faa1d9
--- /dev/null
+++ b/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt
@@ -0,0 +1,55 @@
+maml.exe TrainTest test=%Data% tr=FastTreeBinaryClassification{mil=10000 iter=5} cache=- dout=%Output% loader=Text{sparse- col=Attr:TX:6 col=Label:0 col=Features:1-5,6,7-9} data=%Data% out=%Output% seed=1
+Not adding a normalizer.
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Warning: Skipped 16 instances with missing features during training
+Processed 683 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: 468 bytes
+Starting to train ...
+Warning: 5 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Not training a calibrator because it is not needed.
+TEST POSITIVE RATIO: 0.3448 (241.0/(241.0+458.0))
+Confusion table
+          ||======================
+PREDICTED || positive | negative | Recall
+TRUTH     ||======================
+ positive ||        0 |      241 | 0.0000
+ negative ||        0 |      458 | 1.0000
+          ||======================
+Precision ||   0.0000 |   0.6552 |
+OVERALL 0/1 ACCURACY: 0.655222
+LOG LOSS/instance: 1.000000
+Test-set entropy (prior Log-Loss/instance): 0.929318
+LOG-LOSS REDUCTION (RIG): -7.605800
+AUC: 0.500000
+
+OVERALL RESULTS
+---------------------------------------
+AUC: 0.500000 (0.0000)
+Accuracy: 0.655222 (0.0000)
+Positive precision: 0.000000 (0.0000)
+Positive recall: 0.000000 (0.0000)
+Negative precision: 0.655222 (0.0000)
+Negative recall: 1.000000 (0.0000)
+Log-loss: 1.000000 (0.0000)
+Log-loss reduction: -7.605800 (0.0000)
+F1 Score: NaN (0.0000)
+AUPRC: 0.415719 (0.0000)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
+--- Progress log ---
+[1] 'FastTree data preparation' started.
+[1] 'FastTree data preparation' finished in %Time%.
+[2] 'FastTree in-memory bins initialization' started.
+[2] 'FastTree in-memory bins initialization' finished in %Time%.
+[3] 'FastTree feature conversion' started.
+[3] 'FastTree feature conversion' finished in %Time%.
+[4] 'FastTree training' started.
+[4] 'FastTree training' finished in %Time%.
+[5] 'Saving model' started.
+[5] 'Saving model' finished in %Time%.
diff --git a/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt b/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt
new file mode 100644
index 0000000000..3aab3ceed7
--- /dev/null
+++ b/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt
@@ -0,0 +1,4 @@
+FastTreeBinaryClassification
+AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /mil /iter Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.5 0.655222 0 0 0.655222 1 1 -7.6058 NaN 0.415719 10000 5 FastTreeBinaryClassification %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=FastTreeBinaryClassification{mil=10000 iter=5} cache=- dout=%Output% loader=Text{sparse- col=Attr:TX:6 col=Label:0 col=Features:1-5,6,7-9} data=%Data% out=%Output% seed=1 /mil:10000;/iter:5
+
diff --git a/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt b/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt
new file mode 100644
index 0000000000..a30a2f04cc
--- /dev/null
+++ b/ZBaselines/SingleDebug/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt
@@ -0,0 +1,700 @@
+Instance Label Score Probability Log-loss Assigned
+0 0 0 0.5 1 0
+1 0 0 0.5 1 0
+2 0 0 0.5 1 0
+3 0 0 0.5 1 0
+4 0 0 0.5 1 0
+5 1 0 0.5 1 0
+6 0 0 0.5 1 0
+7 0 0 0.5 1 0
+8 0 0 0.5 1 0
+9 0 0 0.5 1 0
+10 0 0 0.5 1 0
+11 0 0 0.5 1 0
+12 1 0 0.5 1 0
+13 0 0 0.5 1 0
+14 1 0 0.5 1 0
+15 1 0 0.5 1 0
+16 0 0 0.5 1 0
+17 0 0 0.5 1 0
+18 1 0 0.5 1 0
+19 0 0 0.5 1 0
+20 1 0 0.5 1 0
+21 1 0 0.5 1 0
+22 0 0 0.5 1 0
+23 1 0 0.5 1 0
+24 0 0 0.5 1 0
+25 1 0 0.5 1 0
+26 0 0 0.5 1 0
+27 0 0 0.5 1 0
+28 0 0 0.5 1 0
+29 0 0 0.5 1 0
+30 0 0 0.5 1 0
+31 0 0 0.5 1 0
+32 1 0 0.5 1 0
+33 0 0 0.5 1 0
+34 0 0 0.5 1 0
+35 0 0 0.5 1 0
+36 1 0 0.5 1
0 +37 0 0 0.5 1 0 +38 1 0 0.5 1 0 +39 1 0 0.5 1 0 +40 0 0 0.5 1 0 +41 1 0 0.5 1 0 +42 1 0 0.5 1 0 +43 1 0 0.5 1 0 +44 1 0 0.5 1 0 +45 0 0 0.5 1 0 +46 1 0 0.5 1 0 +47 0 0 0.5 1 0 +48 0 0 0.5 1 0 +49 1 0 0.5 1 0 +50 1 0 0.5 1 0 +51 1 0 0.5 1 0 +52 1 0 0.5 1 0 +53 1 0 0.5 1 0 +54 1 0 0.5 1 0 +55 1 0 0.5 1 0 +56 1 0 0.5 1 0 +57 1 0 0.5 1 0 +58 1 0 0.5 1 0 +59 1 0 0.5 1 0 +60 1 0 0.5 1 0 +61 0 0 0.5 1 0 +62 1 0 0.5 1 0 +63 1 0 0.5 1 0 +64 0 0 0.5 1 0 +65 1 0 0.5 1 0 +66 0 0 0.5 1 0 +67 1 0 0.5 1 0 +68 1 0 0.5 1 0 +69 0 0 0.5 1 0 +70 0 0 0.5 1 0 +71 1 0 0.5 1 0 +72 0 0 0.5 1 0 +73 1 0 0.5 1 0 +74 1 0 0.5 1 0 +75 0 0 0.5 1 0 +76 0 0 0.5 1 0 +77 0 0 0.5 1 0 +78 0 0 0.5 1 0 +79 0 0 0.5 1 0 +80 0 0 0.5 1 0 +81 0 0 0.5 1 0 +82 0 0 0.5 1 0 +83 0 0 0.5 1 0 +84 1 0 0.5 1 0 +85 1 0 0.5 1 0 +86 1 0 0.5 1 0 +87 1 0 0.5 1 0 +88 0 0 0.5 1 0 +89 0 0 0.5 1 0 +90 0 0 0.5 1 0 +91 0 0 0.5 1 0 +92 0 0 0.5 1 0 +93 0 0 0.5 1 0 +94 0 0 0.5 1 0 +95 0 0 0.5 1 0 +96 0 0 0.5 1 0 +97 0 0 0.5 1 0 +98 1 0 0.5 1 0 +99 1 0 0.5 1 0 +100 1 0 0.5 1 0 +101 1 0 0.5 1 0 +102 0 0 0.5 1 0 +103 1 0 0.5 1 0 +104 1 0 0.5 1 0 +105 1 0 0.5 1 0 +106 1 0 0.5 1 0 +107 1 0 0.5 1 0 +108 0 0 0.5 1 0 +109 1 0 0.5 1 0 +110 0 0 0.5 1 0 +111 1 0 0.5 1 0 +112 1 0 0.5 1 0 +113 1 0 0.5 1 0 +114 0 0 0.5 1 0 +115 0 0 0.5 1 0 +116 0 0 0.5 1 0 +117 1 0 0.5 1 0 +118 0 0 0.5 1 0 +119 0 0 0.5 1 0 +120 0 0 0.5 1 0 +121 0 0 0.5 1 0 +122 1 0 0.5 1 0 +123 1 0 0.5 1 0 +124 1 0 0.5 1 0 +125 0 0 0.5 1 0 +126 1 0 0.5 1 0 +127 0 0 0.5 1 0 +128 1 0 0.5 1 0 +129 0 0 0.5 1 0 +130 0 0 0.5 1 0 +131 0 0 0.5 1 0 +132 1 0 0.5 1 0 +133 0 0 0.5 1 0 +134 0 0 0.5 1 0 +135 0 0 0.5 1 0 +136 0 0 0.5 1 0 +137 0 0 0.5 1 0 +138 0 0 0.5 1 0 +139 0 0 0.5 1 0 +140 0 0 0.5 1 0 +141 0 0 0.5 1 0 +142 1 0 0.5 1 0 +143 0 0 0.5 1 0 +144 0 0 0.5 1 0 +145 0 0 0.5 1 0 +146 1 0 0.5 1 0 +147 0 0 0.5 1 0 +148 0 0 0.5 1 0 +149 1 0 0.5 1 0 +150 0 0 0.5 1 0 +151 1 0 0.5 1 0 +152 1 0 0.5 1 0 +153 0 0 0.5 1 0 +154 0 0 0.5 1 0 +155 1 0 0.5 1 0 +156 0 0 0.5 1 0 +157 0 0 0.5 1 0 +158 0 0 0.5 1 0 +159 1 0 0.5 1 0 +160 1 0 0.5 1 0 +161 0 0 0.5 1 0 +162 0 0 0.5 1 0 +163 0 0 0.5 1 0 +164 0 0 0.5 1 0 +165 0 0 0.5 1 0 +166 1 0 0.5 1 0 +167 1 0 0.5 1 0 +168 0 0 0.5 1 0 +169 0 0 0.5 1 0 +170 0 0 0.5 1 0 +171 0 0 0.5 1 0 +172 0 0 0.5 1 0 +173 1 0 0.5 1 0 +174 1 0 0.5 1 0 +175 1 0 0.5 1 0 +176 0 0 0.5 1 0 +177 1 0 0.5 1 0 +178 0 0 0.5 1 0 +179 1 0 0.5 1 0 +180 0 0 0.5 1 0 +181 0 0 0.5 1 0 +182 0 0 0.5 1 0 +183 1 0 0.5 1 0 +184 1 0 0.5 1 0 +185 0 0 0.5 1 0 +186 1 0 0.5 1 0 +187 1 0 0.5 1 0 +188 1 0 0.5 1 0 +189 0 0 0.5 1 0 +190 1 0 0.5 1 0 +191 1 0 0.5 1 0 +192 0 0 0.5 1 0 +193 0 0 0.5 1 0 +194 0 0 0.5 1 0 +195 0 0 0.5 1 0 +196 0 0 0.5 1 0 +197 0 0 0.5 1 0 +198 0 0 0.5 1 0 +199 0 0 0.5 1 0 +200 1 0 0.5 1 0 +201 1 0 0.5 1 0 +202 0 0 0.5 1 0 +203 0 0 0.5 1 0 +204 0 0 0.5 1 0 +205 1 0 0.5 1 0 +206 1 0 0.5 1 0 +207 0 0 0.5 1 0 +208 0 0 0.5 1 0 +209 0 0 0.5 1 0 +210 1 0 0.5 1 0 +211 1 0 0.5 1 0 +212 0 0 0.5 1 0 +213 1 0 0.5 1 0 +214 1 0 0.5 1 0 +215 1 0 0.5 1 0 +216 0 0 0.5 1 0 +217 0 0 0.5 1 0 +218 1 0 0.5 1 0 +219 0 0 0.5 1 0 +220 0 0 0.5 1 0 +221 1 0 0.5 1 0 +222 1 0 0.5 1 0 +223 1 0 0.5 1 0 +224 1 0 0.5 1 0 +225 0 0 0.5 1 0 +226 1 0 0.5 1 0 +227 1 0 0.5 1 0 +228 0 0 0.5 1 0 +229 1 0 0.5 1 0 +230 1 0 0.5 1 0 +231 1 0 0.5 1 0 +232 0 0 0.5 1 0 +233 1 0 0.5 1 0 +234 0 0 0.5 1 0 +235 0 0 0.5 1 0 +236 1 0 0.5 1 0 +237 1 0 0.5 1 0 +238 1 0 0.5 1 0 +239 1 0 0.5 1 0 +240 0 0 0.5 1 0 +241 0 0 0.5 1 0 +242 0 0 0.5 1 0 +243 0 0 0.5 1 0 +244 0 0 0.5 1 0 +245 0 0 0.5 1 0 +246 1 0 0.5 1 0 +247 1 0 0.5 1 0 +248 0 0 0.5 1 0 +249 0 0 
0.5 1 0 +250 0 0 0.5 1 0 +251 1 0 0.5 1 0 +252 0 0 0.5 1 0 +253 1 0 0.5 1 0 +254 1 0 0.5 1 0 +255 1 0 0.5 1 0 +256 0 0 0.5 1 0 +257 0 0 0.5 1 0 +258 0 0 0.5 1 0 +259 0 0 0.5 1 0 +260 1 0 0.5 1 0 +261 1 0 0.5 1 0 +262 1 0 0.5 1 0 +263 1 0 0.5 1 0 +264 1 0 0.5 1 0 +265 0 0 0.5 1 0 +266 1 0 0.5 1 0 +267 1 0 0.5 1 0 +268 1 0 0.5 1 0 +269 0 0 0.5 1 0 +270 1 0 0.5 1 0 +271 0 0 0.5 1 0 +272 1 0 0.5 1 0 +273 1 0 0.5 1 0 +274 0 0 0.5 1 0 +275 0 0 0.5 1 0 +276 0 0 0.5 1 0 +277 0 0 0.5 1 0 +278 0 0 0.5 1 0 +279 1 0 0.5 1 0 +280 0 0 0.5 1 0 +281 0 0 0.5 1 0 +282 1 0 0.5 1 0 +283 1 0 0.5 1 0 +284 1 0 0.5 1 0 +285 1 0 0.5 1 0 +286 1 0 0.5 1 0 +287 0 0 0.5 1 0 +288 1 0 0.5 1 0 +289 1 0 0.5 1 0 +290 0 0 0.5 1 0 +291 0 0 0.5 1 0 +292 1 0 0.5 1 0 +293 1 0 0.5 1 0 +294 0 0 0.5 1 0 +295 1 0 0.5 1 0 +296 0 0 0.5 1 0 +297 0 0 0.5 1 0 +298 0 0 0.5 1 0 +299 1 0 0.5 1 0 +300 1 0 0.5 1 0 +301 0 0 0.5 1 0 +302 1 0 0.5 1 0 +303 0 0 0.5 1 0 +304 1 0 0.5 1 0 +305 1 0 0.5 1 0 +306 0 0 0.5 1 0 +307 0 0 0.5 1 0 +308 1 0 0.5 1 0 +309 0 0 0.5 1 0 +310 0 0 0.5 1 0 +311 0 0 0.5 1 0 +312 1 0 0.5 1 0 +313 0 0 0.5 1 0 +314 0 0 0.5 1 0 +315 0 0 0.5 1 0 +316 1 0 0.5 1 0 +317 1 0 0.5 1 0 +318 0 0 0.5 1 0 +319 0 0 0.5 1 0 +320 1 0 0.5 1 0 +321 0 0 0.5 1 0 +322 0 0 0.5 1 0 +323 1 0 0.5 1 0 +324 0 0 0.5 1 0 +325 0 0 0.5 1 0 +326 1 0 0.5 1 0 +327 0 0 0.5 1 0 +328 1 0 0.5 1 0 +329 1 0 0.5 1 0 +330 1 0 0.5 1 0 +331 0 0 0.5 1 0 +332 0 0 0.5 1 0 +333 1 0 0.5 1 0 +334 1 0 0.5 1 0 +335 0 0 0.5 1 0 +336 1 0 0.5 1 0 +337 0 0 0.5 1 0 +338 0 0 0.5 1 0 +339 1 0 0.5 1 0 +340 1 0 0.5 1 0 +341 0 0 0.5 1 0 +342 0 0 0.5 1 0 +343 0 0 0.5 1 0 +344 1 0 0.5 1 0 +345 0 0 0.5 1 0 +346 0 0 0.5 1 0 +347 0 0 0.5 1 0 +348 1 0 0.5 1 0 +349 1 0 0.5 1 0 +350 0 0 0.5 1 0 +351 0 0 0.5 1 0 +352 0 0 0.5 1 0 +353 1 0 0.5 1 0 +354 0 0 0.5 1 0 +355 0 0 0.5 1 0 +356 1 0 0.5 1 0 +357 1 0 0.5 1 0 +358 1 0 0.5 1 0 +359 1 0 0.5 1 0 +360 1 0 0.5 1 0 +361 1 0 0.5 1 0 +362 0 0 0.5 1 0 +363 0 0 0.5 1 0 +364 0 0 0.5 1 0 +365 0 0 0.5 1 0 +366 1 0 0.5 1 0 +367 1 0 0.5 1 0 +368 0 0 0.5 1 0 +369 0 0 0.5 1 0 +370 0 0 0.5 1 0 +371 0 0 0.5 1 0 +372 0 0 0.5 1 0 +373 0 0 0.5 1 0 +374 0 0 0.5 1 0 +375 0 0 0.5 1 0 +376 0 0 0.5 1 0 +377 0 0 0.5 1 0 +378 0 0 0.5 1 0 +379 0 0 0.5 1 0 +380 0 0 0.5 1 0 +381 1 0 0.5 1 0 +382 0 0 0.5 1 0 +383 0 0 0.5 1 0 +384 0 0 0.5 1 0 +385 0 0 0.5 1 0 +386 1 0 0.5 1 0 +387 0 0 0.5 1 0 +388 0 0 0.5 1 0 +389 0 0 0.5 1 0 +390 0 0 0.5 1 0 +391 1 0 0.5 1 0 +392 0 0 0.5 1 0 +393 0 0 0.5 1 0 +394 0 0 0.5 1 0 +395 0 0 0.5 1 0 +396 0 0 0.5 1 0 +397 0 0 0.5 1 0 +398 0 0 0.5 1 0 +399 0 0 0.5 1 0 +400 1 0 0.5 1 0 +401 0 0 0.5 1 0 +402 0 0 0.5 1 0 +403 0 0 0.5 1 0 +404 0 0 0.5 1 0 +405 0 0 0.5 1 0 +406 0 0 0.5 1 0 +407 0 0 0.5 1 0 +408 0 0 0.5 1 0 +409 0 0 0.5 1 0 +410 0 0 0.5 1 0 +411 0 0 0.5 1 0 +412 1 0 0.5 1 0 +413 0 0 0.5 1 0 +414 1 0 0.5 1 0 +415 0 0 0.5 1 0 +416 1 0 0.5 1 0 +417 0 0 0.5 1 0 +418 0 0 0.5 1 0 +419 0 0 0.5 1 0 +420 0 0 0.5 1 0 +421 1 0 0.5 1 0 +422 0 0 0.5 1 0 +423 0 0 0.5 1 0 +424 0 0 0.5 1 0 +425 1 0 0.5 1 0 +426 0 0 0.5 1 0 +427 1 0 0.5 1 0 +428 0 0 0.5 1 0 +429 0 0 0.5 1 0 +430 0 0 0.5 1 0 +431 0 0 0.5 1 0 +432 0 0 0.5 1 0 +433 0 0 0.5 1 0 +434 0 0 0.5 1 0 +435 1 0 0.5 1 0 +436 1 0 0.5 1 0 +437 0 0 0.5 1 0 +438 0 0 0.5 1 0 +439 0 0 0.5 1 0 +440 1 0 0.5 1 0 +441 0 0 0.5 1 0 +442 0 0 0.5 1 0 +443 0 0 0.5 1 0 +444 0 0 0.5 1 0 +445 0 0 0.5 1 0 +446 0 0 0.5 1 0 +447 0 0 0.5 1 0 +448 0 0 0.5 1 0 +449 1 0 0.5 1 0 +450 0 0 0.5 1 0 +451 0 0 0.5 1 0 +452 0 0 0.5 1 0 +453 1 0 0.5 1 0 +454 0 0 0.5 1 0 +455 1 0 0.5 1 0 +456 1 0 0.5 1 0 +457 1 0 0.5 1 0 +458 0 0 
0.5 1 0 +459 0 0 0.5 1 0 +460 0 0 0.5 1 0 +461 0 0 0.5 1 0 +462 0 0 0.5 1 0 +463 0 0 0.5 1 0 +464 0 0 0.5 1 0 +465 1 0 0.5 1 0 +466 1 0 0.5 1 0 +467 1 0 0.5 1 0 +468 0 0 0.5 1 0 +469 0 0 0.5 1 0 +470 0 0 0.5 1 0 +471 0 0 0.5 1 0 +472 0 0 0.5 1 0 +473 0 0 0.5 1 0 +474 0 0 0.5 1 0 +475 0 0 0.5 1 0 +476 0 0 0.5 1 0 +477 0 0 0.5 1 0 +478 0 0 0.5 1 0 +479 1 0 0.5 1 0 +480 0 0 0.5 1 0 +481 0 0 0.5 1 0 +482 1 0 0.5 1 0 +483 1 0 0.5 1 0 +484 0 0 0.5 1 0 +485 0 0 0.5 1 0 +486 0 0 0.5 1 0 +487 1 0 0.5 1 0 +488 1 0 0.5 1 0 +489 1 0 0.5 1 0 +490 0 0 0.5 1 0 +491 1 0 0.5 1 0 +492 0 0 0.5 1 0 +493 1 0 0.5 1 0 +494 0 0 0.5 1 0 +495 0 0 0.5 1 0 +496 0 0 0.5 1 0 +497 0 0 0.5 1 0 +498 0 0 0.5 1 0 +499 0 0 0.5 1 0 +500 0 0 0.5 1 0 +501 0 0 0.5 1 0 +502 0 0 0.5 1 0 +503 0 0 0.5 1 0 +504 0 0 0.5 1 0 +505 0 0 0.5 1 0 +506 1 0 0.5 1 0 +507 0 0 0.5 1 0 +508 0 0 0.5 1 0 +509 0 0 0.5 1 0 +510 0 0 0.5 1 0 +511 0 0 0.5 1 0 +512 0 0 0.5 1 0 +513 0 0 0.5 1 0 +514 1 0 0.5 1 0 +515 1 0 0.5 1 0 +516 0 0 0.5 1 0 +517 0 0 0.5 1 0 +518 0 0 0.5 1 0 +519 1 0 0.5 1 0 +520 0 0 0.5 1 0 +521 0 0 0.5 1 0 +522 1 0 0.5 1 0 +523 1 0 0.5 1 0 +524 0 0 0.5 1 0 +525 0 0 0.5 1 0 +526 0 0 0.5 1 0 +527 0 0 0.5 1 0 +528 0 0 0.5 1 0 +529 0 0 0.5 1 0 +530 1 0 0.5 1 0 +531 0 0 0.5 1 0 +532 0 0 0.5 1 0 +533 0 0 0.5 1 0 +534 0 0 0.5 1 0 +535 0 0 0.5 1 0 +536 0 0 0.5 1 0 +537 0 0 0.5 1 0 +538 0 0 0.5 1 0 +539 0 0 0.5 1 0 +540 0 0 0.5 1 0 +541 0 0 0.5 1 0 +542 0 0 0.5 1 0 +543 0 0 0.5 1 0 +544 0 0 0.5 1 0 +545 0 0 0.5 1 0 +546 1 0 0.5 1 0 +547 0 0 0.5 1 0 +548 0 0 0.5 1 0 +549 1 0 0.5 1 0 +550 0 0 0.5 1 0 +551 0 0 0.5 1 0 +552 0 0 0.5 1 0 +553 0 0 0.5 1 0 +554 0 0 0.5 1 0 +555 0 0 0.5 1 0 +556 0 0 0.5 1 0 +557 0 0 0.5 1 0 +558 0 0 0.5 1 0 +559 0 0 0.5 1 0 +560 0 0 0.5 1 0 +561 0 0 0.5 1 0 +562 0 0 0.5 1 0 +563 0 0 0.5 1 0 +564 0 0 0.5 1 0 +565 1 0 0.5 1 0 +566 0 0 0.5 1 0 +567 0 0 0.5 1 0 +568 1 0 0.5 1 0 +569 1 0 0.5 1 0 +570 1 0 0.5 1 0 +571 1 0 0.5 1 0 +572 0 0 0.5 1 0 +573 0 0 0.5 1 0 +574 1 0 0.5 1 0 +575 0 0 0.5 1 0 +576 0 0 0.5 1 0 +577 0 0 0.5 1 0 +578 0 0 0.5 1 0 +579 0 0 0.5 1 0 +580 0 0 0.5 1 0 +581 1 0 0.5 1 0 +582 1 0 0.5 1 0 +583 0 0 0.5 1 0 +584 0 0 0.5 1 0 +585 0 0 0.5 1 0 +586 1 0 0.5 1 0 +587 0 0 0.5 1 0 +588 1 0 0.5 1 0 +589 0 0 0.5 1 0 +590 1 0 0.5 1 0 +591 1 0 0.5 1 0 +592 1 0 0.5 1 0 +593 0 0 0.5 1 0 +594 1 0 0.5 1 0 +595 0 0 0.5 1 0 +596 0 0 0.5 1 0 +597 0 0 0.5 1 0 +598 0 0 0.5 1 0 +599 0 0 0.5 1 0 +600 0 0 0.5 1 0 +601 0 0 0.5 1 0 +602 0 0 0.5 1 0 +603 1 0 0.5 1 0 +604 1 0 0.5 1 0 +605 1 0 0.5 1 0 +606 0 0 0.5 1 0 +607 0 0 0.5 1 0 +608 1 0 0.5 1 0 +609 0 0 0.5 1 0 +610 1 0 0.5 1 0 +611 1 0 0.5 1 0 +612 1 0 0.5 1 0 +613 0 0 0.5 1 0 +614 0 0 0.5 1 0 +615 0 0 0.5 1 0 +616 0 0 0.5 1 0 +617 0 0 0.5 1 0 +618 0 0 0.5 1 0 +619 0 0 0.5 1 0 +620 0 0 0.5 1 0 +621 0 0 0.5 1 0 +622 0 0 0.5 1 0 +623 0 0 0.5 1 0 +624 0 0 0.5 1 0 +625 0 0 0.5 1 0 +626 1 0 0.5 1 0 +627 0 0 0.5 1 0 +628 0 0 0.5 1 0 +629 0 0 0.5 1 0 +630 0 0 0.5 1 0 +631 0 0 0.5 1 0 +632 0 0 0.5 1 0 +633 1 0 0.5 1 0 +634 0 0 0.5 1 0 +635 0 0 0.5 1 0 +636 1 0 0.5 1 0 +637 0 0 0.5 1 0 +638 0 0 0.5 1 0 +639 0 0 0.5 1 0 +640 0 0 0.5 1 0 +641 0 0 0.5 1 0 +642 0 0 0.5 1 0 +643 0 0 0.5 1 0 +644 0 0 0.5 1 0 +645 0 0 0.5 1 0 +646 0 0 0.5 1 0 +647 0 0 0.5 1 0 +648 1 0 0.5 1 0 +649 0 0 0.5 1 0 +650 0 0 0.5 1 0 +651 0 0 0.5 1 0 +652 0 0 0.5 1 0 +653 0 0 0.5 1 0 +654 0 0 0.5 1 0 +655 0 0 0.5 1 0 +656 0 0 0.5 1 0 +657 0 0 0.5 1 0 +658 1 0 0.5 1 0 +659 0 0 0.5 1 0 +660 0 0 0.5 1 0 +661 0 0 0.5 1 0 +662 0 0 0.5 1 0 +663 0 0 0.5 1 0 +664 0 0 0.5 1 0 +665 0 0 0.5 1 0 +666 0 0 0.5 1 0 +667 0 0 
0.5 1 0
+668 1 0 0.5 1 0
+669 1 0 0.5 1 0
+670 1 0 0.5 1 0
+671 0 0 0.5 1 0
+672 0 0 0.5 1 0
+673 0 0 0.5 1 0
+674 0 0 0.5 1 0
+675 0 0 0.5 1 0
+676 0 0 0.5 1 0
+677 0 0 0.5 1 0
+678 0 0 0.5 1 0
+679 0 0 0.5 1 0
+680 1 0 0.5 1 0
+681 1 0 0.5 1 0
+682 0 0 0.5 1 0
+683 0 0 0.5 1 0
+684 0 0 0.5 1 0
+685 0 0 0.5 1 0
+686 0 0 0.5 1 0
+687 0 0 0.5 1 0
+688 0 0 0.5 1 0
+689 0 0 0.5 1 0
+690 0 0 0.5 1 0
+691 1 0 0.5 1 0
+692 0 0 0.5 1 0
+693 0 0 0.5 1 0
+694 0 0 0.5 1 0
+695 0 0 0.5 1 0
+696 1 0 0.5 1 0
+697 1 0 0.5 1 0
+698 1 0 0.5 1 0
diff --git a/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt b/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt
new file mode 100644
index 0000000000..7e76faa1d9
--- /dev/null
+++ b/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-out.txt
@@ -0,0 +1,55 @@
+maml.exe TrainTest test=%Data% tr=FastTreeBinaryClassification{mil=10000 iter=5} cache=- dout=%Output% loader=Text{sparse- col=Attr:TX:6 col=Label:0 col=Features:1-5,6,7-9} data=%Data% out=%Output% seed=1
+Not adding a normalizer.
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Warning: Skipped 16 instances with missing features during training
+Processed 683 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: 468 bytes
+Starting to train ...
+Warning: 5 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Not training a calibrator because it is not needed.
+TEST POSITIVE RATIO: 0.3448 (241.0/(241.0+458.0))
+Confusion table
+          ||======================
+PREDICTED || positive | negative | Recall
+TRUTH     ||======================
+ positive ||        0 |      241 | 0.0000
+ negative ||        0 |      458 | 1.0000
+          ||======================
+Precision ||   0.0000 |   0.6552 |
+OVERALL 0/1 ACCURACY: 0.655222
+LOG LOSS/instance: 1.000000
+Test-set entropy (prior Log-Loss/instance): 0.929318
+LOG-LOSS REDUCTION (RIG): -7.605800
+AUC: 0.500000
+
+OVERALL RESULTS
+---------------------------------------
+AUC: 0.500000 (0.0000)
+Accuracy: 0.655222 (0.0000)
+Positive precision: 0.000000 (0.0000)
+Positive recall: 0.000000 (0.0000)
+Negative precision: 0.655222 (0.0000)
+Negative recall: 1.000000 (0.0000)
+Log-loss: 1.000000 (0.0000)
+Log-loss reduction: -7.605800 (0.0000)
+F1 Score: NaN (0.0000)
+AUPRC: 0.415719 (0.0000)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
+--- Progress log ---
+[1] 'FastTree data preparation' started.
+[1] 'FastTree data preparation' finished in %Time%.
+[2] 'FastTree in-memory bins initialization' started.
+[2] 'FastTree in-memory bins initialization' finished in %Time%.
+[3] 'FastTree feature conversion' started.
+[3] 'FastTree feature conversion' finished in %Time%.
+[4] 'FastTree training' started.
+[4] 'FastTree training' finished in %Time%.
+[5] 'Saving model' started.
+[5] 'Saving model' finished in %Time%.
diff --git a/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt b/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt new file mode 100644 index 0000000000..3aab3ceed7 --- /dev/null +++ b/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer-rp.txt @@ -0,0 +1,4 @@ +FastTreeBinaryClassification +AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /mil /iter Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.5 0.655222 0 0 0.655222 1 1 -7.6058 NaN 0.415719 10000 5 FastTreeBinaryClassification %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=FastTreeBinaryClassification{mil=10000 iter=5} cache=- dout=%Output% loader=Text{sparse- col=Attr:TX:6 col=Label:0 col=Features:1-5,6,7-9} data=%Data% out=%Output% seed=1 /mil:10000;/iter:5 + diff --git a/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt b/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt new file mode 100644 index 0000000000..a30a2f04cc --- /dev/null +++ b/ZBaselines/SingleRelease/FastTreeBinaryClassification/FastTreeHighMinDocs-TrainTest-breast-cancer.txt @@ -0,0 +1,700 @@ +Instance Label Score Probability Log-loss Assigned +0 0 0 0.5 1 0 +1 0 0 0.5 1 0 +2 0 0 0.5 1 0 +3 0 0 0.5 1 0 +4 0 0 0.5 1 0 +5 1 0 0.5 1 0 +6 0 0 0.5 1 0 +7 0 0 0.5 1 0 +8 0 0 0.5 1 0 +9 0 0 0.5 1 0 +10 0 0 0.5 1 0 +11 0 0 0.5 1 0 +12 1 0 0.5 1 0 +13 0 0 0.5 1 0 +14 1 0 0.5 1 0 +15 1 0 0.5 1 0 +16 0 0 0.5 1 0 +17 0 0 0.5 1 0 +18 1 0 0.5 1 0 +19 0 0 0.5 1 0 +20 1 0 0.5 1 0 +21 1 0 0.5 1 0 +22 0 0 0.5 1 0 +23 1 0 0.5 1 0 +24 0 0 0.5 1 0 +25 1 0 0.5 1 0 +26 0 0 0.5 1 0 +27 0 0 0.5 1 0 +28 0 0 0.5 1 0 +29 0 0 0.5 1 0 +30 0 0 0.5 1 0 +31 0 0 0.5 1 0 +32 1 0 0.5 1 0 +33 0 0 0.5 1 0 +34 0 0 0.5 1 0 +35 0 0 0.5 1 0 +36 1 0 0.5 1 0 +37 0 0 0.5 1 0 +38 1 0 0.5 1 0 +39 1 0 0.5 1 0 +40 0 0 0.5 1 0 +41 1 0 0.5 1 0 +42 1 0 0.5 1 0 +43 1 0 0.5 1 0 +44 1 0 0.5 1 0 +45 0 0 0.5 1 0 +46 1 0 0.5 1 0 +47 0 0 0.5 1 0 +48 0 0 0.5 1 0 +49 1 0 0.5 1 0 +50 1 0 0.5 1 0 +51 1 0 0.5 1 0 +52 1 0 0.5 1 0 +53 1 0 0.5 1 0 +54 1 0 0.5 1 0 +55 1 0 0.5 1 0 +56 1 0 0.5 1 0 +57 1 0 0.5 1 0 +58 1 0 0.5 1 0 +59 1 0 0.5 1 0 +60 1 0 0.5 1 0 +61 0 0 0.5 1 0 +62 1 0 0.5 1 0 +63 1 0 0.5 1 0 +64 0 0 0.5 1 0 +65 1 0 0.5 1 0 +66 0 0 0.5 1 0 +67 1 0 0.5 1 0 +68 1 0 0.5 1 0 +69 0 0 0.5 1 0 +70 0 0 0.5 1 0 +71 1 0 0.5 1 0 +72 0 0 0.5 1 0 +73 1 0 0.5 1 0 +74 1 0 0.5 1 0 +75 0 0 0.5 1 0 +76 0 0 0.5 1 0 +77 0 0 0.5 1 0 +78 0 0 0.5 1 0 +79 0 0 0.5 1 0 +80 0 0 0.5 1 0 +81 0 0 0.5 1 0 +82 0 0 0.5 1 0 +83 0 0 0.5 1 0 +84 1 0 0.5 1 0 +85 1 0 0.5 1 0 +86 1 0 0.5 1 0 +87 1 0 0.5 1 0 +88 0 0 0.5 1 0 +89 0 0 0.5 1 0 +90 0 0 0.5 1 0 +91 0 0 0.5 1 0 +92 0 0 0.5 1 0 +93 0 0 0.5 1 0 +94 0 0 0.5 1 0 +95 0 0 0.5 1 0 +96 0 0 0.5 1 0 +97 0 0 0.5 1 0 +98 1 0 0.5 1 0 +99 1 0 0.5 1 0 +100 1 0 0.5 1 0 +101 1 0 0.5 1 0 +102 0 0 0.5 1 0 +103 1 0 0.5 1 0 +104 1 0 0.5 1 0 +105 1 0 0.5 1 0 +106 1 0 0.5 1 0 +107 1 0 0.5 1 0 +108 0 0 0.5 1 0 +109 1 0 0.5 1 0 +110 0 0 0.5 1 0 +111 1 0 0.5 1 0 +112 1 0 0.5 1 0 +113 1 0 0.5 1 0 +114 0 0 0.5 1 0 +115 0 0 0.5 1 0 +116 0 0 0.5 1 0 +117 1 0 0.5 1 0 +118 0 0 0.5 1 0 +119 0 0 0.5 1 0 +120 0 0 0.5 1 0 +121 0 0 0.5 1 0 +122 1 0 0.5 1 0 +123 1 0 0.5 1 0 +124 1 0 0.5 1 0 +125 0 0 0.5 1 0 +126 1 0 0.5 1 0 
+127 0 0 0.5 1 0 +128 1 0 0.5 1 0 +129 0 0 0.5 1 0 +130 0 0 0.5 1 0 +131 0 0 0.5 1 0 +132 1 0 0.5 1 0 +133 0 0 0.5 1 0 +134 0 0 0.5 1 0 +135 0 0 0.5 1 0 +136 0 0 0.5 1 0 +137 0 0 0.5 1 0 +138 0 0 0.5 1 0 +139 0 0 0.5 1 0 +140 0 0 0.5 1 0 +141 0 0 0.5 1 0 +142 1 0 0.5 1 0 +143 0 0 0.5 1 0 +144 0 0 0.5 1 0 +145 0 0 0.5 1 0 +146 1 0 0.5 1 0 +147 0 0 0.5 1 0 +148 0 0 0.5 1 0 +149 1 0 0.5 1 0 +150 0 0 0.5 1 0 +151 1 0 0.5 1 0 +152 1 0 0.5 1 0 +153 0 0 0.5 1 0 +154 0 0 0.5 1 0 +155 1 0 0.5 1 0 +156 0 0 0.5 1 0 +157 0 0 0.5 1 0 +158 0 0 0.5 1 0 +159 1 0 0.5 1 0 +160 1 0 0.5 1 0 +161 0 0 0.5 1 0 +162 0 0 0.5 1 0 +163 0 0 0.5 1 0 +164 0 0 0.5 1 0 +165 0 0 0.5 1 0 +166 1 0 0.5 1 0 +167 1 0 0.5 1 0 +168 0 0 0.5 1 0 +169 0 0 0.5 1 0 +170 0 0 0.5 1 0 +171 0 0 0.5 1 0 +172 0 0 0.5 1 0 +173 1 0 0.5 1 0 +174 1 0 0.5 1 0 +175 1 0 0.5 1 0 +176 0 0 0.5 1 0 +177 1 0 0.5 1 0 +178 0 0 0.5 1 0 +179 1 0 0.5 1 0 +180 0 0 0.5 1 0 +181 0 0 0.5 1 0 +182 0 0 0.5 1 0 +183 1 0 0.5 1 0 +184 1 0 0.5 1 0 +185 0 0 0.5 1 0 +186 1 0 0.5 1 0 +187 1 0 0.5 1 0 +188 1 0 0.5 1 0 +189 0 0 0.5 1 0 +190 1 0 0.5 1 0 +191 1 0 0.5 1 0 +192 0 0 0.5 1 0 +193 0 0 0.5 1 0 +194 0 0 0.5 1 0 +195 0 0 0.5 1 0 +196 0 0 0.5 1 0 +197 0 0 0.5 1 0 +198 0 0 0.5 1 0 +199 0 0 0.5 1 0 +200 1 0 0.5 1 0 +201 1 0 0.5 1 0 +202 0 0 0.5 1 0 +203 0 0 0.5 1 0 +204 0 0 0.5 1 0 +205 1 0 0.5 1 0 +206 1 0 0.5 1 0 +207 0 0 0.5 1 0 +208 0 0 0.5 1 0 +209 0 0 0.5 1 0 +210 1 0 0.5 1 0 +211 1 0 0.5 1 0 +212 0 0 0.5 1 0 +213 1 0 0.5 1 0 +214 1 0 0.5 1 0 +215 1 0 0.5 1 0 +216 0 0 0.5 1 0 +217 0 0 0.5 1 0 +218 1 0 0.5 1 0 +219 0 0 0.5 1 0 +220 0 0 0.5 1 0 +221 1 0 0.5 1 0 +222 1 0 0.5 1 0 +223 1 0 0.5 1 0 +224 1 0 0.5 1 0 +225 0 0 0.5 1 0 +226 1 0 0.5 1 0 +227 1 0 0.5 1 0 +228 0 0 0.5 1 0 +229 1 0 0.5 1 0 +230 1 0 0.5 1 0 +231 1 0 0.5 1 0 +232 0 0 0.5 1 0 +233 1 0 0.5 1 0 +234 0 0 0.5 1 0 +235 0 0 0.5 1 0 +236 1 0 0.5 1 0 +237 1 0 0.5 1 0 +238 1 0 0.5 1 0 +239 1 0 0.5 1 0 +240 0 0 0.5 1 0 +241 0 0 0.5 1 0 +242 0 0 0.5 1 0 +243 0 0 0.5 1 0 +244 0 0 0.5 1 0 +245 0 0 0.5 1 0 +246 1 0 0.5 1 0 +247 1 0 0.5 1 0 +248 0 0 0.5 1 0 +249 0 0 0.5 1 0 +250 0 0 0.5 1 0 +251 1 0 0.5 1 0 +252 0 0 0.5 1 0 +253 1 0 0.5 1 0 +254 1 0 0.5 1 0 +255 1 0 0.5 1 0 +256 0 0 0.5 1 0 +257 0 0 0.5 1 0 +258 0 0 0.5 1 0 +259 0 0 0.5 1 0 +260 1 0 0.5 1 0 +261 1 0 0.5 1 0 +262 1 0 0.5 1 0 +263 1 0 0.5 1 0 +264 1 0 0.5 1 0 +265 0 0 0.5 1 0 +266 1 0 0.5 1 0 +267 1 0 0.5 1 0 +268 1 0 0.5 1 0 +269 0 0 0.5 1 0 +270 1 0 0.5 1 0 +271 0 0 0.5 1 0 +272 1 0 0.5 1 0 +273 1 0 0.5 1 0 +274 0 0 0.5 1 0 +275 0 0 0.5 1 0 +276 0 0 0.5 1 0 +277 0 0 0.5 1 0 +278 0 0 0.5 1 0 +279 1 0 0.5 1 0 +280 0 0 0.5 1 0 +281 0 0 0.5 1 0 +282 1 0 0.5 1 0 +283 1 0 0.5 1 0 +284 1 0 0.5 1 0 +285 1 0 0.5 1 0 +286 1 0 0.5 1 0 +287 0 0 0.5 1 0 +288 1 0 0.5 1 0 +289 1 0 0.5 1 0 +290 0 0 0.5 1 0 +291 0 0 0.5 1 0 +292 1 0 0.5 1 0 +293 1 0 0.5 1 0 +294 0 0 0.5 1 0 +295 1 0 0.5 1 0 +296 0 0 0.5 1 0 +297 0 0 0.5 1 0 +298 0 0 0.5 1 0 +299 1 0 0.5 1 0 +300 1 0 0.5 1 0 +301 0 0 0.5 1 0 +302 1 0 0.5 1 0 +303 0 0 0.5 1 0 +304 1 0 0.5 1 0 +305 1 0 0.5 1 0 +306 0 0 0.5 1 0 +307 0 0 0.5 1 0 +308 1 0 0.5 1 0 +309 0 0 0.5 1 0 +310 0 0 0.5 1 0 +311 0 0 0.5 1 0 +312 1 0 0.5 1 0 +313 0 0 0.5 1 0 +314 0 0 0.5 1 0 +315 0 0 0.5 1 0 +316 1 0 0.5 1 0 +317 1 0 0.5 1 0 +318 0 0 0.5 1 0 +319 0 0 0.5 1 0 +320 1 0 0.5 1 0 +321 0 0 0.5 1 0 +322 0 0 0.5 1 0 +323 1 0 0.5 1 0 +324 0 0 0.5 1 0 +325 0 0 0.5 1 0 +326 1 0 0.5 1 0 +327 0 0 0.5 1 0 +328 1 0 0.5 1 0 +329 1 0 0.5 1 0 +330 1 0 0.5 1 0 +331 0 0 0.5 1 0 +332 0 0 0.5 1 0 +333 1 0 0.5 1 0 +334 1 0 0.5 1 0 +335 0 0 0.5 1 0 
+336 1 0 0.5 1 0 +337 0 0 0.5 1 0 +338 0 0 0.5 1 0 +339 1 0 0.5 1 0 +340 1 0 0.5 1 0 +341 0 0 0.5 1 0 +342 0 0 0.5 1 0 +343 0 0 0.5 1 0 +344 1 0 0.5 1 0 +345 0 0 0.5 1 0 +346 0 0 0.5 1 0 +347 0 0 0.5 1 0 +348 1 0 0.5 1 0 +349 1 0 0.5 1 0 +350 0 0 0.5 1 0 +351 0 0 0.5 1 0 +352 0 0 0.5 1 0 +353 1 0 0.5 1 0 +354 0 0 0.5 1 0 +355 0 0 0.5 1 0 +356 1 0 0.5 1 0 +357 1 0 0.5 1 0 +358 1 0 0.5 1 0 +359 1 0 0.5 1 0 +360 1 0 0.5 1 0 +361 1 0 0.5 1 0 +362 0 0 0.5 1 0 +363 0 0 0.5 1 0 +364 0 0 0.5 1 0 +365 0 0 0.5 1 0 +366 1 0 0.5 1 0 +367 1 0 0.5 1 0 +368 0 0 0.5 1 0 +369 0 0 0.5 1 0 +370 0 0 0.5 1 0 +371 0 0 0.5 1 0 +372 0 0 0.5 1 0 +373 0 0 0.5 1 0 +374 0 0 0.5 1 0 +375 0 0 0.5 1 0 +376 0 0 0.5 1 0 +377 0 0 0.5 1 0 +378 0 0 0.5 1 0 +379 0 0 0.5 1 0 +380 0 0 0.5 1 0 +381 1 0 0.5 1 0 +382 0 0 0.5 1 0 +383 0 0 0.5 1 0 +384 0 0 0.5 1 0 +385 0 0 0.5 1 0 +386 1 0 0.5 1 0 +387 0 0 0.5 1 0 +388 0 0 0.5 1 0 +389 0 0 0.5 1 0 +390 0 0 0.5 1 0 +391 1 0 0.5 1 0 +392 0 0 0.5 1 0 +393 0 0 0.5 1 0 +394 0 0 0.5 1 0 +395 0 0 0.5 1 0 +396 0 0 0.5 1 0 +397 0 0 0.5 1 0 +398 0 0 0.5 1 0 +399 0 0 0.5 1 0 +400 1 0 0.5 1 0 +401 0 0 0.5 1 0 +402 0 0 0.5 1 0 +403 0 0 0.5 1 0 +404 0 0 0.5 1 0 +405 0 0 0.5 1 0 +406 0 0 0.5 1 0 +407 0 0 0.5 1 0 +408 0 0 0.5 1 0 +409 0 0 0.5 1 0 +410 0 0 0.5 1 0 +411 0 0 0.5 1 0 +412 1 0 0.5 1 0 +413 0 0 0.5 1 0 +414 1 0 0.5 1 0 +415 0 0 0.5 1 0 +416 1 0 0.5 1 0 +417 0 0 0.5 1 0 +418 0 0 0.5 1 0 +419 0 0 0.5 1 0 +420 0 0 0.5 1 0 +421 1 0 0.5 1 0 +422 0 0 0.5 1 0 +423 0 0 0.5 1 0 +424 0 0 0.5 1 0 +425 1 0 0.5 1 0 +426 0 0 0.5 1 0 +427 1 0 0.5 1 0 +428 0 0 0.5 1 0 +429 0 0 0.5 1 0 +430 0 0 0.5 1 0 +431 0 0 0.5 1 0 +432 0 0 0.5 1 0 +433 0 0 0.5 1 0 +434 0 0 0.5 1 0 +435 1 0 0.5 1 0 +436 1 0 0.5 1 0 +437 0 0 0.5 1 0 +438 0 0 0.5 1 0 +439 0 0 0.5 1 0 +440 1 0 0.5 1 0 +441 0 0 0.5 1 0 +442 0 0 0.5 1 0 +443 0 0 0.5 1 0 +444 0 0 0.5 1 0 +445 0 0 0.5 1 0 +446 0 0 0.5 1 0 +447 0 0 0.5 1 0 +448 0 0 0.5 1 0 +449 1 0 0.5 1 0 +450 0 0 0.5 1 0 +451 0 0 0.5 1 0 +452 0 0 0.5 1 0 +453 1 0 0.5 1 0 +454 0 0 0.5 1 0 +455 1 0 0.5 1 0 +456 1 0 0.5 1 0 +457 1 0 0.5 1 0 +458 0 0 0.5 1 0 +459 0 0 0.5 1 0 +460 0 0 0.5 1 0 +461 0 0 0.5 1 0 +462 0 0 0.5 1 0 +463 0 0 0.5 1 0 +464 0 0 0.5 1 0 +465 1 0 0.5 1 0 +466 1 0 0.5 1 0 +467 1 0 0.5 1 0 +468 0 0 0.5 1 0 +469 0 0 0.5 1 0 +470 0 0 0.5 1 0 +471 0 0 0.5 1 0 +472 0 0 0.5 1 0 +473 0 0 0.5 1 0 +474 0 0 0.5 1 0 +475 0 0 0.5 1 0 +476 0 0 0.5 1 0 +477 0 0 0.5 1 0 +478 0 0 0.5 1 0 +479 1 0 0.5 1 0 +480 0 0 0.5 1 0 +481 0 0 0.5 1 0 +482 1 0 0.5 1 0 +483 1 0 0.5 1 0 +484 0 0 0.5 1 0 +485 0 0 0.5 1 0 +486 0 0 0.5 1 0 +487 1 0 0.5 1 0 +488 1 0 0.5 1 0 +489 1 0 0.5 1 0 +490 0 0 0.5 1 0 +491 1 0 0.5 1 0 +492 0 0 0.5 1 0 +493 1 0 0.5 1 0 +494 0 0 0.5 1 0 +495 0 0 0.5 1 0 +496 0 0 0.5 1 0 +497 0 0 0.5 1 0 +498 0 0 0.5 1 0 +499 0 0 0.5 1 0 +500 0 0 0.5 1 0 +501 0 0 0.5 1 0 +502 0 0 0.5 1 0 +503 0 0 0.5 1 0 +504 0 0 0.5 1 0 +505 0 0 0.5 1 0 +506 1 0 0.5 1 0 +507 0 0 0.5 1 0 +508 0 0 0.5 1 0 +509 0 0 0.5 1 0 +510 0 0 0.5 1 0 +511 0 0 0.5 1 0 +512 0 0 0.5 1 0 +513 0 0 0.5 1 0 +514 1 0 0.5 1 0 +515 1 0 0.5 1 0 +516 0 0 0.5 1 0 +517 0 0 0.5 1 0 +518 0 0 0.5 1 0 +519 1 0 0.5 1 0 +520 0 0 0.5 1 0 +521 0 0 0.5 1 0 +522 1 0 0.5 1 0 +523 1 0 0.5 1 0 +524 0 0 0.5 1 0 +525 0 0 0.5 1 0 +526 0 0 0.5 1 0 +527 0 0 0.5 1 0 +528 0 0 0.5 1 0 +529 0 0 0.5 1 0 +530 1 0 0.5 1 0 +531 0 0 0.5 1 0 +532 0 0 0.5 1 0 +533 0 0 0.5 1 0 +534 0 0 0.5 1 0 +535 0 0 0.5 1 0 +536 0 0 0.5 1 0 +537 0 0 0.5 1 0 +538 0 0 0.5 1 0 +539 0 0 0.5 1 0 +540 0 0 0.5 1 0 +541 0 0 0.5 1 0 +542 0 0 0.5 1 0 +543 0 0 0.5 1 0 +544 0 0 0.5 1 0 
+545 0 0 0.5 1 0 +546 1 0 0.5 1 0 +547 0 0 0.5 1 0 +548 0 0 0.5 1 0 +549 1 0 0.5 1 0 +550 0 0 0.5 1 0 +551 0 0 0.5 1 0 +552 0 0 0.5 1 0 +553 0 0 0.5 1 0 +554 0 0 0.5 1 0 +555 0 0 0.5 1 0 +556 0 0 0.5 1 0 +557 0 0 0.5 1 0 +558 0 0 0.5 1 0 +559 0 0 0.5 1 0 +560 0 0 0.5 1 0 +561 0 0 0.5 1 0 +562 0 0 0.5 1 0 +563 0 0 0.5 1 0 +564 0 0 0.5 1 0 +565 1 0 0.5 1 0 +566 0 0 0.5 1 0 +567 0 0 0.5 1 0 +568 1 0 0.5 1 0 +569 1 0 0.5 1 0 +570 1 0 0.5 1 0 +571 1 0 0.5 1 0 +572 0 0 0.5 1 0 +573 0 0 0.5 1 0 +574 1 0 0.5 1 0 +575 0 0 0.5 1 0 +576 0 0 0.5 1 0 +577 0 0 0.5 1 0 +578 0 0 0.5 1 0 +579 0 0 0.5 1 0 +580 0 0 0.5 1 0 +581 1 0 0.5 1 0 +582 1 0 0.5 1 0 +583 0 0 0.5 1 0 +584 0 0 0.5 1 0 +585 0 0 0.5 1 0 +586 1 0 0.5 1 0 +587 0 0 0.5 1 0 +588 1 0 0.5 1 0 +589 0 0 0.5 1 0 +590 1 0 0.5 1 0 +591 1 0 0.5 1 0 +592 1 0 0.5 1 0 +593 0 0 0.5 1 0 +594 1 0 0.5 1 0 +595 0 0 0.5 1 0 +596 0 0 0.5 1 0 +597 0 0 0.5 1 0 +598 0 0 0.5 1 0 +599 0 0 0.5 1 0 +600 0 0 0.5 1 0 +601 0 0 0.5 1 0 +602 0 0 0.5 1 0 +603 1 0 0.5 1 0 +604 1 0 0.5 1 0 +605 1 0 0.5 1 0 +606 0 0 0.5 1 0 +607 0 0 0.5 1 0 +608 1 0 0.5 1 0 +609 0 0 0.5 1 0 +610 1 0 0.5 1 0 +611 1 0 0.5 1 0 +612 1 0 0.5 1 0 +613 0 0 0.5 1 0 +614 0 0 0.5 1 0 +615 0 0 0.5 1 0 +616 0 0 0.5 1 0 +617 0 0 0.5 1 0 +618 0 0 0.5 1 0 +619 0 0 0.5 1 0 +620 0 0 0.5 1 0 +621 0 0 0.5 1 0 +622 0 0 0.5 1 0 +623 0 0 0.5 1 0 +624 0 0 0.5 1 0 +625 0 0 0.5 1 0 +626 1 0 0.5 1 0 +627 0 0 0.5 1 0 +628 0 0 0.5 1 0 +629 0 0 0.5 1 0 +630 0 0 0.5 1 0 +631 0 0 0.5 1 0 +632 0 0 0.5 1 0 +633 1 0 0.5 1 0 +634 0 0 0.5 1 0 +635 0 0 0.5 1 0 +636 1 0 0.5 1 0 +637 0 0 0.5 1 0 +638 0 0 0.5 1 0 +639 0 0 0.5 1 0 +640 0 0 0.5 1 0 +641 0 0 0.5 1 0 +642 0 0 0.5 1 0 +643 0 0 0.5 1 0 +644 0 0 0.5 1 0 +645 0 0 0.5 1 0 +646 0 0 0.5 1 0 +647 0 0 0.5 1 0 +648 1 0 0.5 1 0 +649 0 0 0.5 1 0 +650 0 0 0.5 1 0 +651 0 0 0.5 1 0 +652 0 0 0.5 1 0 +653 0 0 0.5 1 0 +654 0 0 0.5 1 0 +655 0 0 0.5 1 0 +656 0 0 0.5 1 0 +657 0 0 0.5 1 0 +658 1 0 0.5 1 0 +659 0 0 0.5 1 0 +660 0 0 0.5 1 0 +661 0 0 0.5 1 0 +662 0 0 0.5 1 0 +663 0 0 0.5 1 0 +664 0 0 0.5 1 0 +665 0 0 0.5 1 0 +666 0 0 0.5 1 0 +667 0 0 0.5 1 0 +668 1 0 0.5 1 0 +669 1 0 0.5 1 0 +670 1 0 0.5 1 0 +671 0 0 0.5 1 0 +672 0 0 0.5 1 0 +673 0 0 0.5 1 0 +674 0 0 0.5 1 0 +675 0 0 0.5 1 0 +676 0 0 0.5 1 0 +677 0 0 0.5 1 0 +678 0 0 0.5 1 0 +679 0 0 0.5 1 0 +680 1 0 0.5 1 0 +681 1 0 0.5 1 0 +682 0 0 0.5 1 0 +683 0 0 0.5 1 0 +684 0 0 0.5 1 0 +685 0 0 0.5 1 0 +686 0 0 0.5 1 0 +687 0 0 0.5 1 0 +688 0 0 0.5 1 0 +689 0 0 0.5 1 0 +690 0 0 0.5 1 0 +691 1 0 0.5 1 0 +692 0 0 0.5 1 0 +693 0 0 0.5 1 0 +694 0 0 0.5 1 0 +695 0 0 0.5 1 0 +696 1 0 0.5 1 0 +697 1 0 0.5 1 0 +698 1 0 0.5 1 0