<?xml version="1.0"?>
<doc>
<assembly>
<name>Microsoft.ML.FastTree</name>
</assembly>
<members>
<member name="T:Microsoft.ML.Trainers.FastTree.BinFinder">
<summary>
A class that bins vectors of doubles into a specified number of equal mass bins.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinFinder.FindDistinctCounts(Microsoft.ML.Data.VBuffer{System.Double}@,System.Double[],System.Double[],System.Int32[])">
<summary>
Finds the distinct values in the <paramref name="values"/>. You must have
pre-allocated <paramref name="distinctValues"/> and <paramref name="counts"/> yourself.
The scheme is destructive, because it modifies the arrays within <paramref name="values"/>.
</summary>
<param name="values">The values we are binning</param>
<param name="valueBuffer">A buffer space to work over the values, so the original
values aren't modified.</param>
<param name="distinctValues">This working array will be filled with a sorted list of the
distinct values detected within <paramref name="values"/></param>
<param name="counts">This working array will be filled with the number of occurrences of each
corresponding distinct value detected within <paramref name="values"/></param>
<returns>The logical length of both <paramref name="distinctValues"/> and
<paramref name="counts"/></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinFinder.FindBinsFromDistinctCounts(System.Double[],System.Int32[],System.Int32,System.Int32,System.Double[]@,System.Int32@)">
<summary>
Finds the bins.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinFinder.IsTrivial(System.Int32[],System.Int32,System.Int32)">
<summary>
Check to see if we can "trivialize" this feature, because it would
be impossible to split with the indicated minimum examples per leaf.
</summary>
<param name="distinctCounts">The counts of each distinct bin value</param>
<param name="numDistinct">The logical length of <paramref name="distinctCounts"/></param>
<param name="minPerLeaf">The minimum examples per leaf we are filtering on</param>
<returns>Whether this feature is trivial, that is, it would be impossible to split on it</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinFinder.FindBins(Microsoft.ML.Data.VBuffer{System.Double}@,System.Int32,System.Int32,System.Double[]@)">
<summary>
Finds the bins.
</summary>
<param name="values">The values we are binning</param>
<param name="maxBins">The maximum number of bins to find</param>
<param name="minPerLeaf">The minimum number of documents per leaf</param>
<param name="binUpperBounds">The calculated upper bound of each bin</param>
<returns>Whether finding the bins is successful. If there were NaN values in <paramref name="values"/>,
this will return false and the output arrays will be <c>null</c>. Otherwise it will return true.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IniFileParserInterface.GetFeatureMap">
<summary>
Gets the map between feature names and indices
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IniFileParserInterface.GetFeatureEvaluators">
<summary>
Gets the list of FeatureEvaluators
</summary>
<returns></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IniFileParserInterface.#ctor(System.IntPtr)">
<summary>
Creates an InputExtractor wrapper for a given unmanaged pointer
</summary>
<param name="pInputExtractor"></param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IniFileParserInterface.FeatureMap">
<summary>
Wraps the functions of InputExtractor that map between raw feature names and indices
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IniFileParserInterface.FeatureEvaluator">
<summary>
Wraps the Evaluate method of InputExtractor (and associated methods)
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BoostingFastTreeTrainerBase`3.BsrMaxTreeOutput">
<summary>
Retrieves max tree output if best regression step option is active or returns negative value otherwise.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dataset">
<summary>
A dataset of features.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.#ctor(Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton,Microsoft.ML.Trainers.FastTree.FeatureFlockBase[])">
<summary>
Initializes a new instance of the <see cref="T:Microsoft.ML.Trainers.FastTree.Dataset"/> class.
</summary>
<param name="datasetSkeleton">The dataset skeleton corresponding to the features</param>
<param name="flocks">An array of feature flocks</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.MapFeatureToFlockAndSubFeature(System.Int32,System.Int32@,System.Int32@)">
<summary>
Maps a global feature index, to the index of the particular flock, as well as the
index of the subfeature within that flock.
</summary>
<param name="feature">The index of the feature at the dataset level</param>
<param name="flock">The index of the flock containing this feature</param>
<param name="subfeature">The index of the feature within the flock</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.FlockToFirstFeature(System.Int32)">
<summary>
Given a flock index, returns the index of the first feature in this flock.
</summary>
<param name="flock">Index of the flock</param>
<returns>The index of the first feature that belongs to this flock</returns>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.Skeleton">
<summary>
Gets the dataset skeleton.
</summary>
<value>The skeleton.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.Ratings">
<summary>
Gets the labels.
</summary>
<value>The labels.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.Boundaries">
<summary>
Gets the boundaries.
</summary>
<value>The boundaries.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.QueryIds">
<summary>
Gets the query ids.
</summary>
<value>The query ids.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.DocIds">
<summary>
Gets the doc ids.
</summary>
<value>The doc ids.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.MaxDcg">
<summary>
Gets the max DCG.
</summary>
<value>The max DCG.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.MaxDocsPerQuery">
<summary>
Gets the max number of docs per any query.
</summary>
<value>The max number of docs per any query.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.NumDocs">
<summary>
Gets the number of docs in the entire dataset.
</summary>
<value>The number of docs in the entire dataset.</value>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.NumDocsInQuery(System.Int32)">
<summary>
Gets the number of docs in a given query.
</summary>
<param name="queryIndex">Index of the query.</param>
<returns>the number of docs in the query</returns>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.NumQueries">
<summary>
Gets the number of queries in the dataset.
</summary>
<value>The number of queries in the dataset.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.DocToQuery">
<summary>
Returns the document to query mapping
</summary>
<returns>For each document index, the index of the query that document belongs to</returns>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.SampleWeights">
<summary>
Returns the query weights object in underlying dataset skeleton
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.Flocks">
<summary>
Gets the array of features.
</summary>
<value>The array of features.</value>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.NumFlocks">
<summary>
The number of feature flocks.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.NumFeatures">
<summary>
The number of features.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.Split(System.Double[],System.Int32,System.Boolean)">
<summary>
Split a dataset by queries into disjoint parts
</summary>
<param name="fraction">an array of the fractional size of each part, must sum to 1.0</param>
<param name="randomSeed">a seed that deterministically defines the split</param>
<param name="destroyThisDataset">do you want the features of this dataset to be destroyed on-the-fly as the new datasets are created</param>
<returns></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.GetSubDataset(System.Int32[],System.Boolean)">
<summary>
Creates a new Dataset, which includes a subset of the docs in this Dataset.
</summary>
<param name="docIndices">A sorted array of doc indices</param>
<param name="destroyThisDataset">Determines if this Dataset is deleted on the fly as the
new one is created (this reduces peak memory)</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.GetFeatureBinRowwiseIndexer(System.Boolean[])">
<summary>
Returns a row-wise forward indexer across multiple features in the dataset.
</summary>
<param name="activeFeatures">Boolean array indicating active features, or null to
indicate all features should be used</param>
<returns>Row forward indexer</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton">
<summary>
A class that contains all of the feature-independent data of the dataset
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.#ctor(System.Int16[],System.Int32[],System.UInt64[],System.UInt64[],System.Double[])">
<summary>
Initializes a new instance of the <see cref="T:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton"/> class.
</summary>
<param name="ratings"></param>
<param name="boundaries">The boundaries.</param>
<param name="queryIds">The query ids.</param>
<param name="docIds">The doc ids.</param>
<param name="actualTargets"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.#ctor(System.Int16[],System.Int32[],System.UInt64[],System.UInt64[],System.Double[][],System.Double[])">
<summary>
Initializes a new instance of the <see cref="T:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton"/> class.
</summary>
<param name="ratings">The ratings.</param>
<param name="boundaries">The boundaries.</param>
<param name="queryIds">The query ids.</param>
<param name="docIds">The doc ids.</param>
<param name="maxDcg">The vector of maxDCG.</param>
<param name="actualTargets"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.CheckConsistency">
<summary>
Checks the consistency of the DatasetSkeleton
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.Concat(Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton[])">
<summary>
Takes an array of DatasetSkeleton objects and concatenates them into one big DatasetSkeleton
</summary>
<param name="parts">An array of DatasetSkeletons</param>
<returns>A concatenated DatasetSkeleton</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.MaxDcgRange(System.Int16[],System.Int32[],System.Int32)">
<summary>
Calculates natural-based max DCG at all truncations from 1 to trunc
</summary>
<param name="labels">vector of labels</param>
<param name="boundaries">vector of query boundaries</param>
<param name="trunc">max truncation</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.SetConcatenatedAuxiliaryData(Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton[],Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton)">
<summary>
Given the auxiliary data in a bunch of parts, set the concatenated dataset appropriately.
</summary>
<param name="parts">The individual parts of the dataset</param>
<param name="concat">The concatenated version of this dataset</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.SetData(System.String,System.Array,System.Boolean)">
<summary>
Sets some named query or document level auxiliary data.
</summary>
<param name="name">The name of the parameter</param>
<param name="array"></param>
<param name="queryLevel"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton.GetData``1(System.String)">
<summary>
Retrieves some auxiliary data previously set to this skeleton.
</summary>
<typeparam name="T">The type of the array, which should match the type passed in</typeparam>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dataset.RowForwardIndexer">
<summary>
Structure allowing forward indexing by row, across multiple features in the dataset.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Dataset.RowForwardIndexer.Row.Item(System.Int32)">
<summary>
Indexes the value of a feature for this row.
</summary>
<param name="featureIndex">The feature index</param>
<returns>The binned valued of a feature for this row</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dataset.RowForwardIndexer.#ctor(Microsoft.ML.Trainers.FastTree.Dataset,System.Boolean[])">
<summary>
Constructor.
</summary>
<param name="dataset">The dataset to create the indexer over</param>
<param name="active">Either null to indicate all columns should be active, or
a boolean array of length equal to the number of features that should be active</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DatasetUtils">
<summary>
Loads training/validation/test sets from file
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DatasetUtils.CreateFeatureFromQueryId(Microsoft.ML.Trainers.FastTree.Dataset.DatasetSkeleton)">
<summary>
Attempts to create a feature from a ulong array. The intent
is that this will handle query ID.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DenseIntArray">
<summary>
Abstract class implementing some common functions of the dense int array types.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.DenseIntArray.Item(System.Int32)">
<summary>
Gets or sets the value at this index.
Value must be in legal range 0...((2^<see cref="P:Microsoft.ML.Trainers.FastTree.IntArray.BitsPerItem"/>)-1).
</summary>
<param name="index">Index of value to get or set</param>
<returns>The value at this index</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DenseIntArray.Clone(System.Int32[])">
<summary>
Clone an IntArray containing only the items indexed by <paramref name="itemIndices"/>
</summary>
<param name="itemIndices"> item indices will be contained in the cloned IntArray </param>
<returns> The cloned IntArray </returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dense0BitIntArray">
<summary>
A "null" feature representing only zeros.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense0BitIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense0BitIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dense10BitIntArray">
<summary>
A class to represent features using 10 bits.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense10BitIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense10BitIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dense8BitIntArray">
<summary>
A class to represent features using 8 bits
</summary>
<remarks>Represents values -1...(2^s-2)
0-bit array only represents the value -1</remarks>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense8BitIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense8BitIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dense4BitIntArray">
<summary>
A class to represent features using 4 bits.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.Dense4BitIntArray._data">
<summary>
For a given byte, the high 4 bits is the first value, the low 4 bits is the next value.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense4BitIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense4BitIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dense16BitIntArray">
<summary>
A class to represent features using 16 bits.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense16BitIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense16BitIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Dense32BitIntArray">
<summary>
A class to represent features using 32 bits.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense32BitIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Dense32BitIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Feature">
<summary>
Represents a binned feature
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Feature.FeatureType">
<summary>
The type of the feature. This is serialized as part of the Bing extraction BIN file,
so it should remain binary compatible from version to version.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Feature.Length">
<summary>
Returns the number of entries (documents) in the feature
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Feature.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Feature.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.Feature.Bins">
<summary>
Gets the compactIntArray of bin values.
</summary>
<value>The bin values.</value>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TsvFeature.#ctor(Microsoft.ML.Trainers.FastTree.IntArray,System.UInt32[],System.String)">
<summary>
Initializes a new instance of the <see cref="T:Microsoft.ML.Trainers.FastTree.Feature"/> class.
</summary>
<param name="bins">The bins.</param>
<param name="valueMap"></param>
<param name="name">The name.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TsvFeature.#ctor(System.String,System.Int32)">
<summary>
Constructs an empty (all zero) feature
</summary>
<param name="name"></param>
<param name="length"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TsvFeature.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TsvFeature.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TsvFeature.ValueMap">
<summary>
Gets the value that represents each bin
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TsvFeature.Clone(System.Int32[])">
<summary>
Clone a TSVFeature containing only the items indexed by <paramref name="itemIndices"/>
</summary>
<param name="itemIndices"> item indices will be contained in the cloned TSVFeature </param>
<returns> The cloned TSVFeature </returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TsvFeature.Concat(Microsoft.ML.Trainers.FastTree.TsvFeature[])">
<summary>
Concatenates an array of features into one long feature
</summary>
<param name="parts">An array of features</param>
<returns>A concatenated feature</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.PerBinStats">
<summary>
Holds statistics per bin value for a feature. These are yielded by <see cref="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.GetBinStats(System.Int32)"/>
to indicate after a <see cref="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.Sumup(System.Int32,System.Boolean[],System.Int32,System.Double,System.Double,System.Double[],System.Double[],System.Int32[])"/> call over a subset of the dataset. These statistics
are then used in <see cref="T:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner"/> to find splitting on which bin will yield the
best least squares solution
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.PerBinStats.SumTargets">
<summary>Sum of all target values in a partition for the bin.</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.PerBinStats.SumWeights">
<summary>Sum of all target weights in a partition. May be 0 if we are not doing weighted training.</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.PerBinStats.Count">
<summary>Count of the documents in this partition for the bin.</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SufficientStatsBase">
<summary>
These objects are stateful, reusable objects that enable the collection of sufficient
stats per feature flock, per node or leaf of a tree, to enable it to find the "best"
splits.
Each instance of this corresponds to a single flock, but multiple of these will be created
per flock. Note that feature indices, whenever present, refer to the feature within the
particular flock the same as they do with <see cref="T:Microsoft.ML.Trainers.FastTree.FeatureFlockBase"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.IsSplittable">
<summary>
An array as large as there are count of features in the corresponding flock. Used by
<see cref="T:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner"/> to indicate whether a particular
feature has been judged to be potentially splittable or not.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.Sumup(System.Int32,System.Boolean[],System.Int32,System.Double,System.Double,System.Double[],System.Double[],System.Int32[])">
<summary>
Performs the accumulation of sufficient statistics for active features within a flock.
</summary>
<param name="featureOffset">Offset into <paramref name="active"/> where we should start querying active stats</param>
<param name="active">The indicator array of whether features are active or not, logically starting for
this flock at <paramref name="featureOffset"/>, where after this </param>
<param name="numDocsInLeaf">Minimum documents total in this leaf</param>
<param name="sumTargets">The sum of the targets for this leaf</param>
<param name="sumWeights">The sum of the weights for this leaf</param>
<param name="outputs">The target values, indexed by <paramref name="numDocsInLeaf"/></param>
<param name="weights"></param>
<param name="docIndices">The first <paramref name="numDocsInLeaf"/> entries indicate the row indices
in this leaf, and these row indices are used to </param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.SumupCore(System.Int32,System.Boolean[],System.Int32,System.Double,System.Double,System.Double[],System.Double[],System.Int32[])">
<summary>
The core implementation called from <see cref="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.Sumup(System.Int32,System.Boolean[],System.Int32,System.Double,System.Double,System.Double[],System.Double[],System.Int32[])"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.Subtract(Microsoft.ML.Trainers.FastTree.SufficientStatsBase)">
<summary>
Subtracts one sufficient statistics from another. Note that this other
sufficient statistics object must be over the same feature flock in order
to be meaningful, as well as have undergone <see cref="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.Sumup(System.Int32,System.Boolean[],System.Int32,System.Double,System.Double,System.Double[],System.Double[],System.Int32[])"/> under
the same set of active features.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.SizeInBytes">
<summary>
An approximation of the size in bytes used by this structure. Used for tracking
and memory size estimation purposes.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase.CopyFeatureHistogram(System.Int32,Microsoft.ML.Trainers.FastTree.PerBinStats[]@)">
<summary>
Gets a full copy of the histogram for one sub-feature.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SufficientStatsBase`1">
<summary>
Type specific implementation of sufficient stats.
</summary>
<typeparam name="TSuffStats">The type of sufficient stats that we will be able to do
"peer" operations against, like subtract. This will always be the derived class itself.</typeparam>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SufficientStatsBase`1.SubtractCore(`0)">
<summary>
Actual core implementation of subtraction. By the time this is called, the
base class has ensured that the feature flock of this is the same as the
feature flock in the other.
</summary>
<param name="other">The sufficient statistics we are subtracting</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FeatureFlockBase">
<summary>
A feature flock is a collection of features, grouped together because storing the
features and performing the key operations on them in a collection can be done
more efficiently than if they were stored as separate features.
Since this is a collection of features, feature specific quantities and methods
will have a feature index parameter. Note that this index is always, for every
flock, from 0 up to but not including <see cref="F:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.Count"/>. Now,
in the larger context of a <see cref="T:Microsoft.ML.Trainers.FastTree.Dataset"/> holding many flocks, the
individual features might have some sort of "dataset-wide" index, but this is
considered the business of the dataset, not the flocks themselves. See
<see cref="M:Microsoft.ML.Trainers.FastTree.Dataset.MapFeatureToFlockAndSubFeature(System.Int32,System.Int32@,System.Int32@)"/> to see some details of this
dataset-wide versus flock-wide feature index.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.Count">
<summary>
The number of features contained within this flock.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.Examples">
<summary>
The number of training examples represented by the features within this flock.
This should be the same for all flocks within a dataset.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.Categorical">
<summary>
Flock is a categorical feature.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.SizeInBytes">
<summary>
An approximation of the size in bytes used by this structure. Used for estimating
memory usage of the tree learner.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.CreateSufficientStats(System.Boolean)">
<summary>
A reusable structure for tracking the sufficient statistics for tree learning
of the features in this flock.
</summary>
<param name="hasWeights">Whether structures related to tracking
example weights should be allocated</param>
<returns>A sufficient statistics object</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.GetIndexer(System.Int32)">
<summary>
Returns a forward indexer for a single feature. This has a default implementation that
relies on <see cref="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.GetFlockIndexer"/>, but base classes may find it beneficial from
a performance perspective to provide their own implementation.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.GetFlockIndexer">
<summary>
Returns a forward indexer for all features within the flock.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.BinCount(System.Int32)">
<summary>
Given a feature index, return the number of distinct bins there are for that feature.
This will be the length of <see cref="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.BinUpperBounds(System.Int32)"/> for this feature. This is also
the upper exclusive limit on the binned value seen for this feature.
</summary>
<param name="featureIndex">The index of the feature within the flock</param>
<returns>The number of distinct bins for that feature within the flock</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.Trust(System.Int32)">
<summary>
The multiplier on the gain for any particular feature. This can be used to make
features appear more or less attractive. The default value should be considered
to be 1.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.BinUpperBounds(System.Int32)">
<summary>
An array of increasing values, forming the boundaries of all the binned values.
</summary>
<param name="featureIndex"></param>
<returns>The bin upper bounds for a feature. This array will have the same
length as <see cref="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.BinCount(System.Int32)"/>.</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.GenericIntArrayForwardIndexer">
<summary>
If you need to implement <see cref="M:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.GetIndexer(System.Int32)"/> you can use
<see cref="T:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.GenericIntArrayForwardIndexer"/>. This will be slower than a
specialized implementation but is at least a useful shim.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.FlockForwardIndexerBase">
<summary>
Interface for objects that can index into a flock, but only with a nondecreasing sequence of row
indices from access to access. It is fine for feature indices to appear in any order.
A feature group analogy to <see cref="T:Microsoft.ML.Trainers.FastTree.IIntArrayForwardIndexer"/> but for feature flocks instead of
<see cref="T:Microsoft.ML.Trainers.FastTree.IntArray"/> instances.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.FlockForwardIndexerBase.Flock">
<summary>
The flock over which this feature flock was built.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.FeatureFlockBase.FlockForwardIndexerBase.Item(System.Int32,System.Int32)">
<summary>
Gets the element at the given position.
</summary>
<param name="featureIndex">The index of the feature within the flock</param>
<param name="rowIndex">Index of the row to get, should be non-decreasing from any previous
access on this indexer</param>
<returns>The value at the index</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1">
<summary>
A base class for a feature flock that wraps a single <see cref="T:Microsoft.ML.Trainers.FastTree.IntArray"/> that contains multiple
feature values using a concatenation of the non-zero ranges of each feature, and also in some way
that doing a <see cref="M:Microsoft.ML.Trainers.FastTree.IntArray.Sumup(Microsoft.ML.Trainers.FastTree.SumupInputData,Microsoft.ML.Trainers.FastTree.FeatureHistogram)"/> will accumulate sufficient statistics correctly for all
except the first (zero) bin.
</summary>
<typeparam name="TIntArray">The type of <see>IntArray</see> this implementation wraps</typeparam>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1.#ctor(`0,System.Int32[],System.Double[][],System.Boolean)">
<summary>
Constructor for the <see cref="T:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1"/>.
</summary>
<param name="bins">The binned version of the features, stored collectively in one
<see cref="T:Microsoft.ML.Trainers.FastTree.IntArray"/>, where 0 indicates that all features are in the "cold" bin
of 0, and a non-zero value indicates one of the features is "hot," where which
feature is hot and what value it has is indicated by <paramref name="hotFeatureStarts"/>.
The zero value is "shared" among all features, effectively, and the non-zero values
are the result of a shifted concatenation of the range of the non-zero values, for
each feature incorporated in the flock. See the example for more concrete information.
</param>
<param name="hotFeatureStarts">The ranges of values of <paramref name="bins"/>
where features start and stop. This is a non-decreasing array of integers. For
feature <c>f</c>, the elements at <c>f</c> and <c>f+1</c> indicate the minimum
and limit of values in <paramref name="bins"/> that indicate that the corresponding
feature is "hot" starting at a value of 1.</param>
<param name="binUpperBounds">The bin upper bounds structure</param>
<param name="categorical"></param>
<example>
Imagine we have a six row dataset, with two features, which if stored separately in,
say, a <see cref="T:Microsoft.ML.Trainers.FastTree.SingletonFeatureFlock"/>, would have bin values as follows.
<c>f0 = { 0, 1, 0, 0, 2, 0}</c>
<c>f1 = { 0, 0, 1, 0, 0, 1}</c>
These two are a candidate for a <see cref="T:Microsoft.ML.Trainers.FastTree.OneHotFeatureFlock"/>, because they never both
have a non-zero bin value for any row. Then, in order to represent this in this feature,
we would pass in this value for the <paramref name="bins"/>:
<c><paramref name="bins"/> = { 0, 1, 3, 0, 2, 3 }</c>
and this value for <paramref name="hotFeatureStarts"/>:
<c><paramref name="hotFeatureStarts"/> = { 1, 3, 4 }</c>
Note that the range of <paramref name="bins"/> is, aside from the zero, the concatenation
of the non-zero range of all constituent input features, and where the reconstruction of
what feature is which can be reconstructed from <paramref name="hotFeatureStarts"/>, which
for each feature specifies the range in <paramref name="bins"/> corresponding to the
"logical" bin value for that feature starting from 1.
Note that it would also have been legal for <paramref name="hotFeatureStarts"/> to be
larger than the actual observed range, for example, it could have been:
<c><paramref name="hotFeatureStarts"/> = { 1, 5, 8}</c>
or something. This could happen if binning happened over a different dataset from the data
being represented right now, for example, but this is a more complex case.
The <paramref name="binUpperBounds"/> would contain the upper bounds for both of these features,
which would be arrays large enough so that the maximum value of the logical bin for each feature
in the flock could index it. (So in this example, the first bin upper bound would be at least
length 3, and the second at least length 2.)
The <paramref name="categorical"/> indicates if the flock is a categorical feature.
</example>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1.SufficientStats">
<summary>
Stores the sufficient statistics for all features within this flock using a single
histogram, where the range of what accumulated sufficient statistics are relevant
to what feature can be inferred by <see cref="F:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1.HotFeatureStarts"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1.SufficientStats.Hist">
<summary>
Stores the sufficient statistics for all features within this flock using a single
histogram, where the range of what accumulated sufficient statistics correspond to
what actual logical feature depends on <see cref="F:Microsoft.ML.Trainers.FastTree.SinglePartitionedIntArrayFlockBase`1.HotFeatureStarts"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FeatureHistogram">
<summary>
Class to represent statistics of the feature used by LeastSquaresRegressionTreeLearner
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureHistogram.#ctor(Microsoft.ML.Trainers.FastTree.IntArray,System.Int32,System.Boolean)">
<summary>
Make a new FeatureHistogram
</summary>
<param name="bins">The bins we will be calculating sumups over</param>
<param name="numBins">The number of bins; should be at least as large as the number of distinct bin values in <paramref name="bins"/></param>
<param name="useWeights">Allocates weights array when true</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureHistogram.EstimateMemoryUsedForFeatureHistogram(System.Int32,System.Boolean)">
<summary>
This function returns the estimated memory used for a FeatureHistogram object according to given
number of bins.
</summary>
<param name="numBins">number of bins</param>
<param name="hasWeights">weights array is counted when true</param>
<returns>estimated size of memory used for a feature histogram object</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeatureHistogram.Subtract(Microsoft.ML.Trainers.FastTree.FeatureHistogram)">
<summary>
Subtract from myself the counts of the child histogram
</summary>
<param name="child">Another histogram to subtract</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IntArray">
<summary>
An object representing an array of integers
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.IntArray.Length">
<summary>
The virtual length of the array
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.NumBitsNeeded(System.Int32)">
<summary>
Number of bytes needed to store this number of values
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.New(System.Byte[],System.Int32@)">
<summary>
Creates a new int array given a byte representation
</summary>
<param name="buffer">the byte array representation of the dense array. The buffer can be larger than needed since the caller might be re-using buffers from a pool</param>
<param name="position">the position in the byte array</param>
<returns>the int array object</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.Clone(Microsoft.ML.Trainers.FastTree.IntArrayBits,Microsoft.ML.Trainers.FastTree.IntArrayType)">
<summary>
Clones the contents of this IntArray into an new IntArray
</summary>
<param name="bitsPerItem">The number of bits per item in the created IntArray</param>
<param name="type">The type of the new IntArray</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.Clone(System.Int32[])">
<summary>
Clone an IntArray containing only the items indexed by <paramref name="itemIndices"/>
</summary>
<param name="itemIndices"> item indices will be contained in the cloned IntArray </param>
<returns> The cloned IntArray </returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.GetIndexer">
<summary>
Gets an indexer into the array
</summary>
<returns>An indexer into the array</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IntArray.Compress(System.UInt32[])">
<summary>
Finds the most space efficient representation of the feature
(with slight slack cut for dense features). The behavior of
this method depends upon the static value <see cref="F:Microsoft.ML.Trainers.FastTree.IntArray.CompatibilityLevel"/>.
</summary>
<param name="workarray">Should be non-null if you want it to
consider segment arrays.</param>
<returns>Returns a more space efficient version of the array,
or the item itself if that is impossible, somehow.</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IIntArrayForwardIndexer">
<summary>
Interface for objects that can index into an <see cref="T:Microsoft.ML.Trainers.FastTree.IntArray"/>, but only with a non-decreasing sequence of indices.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.IIntArrayForwardIndexer.Item(System.Int32)">
<summary>
Gets the element at the given index.
</summary>
<param name="index">Index to get</param>
<returns>The value at the index</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.NHotFeatureFlock">
<summary>
This is a feature flock that misuses a property of <see cref="M:Microsoft.ML.Trainers.FastTree.DeltaSparseIntArray.Sumup(Microsoft.ML.Trainers.FastTree.SumupInputData,Microsoft.ML.Trainers.FastTree.FeatureHistogram)"/>
that it works just fine with zero deltas, to "overload" rows into having their targets, thus
allowing a single sumup to accumulate multiple "features" on a single row.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.OneHotFeatureFlock">
<summary>
A feature flock for a set of features where per example at most one of the features has a
non-zero bin.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.OneHotFeatureFlock.Indexer.#ctor(Microsoft.ML.Trainers.FastTree.IIntArrayForwardIndexer,System.Int32,System.Int32)">
<summary>
Instantiates an indexer that translates from the "concatenated" bin space across all features,
into the original logical space for each individual feature.
</summary>
<param name="indexer">The indexer into the "shared" <see cref="T:Microsoft.ML.Trainers.FastTree.IntArray"/>, that we
are translating into the original logical space for this feature, where values in the
range of [<paramref name="min"/>,<paramref name="lim"/>) will map from 1 onwards, and all
other values will map to 0</param>
<param name="min">The minimum value from the indexer that will map to 1</param>
<param name="lim">The exclusive upper bound on values from the indexer</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DeltaRepeatIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DeltaRepeatIntArray.Clone(System.Int32[])">
<summary>
Clone an IntArray containing only the items indexed by <paramref name="itemIndices"/>
</summary>
<param name="itemIndices"> item indices will be contained in the cloned IntArray </param>
<returns> The cloned IntArray </returns>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.SegmentIntArray.TransitionCost">
<summary>
The cost of a transition between segments in bits.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SegmentIntArray.StatsOfBestEncoding(System.UInt32[],System.Int32,System.Boolean,System.Int64@,System.Int32@,System.UInt32@)">
<summary>
Finds the bits necessary for the optimal variable bit encoding of this
array. If we are also finding the actual optimal path, it can only work
within the limits described for <paramref name="bitsForMaxItem"/>.
This is a considerably less efficient managed analogue to the
C_SegmentFindOptimalPath and C_SegmentFindOptimalCost functions.
It is used by the class only when not using the unmanaged library.
</summary>
<param name="ivalues">The values for which we should find the optimal cost. If
findPath is active, the most significant 5 bits will be used to store the bitness
with which this path should be chosen.</param>
<param name="bitsForMaxItem">This should be the maximum number of bits necessary
to encode the largest item in that array, or a higher value. Owing to the nature
of the values as 32 bit quantities this value should be in the range [0,32], or
[0,27] if we are finding the path, since with the 5 bits used to store the
bitness the total cannot exceed 31.</param>
<param name="findPath">Whether we should find the best path, by also storing the
optimal path in the 5 most significant bits.</param>
<param name="bits">The number of bits necessary for the optimal encoding.</param>
<param name="transitions">The number of transitions necessary in the
optimal encoding (only if findPath is true).</param>
<param name="max">The maximum element in the ivalues array.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.SegmentIntArray.Clone(System.Int32[])">
<summary>
Clone an IntArray containing only the items indexed by <paramref name="itemIndices"/>
</summary>
<param name="itemIndices"> item indices will be contained in the cloned IntArray </param>
<returns> The cloned IntArray </returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SingletonFeatureFlock">
<summary>
The singleton feature flock is the simplest possible sort of flock, that is, a flock
over one feature.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DeltaSparseIntArray">
<summary>
This <see cref="T:Microsoft.ML.Trainers.FastTree.IntArray"/> implementation represents a sequence of values using parallel
arrays of both values, as well as deltas indicating the number of values to the next
explicit value. Values "between" these deltas are implicitly zero.
Note that it is possible to misuse the deltas by making some of them themselves 0, allowing
us to represent multiple values per row. In this case, <see cref="M:Microsoft.ML.Trainers.FastTree.IntArray.GetEnumerator"/>
and <see cref="M:Microsoft.ML.Trainers.FastTree.IntArray.GetIndexer"/> will not have sensible values, but
<see cref="M:Microsoft.ML.Trainers.FastTree.IntArray.Sumup(Microsoft.ML.Trainers.FastTree.SumupInputData,Microsoft.ML.Trainers.FastTree.FeatureHistogram)"/> will work and accumulate the same target into multiple bins.
This trick should be used with caution, and is useful for the <see cref="T:Microsoft.ML.Trainers.FastTree.NHotFeatureFlock"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DeltaSparseIntArray.#ctor(System.Int32,Microsoft.ML.Trainers.FastTree.IntArrayBits,System.Collections.Generic.IEnumerable{System.Collections.Generic.KeyValuePair{System.Int32,System.Int32}})">
<summary>
Construct a sparse int array from index, value pairs.
</summary>
<param name="length">The total length of the constructed array.</param>
<param name="bitsPerItem">The number of bits required to store the values.</param>
<param name="nonZeroValues">An ordered enumerable of (index,value) pairs.
Each index should be strictly increasing as the iterable proceeds.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DeltaSparseIntArray.Clone(System.Int32[])">
<summary>
Clone an IntArray containing only the items indexed by <paramref name="itemIndices"/>
</summary>
<param name="itemIndices"> item indices will be contained in the cloned IntArray </param>
<returns> The cloned IntArray </returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DeltaSparseIntArray.SizeInBytes">
<summary>
Returns the number of bytes written by the member ToByteArray()
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DeltaSparseIntArray.ToByteArray(System.Byte[],System.Int32@)">
<summary>
Writes a binary representation of this class to a byte buffer, at a given position.
The position is incremented to the end of the representation
</summary>
<param name="buffer">a byte array where the binary representation is written</param>
<param name="position">the position in the byte array</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastTreeShared">
<summary>
FastTreeTrainerBase is a generic class, so its static members are not shared across its instantiations.
This class provides a single object, common to all such classes, which we can use for locking purposes.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.TestData">
<summary>
If not null, it's a test data set passed in from training context. It will be converted to one element in
<see cref="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.Tests"/> by calling <see cref="M:Microsoft.ML.Trainers.FastTree.ExamplesToFastTreeBins.GetCompatibleDataset(Microsoft.ML.Data.RoleMappedData,Microsoft.ML.PredictionKind,System.Int32[],System.Boolean)"/> in <see cref="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.InitializeTests"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.TestSets">
<summary>
Data sets used to evaluate the prediction scores produced by the trained model during the training process.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.Tests">
<summary>
In the training process, <see cref="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.TrainSet"/>, <see cref="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.ValidSet"/>, <see cref="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.TestSets"/> would be
converted into <see cref="F:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.Tests"/> for efficient model evaluation.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.SchemaShape.Column,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32)">
<summary>
Constructor to use when instantiating the classes deriving from here through the API.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.#ctor(Microsoft.ML.IHostEnvironment,`0,Microsoft.ML.SchemaShape.Column)">
<summary>
Constructor that is used when invoking the classes deriving from this, through maml.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.GetTestGraphHeader">
<summary>
A virtual method that is used to print header of test graph.
Applications that need printing test graph are supposed to override
it to print specific test graph header.
</summary>
<returns> string representation of test graph header </returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.GetTestGraphLine">
<summary>
A virtual method that is used to print a single line of test graph.
Applications that need printing test graph are supposed to override
it to print a specific line of test graph after a new iteration is finished.
</summary>
<returns> string representation of a line of test graph </returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.ComputeTests">
<summary>
A virtual method that is used to compute test results after each iteration is finished.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTrainerBase`3.MakeGradientWrapper(Microsoft.ML.IChannel)">
<summary>
Creates weights wrapping (possibly, trivial) for gradient target values.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.DataConverter.BinUpperBounds">
<summary>
The per-feature bin upper bounds. Implementations may differ on when all of the items
in this array are initialized to non-null values but it must happen at least no later
than immediately after we return from <see cref="M:Microsoft.ML.Trainers.FastTree.DataConverter.GetDataset"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.DataConverter.FeatureMap">
<summary>
In the event that any features are filtered, this will contain the feature map, where
the indices are the indices of features within the dataset, and the tree as we are
learning, and the values are the indices of the features within the original input
data. This array is used to "rehydrate" the tree once we finish training, so that the
feature indices are once again over the full set of features, as opposed to the subset
of features we actually trained on. This can be null in the event that no filtering
occurred.
</summary>
<seealso cref="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.RemapFeatures(System.Int32[])"/>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.CalculateBins(Microsoft.ML.Trainers.FastTree.BinFinder,Microsoft.ML.Data.VBuffer{System.Double}@,System.Int32,System.Int32,System.Double[]@)">
<summary>
Bins an input vector of feature values.
</summary>
<param name="binFinder">The instance of the bin finder to use</param>
<param name="values">The values for one particular feature value across all examples</param>
<param name="maxBins">The maximum number of bins to find</param>
<param name="minDocsPerLeaf">The minimum number of documents per leaf, used to constrain the bins found</param>
<param name="upperBounds">The bin upper bounds, maximum length will be <paramref name="maxBins"/></param>
<returns>Whether finding the bins was successful or not. It will be unsuccessful iff <paramref name="values"/>
has any missing values. In that event, the out parameters will be left as null.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.CreateSingletonFlock(Microsoft.ML.IChannel,Microsoft.ML.Data.VBuffer{System.Double}@,System.Int32[],System.Double[])">
<summary>
Create a new feature flock with a given name, values and specified bin bounds.
</summary>
<param name="ch"></param>
<param name="values">The values for this feature, that will be binned.</param>
<param name="binnedValues">A working array of length equal to the length of the input feature vector</param>
<param name="binUpperBounds">The upper bounds of the binning of this feature.</param>
<returns>A derived binned feature vector.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.DiskImpl.ConstructDropSlotRanges(Microsoft.ML.Data.SlotCursor,Microsoft.Data.DataView.ValueGetter{Microsoft.ML.Data.VBuffer{System.Single}},Microsoft.ML.Data.VBuffer{System.Single}@)">
<summary>
Returns a slot dropper object that has ranges of slots to be dropped,
based on an examination of the feature values.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.MemImpl.CreateDatasetSkeleton">
<summary>
Create an artificial metadata object to pad the Dataset
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList">
<summary>
A mutable list of index,value that may be kept sparse or dense.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList.CountIndicesGT(System.Int32,System.Double)">
<summary>
Returns the count of all positions greater than an indicated value.
</summary>
<param name="length">The limit of indices to check</param>
<param name="gtValue">The value against which the greater-than
comparison is made</param>
<returns>The count of all indices in the range of 0 to <paramref name="length"/>
exclusive whose values are greater than <paramref name="gtValue"/></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList.AllIndicesGT(System.Int32,System.Double)">
<summary>
Return all indices that are greater than an indicated value.
</summary>
<param name="lim">The limit of indices to return</param>
<param name="gtValue">The value against which the greater-than
comparison is made</param>
<returns>All indices in the range of 0 to <paramref name="lim"/> exclusive
whose values are greater than <paramref name="gtValue"/>, in
increasing order</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList.Binned(System.Double[],System.Int32)">
<summary>
An enumerable of the row/bin pair of every non-zero bin row according to the
binning passed into this method.
</summary>
<param name="binUpperBounds">The binning to use for the enumeration</param>
<param name="length">The number of rows in this feature</param>
<returns>An enumerable that returns a pair of every row-index and binned value,
where the row indices are increasing, the binned values are positive</returns>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList.ForwardIndexer.Item(System.Int32,System.Int32)">
<summary>
Access the value of a particular feature, at a particular row.
</summary>
<param name="featureIndex">A feature index, which indexes not the global feature indices,
but the index into the subset of features specified at the constructor time</param>
<param name="rowIndex">The row index to access, which must be non-decreasing, and must
indeed be actually increasing for access on the same feature (for example, if you have two features,
it is OK to access <c>[1, 5]</c>, then <c>[0, 5]</c>, but once this is done you cannot
access the same feature at the same position).</param>
<returns></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList.ForwardIndexer.#ctor(Microsoft.ML.Trainers.FastTree.DataConverter.ValuesList[],System.Int32[],System.Int32[]@)">
<summary>
Initialize a forward indexer.
</summary>
<param name="values">Holds the values of the features</param>
<param name="features">The array of feature indices this will index</param>
<param name="workArray">A possibly shared working array, once used by this forward
indexer it should not be used in any previously created forward indexer</param>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.ExamplesToFastTreeBins.BinUpperBounds">
<summary> Bin boundaries </summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.Microsoft#ML#Model#ICalculateFeatureContribution#FeatureContributionCalculator">
<summary>
Used to determine the contribution of each feature to the score of an example by <see cref="T:Microsoft.ML.Transforms.FeatureContributionCalculatingTransformer"/>.
The calculation of feature contribution essentially consists in determining which splits in the tree have the most impact
on the final score and assigning the value of the impact to the features determining the split. More precisely, the contribution of a feature
is equal to the change in score produced by exploring the opposite sub-tree every time a decision node for the given feature is encountered.
Consider a simple case with a single decision tree that has a decision node for the binary feature F1. Given an example that has feature F1
equal to true, we can calculate the score it would have obtained if we chose the subtree corresponding to the feature F1 being equal to false
while keeping the other features constant. The contribution of feature F1 for the given example is the difference between the original score
and the score obtained by taking the opposite decision at the node corresponding to feature F1. This algorithm extends naturally to models with
many decision trees.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.#ctor(Microsoft.ML.IHostEnvironment,System.String,Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble,System.Int32,System.String)">
The following function is used in both FastTree and LightGBM so <see cref="T:Microsoft.ML.BestFriendAttribute"/> is required.
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.Microsoft#ML#Model#ICanSaveInSourceCode#SaveAsCode(System.IO.TextWriter,Microsoft.ML.Data.RoleMappedSchema)">
<summary>
write out a C# representation of the ensemble
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.Microsoft#ML#Model#ICanSaveInTextFormat#SaveAsText(System.IO.TextWriter,Microsoft.ML.Data.RoleMappedSchema)">
<summary>
Output the INI model to a given writer
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.Microsoft#ML#Model#ICanSaveInIniFormat#SaveAsIni(System.IO.TextWriter,Microsoft.ML.Data.RoleMappedSchema,Microsoft.ML.Calibrators.ICalibrator)">
<summary>
Output the INI model to a given writer
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.Microsoft#ML#Model#ICanGetSummaryInKeyValuePairs#GetSummaryInKeyValuePairs(Microsoft.ML.Data.RoleMappedSchema)">
<inheritdoc/>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.SaveEnsembleAsCode(System.IO.TextWriter,Microsoft.ML.Data.RoleMappedSchema)">
<summary>
returns a C# representation of the ensemble
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.SaveTreeAsCode(Microsoft.ML.Trainers.FastTree.InternalRegressionTree,System.IO.TextWriter,Microsoft.ML.Data.VBuffer{System.ReadOnlyMemory{System.Char}}@)">
<summary>
Convert a single tree to code, called recursively
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters.GetLeaf(System.Int32,Microsoft.ML.Data.VBuffer{System.Single}@,System.Collections.Generic.List{System.Int32}@)">
<summary>
Returns the leaf node in the requested tree for the given feature vector, and populates 'path' with the list of
internal nodes in the path from the root to that leaf. If 'path' is null a new list is initialized. All elements
in 'path' are cleared before filling in the current path nodes.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree">
<summary>
<see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree"/> is derived from
<see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters"/> plus a strongly-typed public attribute,
<see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree.TrainedTreeEnsemble"/>, for exposing trained model's details to users.
Its function, <see cref="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree.CreateTreeEnsembleFromInternalDataStructure"/>, is
called to create <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree.TrainedTreeEnsemble"/> inside <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters"/>.
Note that the major difference between <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree"/>
and <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree"/> is the type of
<see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree.TrainedTreeEnsemble"/>.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree.TrainedTreeEnsemble">
<summary>
An ensemble of trees exposed to users. It is a wrapper on the <see langword="internal"/>
<see cref="T:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble"/> in <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree">
<summary>
<see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree"/> is derived from
<see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters"/> plus a strongly-typed public attribute,
<see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree.TrainedTreeEnsemble"/>, for exposing trained model's details to users.
Its function, <see cref="M:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree.CreateTreeEnsembleFromInternalDataStructure"/>, is
called to create <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree.TrainedTreeEnsemble"/> inside <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParameters"/>.
Note that the major difference between <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree"/>
and <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnRegressionTree"/> is the type of
<see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree.TrainedTreeEnsemble"/>.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TreeEnsembleModelParametersBasedOnQuantileRegressionTree.TrainedTreeEnsemble">
<summary>
An ensemble of trees exposed to users. It is a wrapper on the <see langword="internal"/>
<see cref="T:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble"/> in <see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer">
<summary>
Trains gradient boosted decision trees for the binary classification task.
</summary><remarks>
<para>
FastTree is an efficient implementation of the <a href="https://arxiv.org/abs/1505.01866">MART</a> gradient boosting algorithm.
Gradient boosting is a machine learning technique for regression problems.
It builds each regression tree in a step-wise fashion, using a predefined loss function to measure the error for each step and corrects for it in the next.
So this prediction model is actually an ensemble of weaker prediction models. In regression problems, boosting builds a series of such trees in a step-wise fashion and then selects the optimal tree using an arbitrary differentiable loss function.
</para>
<para>
MART learns an ensemble of regression trees, which is a decision tree with scalar values in its leaves.
A decision (or regression) tree is a binary tree-like flow chart, where at each interior node one decides which of the two child nodes to continue to based on one of the feature values from the input.
At each leaf node, a value is returned. In the interior nodes, the decision is based on the test 'x &lt;= v' where x is the value of the feature in the input sample and v is one of the possible values of this feature.
The functions that can be produced by a regression tree are all the piece-wise constant functions.
</para>
<para>
The ensemble of trees is produced by computing, in each step, a regression tree that approximates the gradient of the loss function, and adding it to the previous tree with coefficients that minimize the loss of the new tree.
The output of the ensemble produced by MART on a given instance is the sum of the tree outputs.
</para>
<list type="bullet">
<item><description>In case of a binary classification problem, the output is converted to a probability by using some form of calibration.</description></item>
<item><description>In case of a regression problem, the output is the predicted value of the function.</description></item>
<item><description>In case of a ranking problem, the instances are ordered by the output value of the ensemble.</description></item>
</list>
<para>For more information see:</para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/Gradient_boosting#Gradient_tree_boosting">Wikipedia: Gradient boosting (Gradient tree boosting).</a></description></item>
<item><description><a href="https://projecteuclid.org/DPubS?service=UI&amp;version=1.0&amp;verb=Display&amp;handle=euclid.aos/1013203451">Greedy function approximation: A gradient boosting machine.</a></description></item>
</list>
</remarks>
<summary>
Trains gradient boosted decision trees for the binary classification task.
</summary><remarks>
<para>
FastTree is an efficient implementation of the <a href="https://arxiv.org/abs/1505.01866">MART</a> gradient boosting algorithm.
Gradient boosting is a machine learning technique for regression problems.
It builds each regression tree in a step-wise fashion, using a predefined loss function to measure the error for each step and corrects for it in the next.
So this prediction model is actually an ensemble of weaker prediction models. In regression problems, boosting builds a series of such trees in a step-wise fashion and then selects the optimal tree using an arbitrary differentiable loss function.
</para>
<para>
MART learns an ensemble of regression trees, which is a decision tree with scalar values in its leaves.
A decision (or regression) tree is a binary tree-like flow chart, where at each interior node one decides which of the two child nodes to continue to based on one of the feature values from the input.
At each leaf node, a value is returned. In the interior nodes, the decision is based on the test 'x &lt;= v' where x is the value of the feature in the input sample and v is one of the possible values of this feature.
The functions that can be produced by a regression tree are all the piece-wise constant functions.
</para>
<para>
The ensemble of trees is produced by computing, in each step, a regression tree that approximates the gradient of the loss function, and adding it to the previous tree with coefficients that minimize the loss of the new tree.
The output of the ensemble produced by MART on a given instance is the sum of the tree outputs.
</para>
<list type="bullet">
<item><description>In case of a binary classification problem, the output is converted to a probability by using some form of calibration.</description></item>
<item><description>In case of a regression problem, the output is the predicted value of the function.</description></item>
<item><description>In case of a ranking problem, the instances are ordered by the output value of the ensemble.</description></item>
</list>
<para>For more information see:</para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/Gradient_boosting#Gradient_tree_boosting">Wikipedia: Gradient boosting (Gradient tree boosting).</a></description></item>
<item><description><a href="https://projecteuclid.org/DPubS?service=UI&amp;version=1.0&amp;verb=Display&amp;handle=euclid.aos/1013203451">Greedy function approximation: A gradient boosting machine.</a></description></item>
</list>
</remarks>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.Options.UnbalancedSets">
<summary>
Option for using derivatives optimized for unbalanced sets.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.LoadNameValue">
<summary>
The LoadName for the assembly containing the trainer.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="learningRate">The learning rate.</param>
<param name="minDatapointsInLeaves">The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.</param>
<param name="numLeaves">The max number of leaves in each regression tree.</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer"/> by using the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.Options"/> class.
</summary>
<param name="env">The instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.BinaryPredictionTransformer`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer">
<summary>
Trains gradient boosted decision trees for the regression task.
</summary><remarks>
<para>
FastTree is an efficient implementation of the <a href="https://arxiv.org/abs/1505.01866">MART</a> gradient boosting algorithm.
Gradient boosting is a machine learning technique for regression problems.
It builds each regression tree in a step-wise fashion, using a predefined loss function to measure the error for each step and corrects for it in the next.
So this prediction model is actually an ensemble of weaker prediction models. In regression problems, boosting builds a series of such trees in a step-wise fashion and then selects the optimal tree using an arbitrary differentiable loss function.
</para>
<para>
MART learns an ensemble of regression trees, which is a decision tree with scalar values in its leaves.
A decision (or regression) tree is a binary tree-like flow chart, where at each interior node one decides which of the two child nodes to continue to based on one of the feature values from the input.
At each leaf node, a value is returned. In the interior nodes, the decision is based on the test 'x &lt;= v' where x is the value of the feature in the input sample and v is one of the possible values of this feature.
The functions that can be produced by a regression tree are all the piece-wise constant functions.
</para>
<para>
The ensemble of trees is produced by computing, in each step, a regression tree that approximates the gradient of the loss function, and adding it to the previous tree with coefficients that minimize the loss of the new tree.
The output of the ensemble produced by MART on a given instance is the sum of the tree outputs.
</para>
<list type="bullet">
<item><description>In case of a binary classification problem, the output is converted to a probability by using some form of calibration.</description></item>
<item><description>In case of a regression problem, the output is the predicted value of the function.</description></item>
<item><description>In case of a ranking problem, the instances are ordered by the output value of the ensemble.</description></item>
</list>
<para>For more information see:</para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/Gradient_boosting#Gradient_tree_boosting">Wikipedia: Gradient boosting (Gradient tree boosting).</a></description></item>
<item><description><a href="https://projecteuclid.org/DPubS?service=UI&amp;version=1.0&amp;verb=Display&amp;handle=euclid.aos/1013203451">Greedy function approximation: A gradient boosting machine.</a></description></item>
</list>
</remarks>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.PredictionKind">
<summary>
The type of prediction for the trainer.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="learningRate">The learning rate.</param>
<param name="minDatapointsInLeaves">The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.</param>
<param name="numLeaves">The max number of leaves in each regression tree.</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer"/> by using the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.Options"/> class.
</summary>
<param name="env">The instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.GetDatasetRegressionLabels(Microsoft.ML.Trainers.FastTree.Dataset)">
<summary>
Gets the regression labels that were stored in the dataset skeleton, or
constructs them from the ratings if absent. This returns null if the
dataset itself is null.
</summary>
<param name="set">The dataset</param>
<returns>The list of regression targets, or null if <paramref name="set"/> was null</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.RegressionPredictionTransformer`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer">
<summary>
Trains gradient boosted decision trees to fit target values using a Tweedie loss function.
This learner is a generalization of Poisson, compound Poisson, and gamma regression.
</summary><remarks>
The Tweedie boosting model follows the mathematics established in <a href="https://arxiv.org/pdf/1508.06378.pdf">
Insurance Premium Prediction via Gradient Tree-Boosted Tweedie Compound Poisson Models.</a> from Yang, Quan, and Zou.
<para>For an introduction to Gradient Boosting, and more information, see:</para>
<para><a href="https://en.wikipedia.org/wiki/Gradient_boosting#Gradient_tree_boosting">Wikipedia: Gradient boosting (Gradient tree boosting)</a></para>
<para><a href="https://projecteuclid.org/DPubS?service=UI&amp;version=1.0&amp;verb=Display&amp;handle=euclid.aos/1013203451">Greedy function approximation: A gradient boosting machine</a></para>
</remarks>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="learningRate">The learning rate.</param>
<param name="minDatapointsInLeaves">The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.</param>
<param name="numLeaves">The max number of leaves in each regression tree.</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer"/> by using the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.Options"/> class.
</summary>
<param name="env">The instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.GetDatasetRegressionLabels(Microsoft.ML.Trainers.FastTree.Dataset)">
<summary>
Gets the regression labels that were stored in the dataset skeleton, or
constructs them from the ratings if absent. This returns null if the
dataset itself is null.
</summary>
<param name="set">The dataset</param>
<returns>The list of regression targets, or null if <paramref name="set"/> was null</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.RegressionPredictionTransformer`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer">
<summary>
Trains gradient boosted decision trees to the LambdaRank quasi-gradient.
</summary><remarks>
<para>
FastTree is an efficient implementation of the <a href="https://arxiv.org/abs/1505.01866">MART</a> gradient boosting algorithm.
Gradient boosting is a machine learning technique for regression problems.
It builds each regression tree in a step-wise fashion, using a predefined loss function to measure the error for each step and corrects for it in the next.
So this prediction model is actually an ensemble of weaker prediction models. In regression problems, boosting builds a series of such trees in a step-wise fashion and then selects the optimal tree using an arbitrary differentiable loss function.
</para>
<para>
MART learns an ensemble of regression trees, which is a decision tree with scalar values in its leaves.
A decision (or regression) tree is a binary tree-like flow chart, where at each interior node one decides which of the two child nodes to continue to based on one of the feature values from the input.
At each leaf node, a value is returned. In the interior nodes, the decision is based on the test 'x &lt;= v' where x is the value of the feature in the input sample and v is one of the possible values of this feature.
The functions that can be produced by a regression tree are all the piece-wise constant functions.
</para>
<para>
The ensemble of trees is produced by computing, in each step, a regression tree that approximates the gradient of the loss function, and adding it to the previous tree with coefficients that minimize the loss of the new tree.
The output of the ensemble produced by MART on a given instance is the sum of the tree outputs.
</para>
<list type="bullet">
<item><description>In case of a binary classification problem, the output is converted to a probability by using some form of calibration.</description></item>
<item><description>In case of a regression problem, the output is the predicted value of the function.</description></item>
<item><description>In case of a ranking problem, the instances are ordered by the output value of the ensemble.</description></item>
</list>
<para>For more information see:</para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/Gradient_boosting#Gradient_tree_boosting">Wikipedia: Gradient boosting (Gradient tree boosting).</a></description></item>
<item><description><a href="https://projecteuclid.org/DPubS?service=UI&amp;version=1.0&amp;verb=Display&amp;handle=euclid.aos/1013203451">Greedy function approximation: A gradient boosting machine.</a></description></item>
</list>
</remarks>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.PredictionKind">
<summary>
The prediction kind for this trainer.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="groupIdColumn">The name for the column containing the group ID. </param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="numLeaves">The max number of leaves in each regression tree.</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="minDatapointsInLeaves">The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.</param>
<param name="learningRate">The learning rate.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer"/> by using the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.Options"/> class.
</summary>
<param name="env">The instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.CreateStandardTest(Microsoft.ML.Trainers.FastTree.Dataset)">
<summary>
Create standard test for dataset.
</summary>
<param name="dataset">dataset used for testing</param>
<returns>standard test for the dataset</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.CreateSpecialTrainSetTest">
<summary>
Create the special test for train set.
</summary>
<returns>test for train set</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.CreateSpecialValidSetTest">
<summary>
Create the special test for valid set.
</summary>
<returns>test for valid set</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.CreateFirstTestSetTest">
<summary>
Create the test for the first test set.
</summary>
<returns>test for the first test set</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.GetTestGraphHeader">
<summary>
Get the header of test graph
</summary>
<returns>Test graph header</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.RankingPredictionTransformer`1"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.ParallelTrainer">
<summary>
Allows choosing the parallel FastTree learning algorithm.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.NumThreads">
<summary>
The number of threads to use.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.RngSeed">
<summary>
The seed of the random number generator.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.FeatureSelectSeed">
<summary>
The seed of the active feature selection.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.EntropyCoefficient">
<summary>
The entropy (regularization) coefficient between 0 and 1.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.HistogramPoolSize">
<summary>
The number of histograms in the pool (between 2 and numLeaves).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.DiskTranspose">
<summary>
Whether to utilize the disk or the data's native transposition facilities (where applicable) when performing the transpose.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.FeatureFlocks">
<summary>
Whether to collectivize features during dataset preparation to speed up training.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.CategoricalSplit">
<summary>
Whether to do split based on multiple categorical feature values.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MaxCategoricalGroupsPerNode">
<summary>
Maximum categorical split groups to consider when splitting on a categorical feature. Split groups are a collection of split points. This is used to reduce overfitting when there many categorical features.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MaxCategoricalSplitPoints">
<summary>
Maximum categorical split points to consider when splitting on a categorical feature.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MinDocsPercentageForCategoricalSplit">
<summary>
Minimum categorical docs percentage in a bin to consider for a split.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MinDocsForCategoricalSplit">
<summary>
Minimum categorical doc count in a bin to consider for a split.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.Bias">
<summary>
Bias for calculating gradient for each feature bin for a categorical feature.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.Bundling">
<summary>
Bundle low population bins. Bundle.None(0): no bundling, Bundle.AggregateLowPopulation(1): Bundle low population, Bundle.Adjacent(2): Neighbor low population bundle.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MaxBins">
<summary>
Maximum number of distinct values (bins) per feature.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.SparsifyThreshold">
<summary>
Sparsity level needed to use sparse feature representation.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.FeatureFirstUsePenalty">
<summary>
The feature first use penalty coefficient.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.FeatureReusePenalty">
<summary>
The feature re-use penalty (regularization) coefficient.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.GainConfidenceLevel">
<summary>
Tree fitting gain confidence requirement (should be in the range [0,1) ).
</summary>
<remarks>
Only consider a gain if its likelihood versus a random choice gain is above a certain value.
So 0.95 would mean restricting to gains that have less than a 0.05 chance of being generated randomly through choice of a random split.
</remarks>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.SoftmaxTemperature">
<summary>
The temperature of the randomized softmax distribution for choosing the feature.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.ExecutionTimes">
<summary>
Print execution time breakdown to stdout.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.NumLeaves">
<summary>
The max number of leaves in each regression tree.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MinDocumentsInLeafs">
<summary>
The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.NumTrees">
<summary>
Total number of decision trees to create in the ensemble.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.FeatureFraction">
<summary>
The fraction of features (chosen randomly) to use on each iteration.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.BaggingSize">
<summary>
Number of trees in each bag (0 for disabling bagging).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.BaggingTrainFraction">
<summary>
Percentage of training examples used in each bag.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.SplitFraction">
<summary>
The fraction of features (chosen randomly) to use on each split.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.Smoothing">
<summary>
Smoothing parameter for tree regularization.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.AllowEmptyTrees">
<summary>
When a root split is impossible, allow training to proceed.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.FeatureCompressionLevel">
<summary>
The level of feature compression to use.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.CompressEnsemble">
<summary>
Compress the tree Ensemble.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.MaxTreesAfterCompression">
<summary>
Maximum Number of trees after compression.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.PrintTestGraph">
<summary>
Print metrics graph for the first test set.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.PrintTrainValidGraph">
<summary>
Print Train and Validation metrics in graph.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.TreeOptions.TestFrequency">
<summary>
Calculate metric values for train/valid/test every k rounds.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.BestStepRankingRegressionTrees">
<summary>
Option for using best regression step trees.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.UseLineSearch">
<summary>
Should we use line search for a step size.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.NumPostBracketSteps">
<summary>
Number of post-bracket line search steps.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.MinStepSize">
<summary>
Minimum line search step size.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.OptimizationAlgorithm">
<summary>
Optimization algorithm to be used (GradientDescent, AcceleratedGradientDescent).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.EarlyStoppingRule">
<summary>
Early stopping rule. (Validation set (/valid) is required).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.EarlyStoppingMetrics">
<summary>
Early stopping metrics. (For regression, 1: L1, 2:L2; for ranking, 1:NDCG@1, 3:NDCG@3).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.EnablePruning">
<summary>
Enable post-training pruning to avoid overfitting. (a validation set is required).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.UseTolerantPruning">
<summary>
Use window and tolerance for pruning.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.PruningThreshold">
<summary>
The tolerance threshold for pruning.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.PruningWindowSize">
<summary>
The moving window size for pruning.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.LearningRates">
<summary>
The learning rate.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.Shrinkage">
<summary>
Shrinkage.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.DropoutRate">
<summary>
Dropout rate for tree regularization.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.GetDerivativesSampleRate">
<summary>
Sample each query 1 in k times in the GetDerivatives function.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.WriteLastEnsemble">
<summary>
Write the last ensemble instead of the one determined by early stopping.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.MaxTreeOutput">
<summary>
Upper bound on absolute value of single tree output.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.RandomStart">
<summary>
Training starts from random ordering (determined by /r1).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.FilterZeroLambdas">
<summary>
Filter zero lambdas during training.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.BaselineScoresFormula">
<summary>
Freeform defining the scores that should be used as the baseline ranker.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.BaselineAlphaRisk">
<summary>
Baseline alpha for tradeoffs of risk (0 is normal training).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BoostedTreeOptions.PositionDiscountFreeform">
<summary>
The discount freeform which specifies the per position discounts of documents in a query (uses a single variable P for position where P=0 is first position).
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastTree">
<summary>
The Entry Point for the FastTree Binary Classifier.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer"/>
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Double,System.Int32)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="numIterations">The number of iterations to use in learning the features.</param>
<param name="learningRate">The learning rate. GAMs work best with a small learning rate.</param>
<param name="maxBins">The maximum number of bins to use to approximate features</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.BinaryPredictionTransformer`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamModelParameters">
<summary>
The model parameters class for Binary Classification GAMs
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamModelParameters.#ctor(Microsoft.ML.IHostEnvironment,System.Double[][],System.Double[][],System.Double,System.Int32,System.Int32[])">
<summary>
Construct a new Binary Classification GAM with the defined properties.
</summary>
<param name="env">The Host Environment</param>
<param name="binUpperBounds">An array of arrays of bin-upper-bounds for each feature.</param>
<param name="binEffects">An array of arrays of effect sizes for each bin for each feature.</param>
<param name="intercept">The intercept term for the model. Also referred to as the bias or the mean effect.</param>
<param name="inputLength">The number of features passed from the dataset. Used when the number of input features is
different than the number of shape functions. Use default if all features have a shape function.</param>
<param name="featureToInputMap">A map from the feature shape functions (as described by the binUpperBounds and BinEffects)
to the input feature. Used when the number of input features is different than the number of shape functions. Use default if all features have
a shape function.</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.GamModelParametersBase">
<summary>
The base class for GAM Model Parameters.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.Intercept">
<summary>
The model intercept. Also known as bias or mean effect.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.NumShapeFunctions">
<summary>
The number of shape functions used in the model.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.Microsoft#ML#Model#ICalculateFeatureContribution#FeatureContributionCalculator">
<summary>
Used to determine the contribution of each feature to the score of an example by <see cref="T:Microsoft.ML.Transforms.FeatureContributionCalculatingTransformer"/>.
For Generalized Additive Models (GAM), the contribution of a feature is equal to the shape function for the given feature evaluated at
the feature value.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.GetBinUpperBounds(System.Int32)">
<summary>
Get the bin upper bounds for each feature.
</summary>
<param name="featureIndex">The index of the feature (in the training vector) to get.</param>
<returns>The bin upper bounds. May be zero length if this feature has no bins.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.GetBinUpperBounds">
<summary>
Get all the bin upper bounds.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.GetBinEffects(System.Int32)">
<summary>
Get the binned weights for each feature.
</summary>
<param name="featureIndex">The index of the feature (in the training vector) to get.</param>
<returns>The binned effects for each feature. May be zero length if this feature has no bins.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.GetBinEffects">
<summary>
Get all the binned effects.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand">
<summary>
The GAM model visualization command. Because the data access commands must access private members of
<see cref="T:Microsoft.ML.Trainers.FastTree.GamModelParametersBase"/>, it is convenient to have the command itself nested within the base
predictor class.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.NumFeatures">
<summary>
These are the number of input features, as opposed to the number of features used within GAM
which may be lower.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.SaveIfNeeded(Microsoft.ML.IHost,Microsoft.ML.IChannel,System.String)">
<summary>
This will write out a file, if needed. In all cases if something is written it will return
a version number, with an indication based on sign of whether anything was actually written
in this call.
</summary>
<param name="host">The host from the command</param>
<param name="ch">The channel from the command</param>
<param name="outFile">The (optionally empty) output file</param>
<returns>Returns <c>null</c> if the model file could not be saved because <paramref name="outFile"/>
was <c>null</c> or whitespace. Otherwise, if the current version if newer than the last version saved,
it will save, and return that version. (In this case, the number is non-negative.) Otherwise, if the current
version was the last version saved, then it will return the bitwise not of that version number (in this case,
the number is negative).</returns>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.FeatureInfo.UpperBounds">
<summary>
The upper bounds of each bin.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.FeatureInfo.BinEffects">
<summary>
The amount added to the model for a document falling in a given bin.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.FeatureInfo.DocCounts">
<summary>
The number of documents in each bin.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.FeatureInfo.Version">
<summary>
The version of the GAM context that has these values.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Context.FeatureInfo.CategoricalFeatureIndex">
<summary>
For features belonging to the same categorical, this value will be the same.
Set to -1 for non-categoricals.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamModelParametersBase.VisualizationCommand.Init(Microsoft.ML.IChannel)">
<summary>
Attempts to initialize required items, from the input model file. In the event that anything goes
wrong, this method will throw.
</summary>
<param name="ch">The channel</param>
<returns>A structure containing essential information about the GAM dataset that enables
operations on top of that structure.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RegressionGamTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Double,System.Int32)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionGamTrainer"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="numIterations">The number of iterations to use in learning the features.</param>
<param name="learningRate">The learning rate. GAMs work best with a small learning rate.</param>
<param name="maxBins">The maximum number of bins to use to approximate features</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RegressionGamTrainer.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionGamTrainer"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.RegressionPredictionTransformer`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.RegressionGamModelParameters">
<summary>
The model parameters class for Regression GAMs
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RegressionGamModelParameters.#ctor(Microsoft.ML.IHostEnvironment,System.Double[][],System.Double[][],System.Double,System.Int32,System.Int32[])">
<summary>
Construct a new Regression GAM with the defined properties.
</summary>
<param name="env">The Host Environment</param>
<param name="binUpperBounds">An array of arrays of bin-upper-bounds for each feature.</param>
<param name="binEffects">An array of arrays of effect sizes for each bin for each feature.</param>
<param name="intercept">The intercept term for the model. Also referred to as the bias or the mean effect.</param>
<param name="inputLength">The number of features passed from the dataset. Used when the number of input features is
different than the number of shape functions. Use default if all features have a shape function.</param>
<param name="featureToInputMap">A map from the feature shape functions (as described by the binUpperBounds and BinEffects)
to the input feature. Used when the number of input features is different than the number of shape functions. Use default if all features have
a shape function.</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3">
<summary>
Generalized Additive Model Trainer.
</summary>
<remarks>
<para>
Generalized Additive Models, or GAMs, model the data as a set of linearly independent features
similar to a linear model. For each feature, the GAM trainer learns a non-linear function,
called a "shape function", that computes the response as a function of the feature's value.
(In contrast, a linear model fits a linear response (e.g. a line) to each feature.)
To score an example, the outputs of all the shape functions are summed and the score is the total value.
</para>
<para>
This GAM trainer is implemented using shallow gradient boosted trees (e.g. tree stumps) to learn nonparametric
shape functions, and is based on the method described in Lou, Caruana, and Gehrke.
<a href='http://www.cs.cornell.edu/~yinlou/papers/lou-kdd12.pdf'>"Intelligible Models for Classification and Regression."</a> KDD'12, Beijing, China. 2012.
After training, an intercept is added to represent the average prediction over the training set,
and the shape functions are normalized to represent the deviation from the average prediction. This results
in models that are easily interpreted simply by inspecting the intercept and the shape functions.
See the sample below for an example of how to train a GAM model and inspect and interpret the results.
</para>
</remarks>
<example>
<format type="text/markdown">
<]
]]>
</format>
</example>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.OptionsBase.GainConfidenceLevel">
Only consider a gain if its likelihood versus a random choice gain is above a certain value.
So 0.95 would mean restricting to gains that have less than a 0.05 chance of being generated randomly through choice of a random split.
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.HasValidSet">
<summary>
Whether a validation set was passed in
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.TrainMainEffectsModel(Microsoft.ML.IChannel)">
<summary>
Training algorithm for the single-feature functions f(x)
</summary>
<param name="ch">The channel to write to</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.UpdateScores(System.Int32)">
<summary>
Update scores for all tracked datasets
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.UpdateScoresForSet(Microsoft.ML.Trainers.FastTree.Dataset,System.Double[],System.Int32)">
<summary>
Updates the scores for a dataset.
</summary>
<param name="dataset">The dataset to use.</param>
<param name="scores">The current scores for this dataset</param>
<param name="iteration">The iteration of the algorithm.
Used to look up the sub-graph to use to update the score.</param>
<returns></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.CombineGraphs(Microsoft.ML.IChannel)">
<summary>
Combine the single-feature single-tree graphs to a single-feature model
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.DefineDocumentThreadBlocks(System.Int32,System.Int32,System.Int32[]@)">
<summary>
Distribute the documents into blocks to be computed on each thread
</summary>
<param name="numDocs">The number of documents in the dataset</param>
<param name="blocks">An array containing the starting point for each thread;
the next position is the exclusive ending point for the thread.</param>
<param name="numThreads">The number of threads used.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.CenterGraph">
<summary>
Center the graph using the mean response per feature on the training set.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.CreateEfficientBinning">
<summary>
Process bins such that only bin upper bounds and bin effects remain where
the effect changes.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.LeafSplitHelper.GetLeafSplitGain(System.Int32,System.Double,System.Double)">
<summary>
Returns the split gain for a particular leaf. Used on two leaves to calculate
the squared error gain for a particular leaf.
</summary>
<param name="count">Number of documents in this leaf</param>
<param name="sumTargets">Sum of the target values for this leaf</param>
<param name="sumWeights">Sum of the weights for this leaf, not meaningful if
<see cref="P:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.HasWeights"/> is <c>false</c></param>
<returns>The gain in least squared error</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.LeafSplitHelper.CalculateSplittedLeafOutput(System.Int32,System.Double,System.Double)">
<summary>
Calculates the output value for a leaf after splitting.
</summary>
<param name="count">Number of documents in this leaf</param>
<param name="sumTargets">Sum of the target values for this leaf</param>
<param name="sumWeights">Sum of the weights for this leaf, not meaningful if
<see cref="P:Microsoft.ML.Trainers.FastTree.GamTrainerBase`3.HasWeights"/> is <c>false</c></param>
<returns>The output value for a leaf</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RandomForestTrainerBase`3.#ctor(Microsoft.ML.IHostEnvironment,`0,Microsoft.ML.SchemaShape.Column,System.Boolean)">
<summary>
Constructor invoked by the maml code-path.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RandomForestTrainerBase`3.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.SchemaShape.Column,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Boolean)">
<summary>
Constructor invoked by the API code-path.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.FastForestClassificationModelParameters.PredictionKind">
<summary>
The type of prediction for this trainer.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastForestClassification">
<summary>
Trains a random forest for binary classification.
</summary><remarks>
Decision trees are non-parametric models that perform a sequence of simple tests on inputs.
This decision procedure maps them to outputs found in the training dataset whose inputs were similar to the instance being processed.
A decision is made at each node of the binary tree data structure based on a measure of similarity that maps each instance recursively through the branches of the tree until the appropriate leaf node is reached and the output decision returned.
<para>Decision trees have several advantages:</para>
<list type="bullet">
<item><description>They are efficient in both computation and memory usage during training and prediction. </description></item>
<item><description>They can represent non-linear decision boundaries.</description></item>
<item><description>They perform integrated feature selection and classification. </description></item>
<item><description>They are resilient in the presence of noisy features.</description></item>
</list>
<para>Fast forest is a random forest implementation.
The model consists of an ensemble of decision trees. Each tree in a decision forest outputs a Gaussian distribution by way of prediction.
An aggregation is performed over the ensemble of trees to find a Gaussian distribution closest to the combined distribution for all trees in the model.
This decision forest classifier consists of an ensemble of decision trees.</para>
<para>Generally, ensemble models provide better coverage and accuracy than single decision trees.
Each tree in a decision forest outputs a Gaussian distribution.</para>
<para>For more see: </para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/Random_forest">Wikipedia: Random forest</a></description></item>
<item><description><a href="http://jmlr.org/papers/volume7/meinshausen06a/meinshausen06a.pdf">Quantile regression forest</a></description></item>
<item><description><a href="https://blogs.technet.microsoft.com/machinelearning/2014/09/10/from-stumps-to-trees-to-forests/">From Stumps to Trees to Forests</a></description></item>
</list>
</remarks>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestClassification.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestClassification"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The name for the column containing the initial weight.</param>
<param name="numLeaves">The max number of leaves in each regression tree.</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="minDatapointsInLeaves">The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestClassification.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.FastForestClassification.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestClassification"/> by using the <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestClassification.Options"/> class.
</summary>
<param name="env">The instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestClassification.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestClassification"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.BinaryPredictionTransformer`1"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestRegressionModelParameters.QuantileStatistics.#ctor(System.Single[],System.Single[],System.Boolean)">
<summary>
The data array will be modified by sorting if it is not already sorted and this class owns the data.
Modifying the data outside will lead to erroneous output by this class.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestRegressionModelParameters.QuantileStatistics.GetQuantile(System.Single)">
<summary>
There are many ways to estimate quantile. This implementation is based on R-8, SciPy-(1/3,1/3)
https://en.wikipedia.org/wiki/Quantile#Estimating_the_quantiles_of_a_population
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FastForestRegression">
<summary>
Trains a random forest to fit target values using least-squares.
</summary><remarks>
Decision trees are non-parametric models that perform a sequence of simple tests on inputs.
This decision procedure maps them to outputs found in the training dataset whose inputs were similar to the instance being processed.
A decision is made at each node of the binary tree data structure based on a measure of similarity that maps each instance recursively through the branches of the tree until the appropriate leaf node is reached and the output decision returned.
<para>Decision trees have several advantages:</para>
<list type="bullet">
<item><description>They are efficient in both computation and memory usage during training and prediction. </description></item>
<item><description>They can represent non-linear decision boundaries.</description></item>
<item><description>They perform integrated feature selection and classification. </description></item>
<item><description>They are resilient in the presence of noisy features.</description></item>
</list>
<para>Fast forest is a random forest implementation.
The model consists of an ensemble of decision trees. Each tree in a decision forest outputs a Gaussian distribution by way of prediction.
An aggregation is performed over the ensemble of trees to find a Gaussian distribution closest to the combined distribution for all trees in the model.
This decision forest classifier consists of an ensemble of decision trees.</para>
<para>Generally, ensemble models provide better coverage and accuracy than single decision trees.
Each tree in a decision forest outputs a Gaussian distribution.</para>
<para>For more see: </para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/Random_forest">Wikipedia: Random forest</a></description></item>
<item><description><a href="http://jmlr.org/papers/volume7/meinshausen06a/meinshausen06a.pdf">Quantile regression forest</a></description></item>
<item><description><a href="https://blogs.technet.microsoft.com/machinelearning/2014/09/10/from-stumps-to-trees-to-forests/">From Stumps to Trees to Forests</a></description></item>
</list>
</remarks>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestRegression.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestRegression"/>
</summary>
<param name="env">The private instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="labelColumn">The name of the label column.</param>
<param name="featureColumn">The name of the feature column.</param>
<param name="weightColumn">The optional name for the column containing the initial weight.</param>
<param name="numLeaves">The max number of leaves in each regression tree.</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="minDatapointsInLeaves">The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestRegression.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.FastTree.FastForestRegression.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestRegression"/> by using the <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestRegression.Options"/> class.
</summary>
<param name="env">The instance of <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastForestRegression.Fit(Microsoft.Data.DataView.IDataView,Microsoft.Data.DataView.IDataView)">
<summary>
Trains a <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestRegression"/> using both training and validation data, returns
a <see cref="T:Microsoft.ML.Data.RegressionPredictionTransformer`1"/>.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase">
<summary>
A container base class for exposing <see cref="T:Microsoft.ML.Trainers.FastTree.InternalRegressionTree"/>'s and
<see cref="T:Microsoft.ML.Trainers.FastTree.InternalQuantileRegressionTree"/>'s attributes to users.
This class should not be mutable, so it contains a lot of read-only members.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._tree">
<summary>
<see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase"/> is an immutable wrapper over <see cref="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._tree"/> for exposing some tree's
attribute to users.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._lteChild">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LteChild"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._gtChild">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GtChild"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._numericalSplitFeatureIndexes">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitFeatureIndexes"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._numericalSplitThresholds">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitThresholds"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._categoricalSplitFlags">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._leafValues">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LeafValues"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._splitGains">
<summary>
See <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.SplitGains"/>.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LteChild">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LteChild"/>[i] is the i-th node's child index used when
(1) the numerical feature indexed by <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitFeatureIndexes"/>[i] is less than the
threshold <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitThresholds"/>[i], or
(2) the categorical features indexed by <see cref="M:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GetCategoricalCategoricalSplitFeatureRangeAt(System.Int32)"/>'s
returned value with nodeIndex=i is NOT a sub-set of <see cref="M:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GetCategoricalSplitFeaturesAt(System.Int32)"/> with
nodeIndex=i.
Note that the case (1) happens only when <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags"/>[i] is true and otherwise (2)
occurs. A non-negative returned value means a node (i.e., not a leaf); for example, 2 means the 3rd node in
the underlying <see cref="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._tree"/>. A negative returned value means a leaf; for example, -1 stands for the
<see langword="~"/>(-1)-th leaf in the underlying <see cref="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._tree"/>. Note that <see langword="~"/> is the
bitwise complement operator in C#; for details, see
https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/operators/bitwise-complement-operator.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GtChild">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GtChild"/>[i] is the i-th node's child index used when the two conditions, (1) and (2),
described in <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LteChild"/>'s document are not true. Its return value follows the format
used in <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LteChild"/>.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitFeatureIndexes">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitFeatureIndexes"/>[i] is the feature index used in the splitting function of the
i-th node. This value is valid only if <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags"/>[i] is false.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitThresholds">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitThresholds"/>[i] is the threshold on feature indexed by
<see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumericalSplitFeatureIndexes"/>[i], where i is the i-th node's index
(for example, i is 1 for the 2nd node in <see cref="F:Microsoft.ML.Trainers.FastTree.RegressionTreeBase._tree"/>).
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags">
<summary>
Determine the types of splitting function. If <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags"/>[i] is true, the i-th
node uses a categorical splitting function. Otherwise, a traditional numerical split is used.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LeafValues">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LeafValues"/>[i] is the learned value at the i-th leaf.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GetCategoricalSplitFeaturesAt(System.Int32)">
<summary>
Return categorical thresholds used at node indexed by nodeIndex. If the considered input feature does NOT
match any of values returned by <see cref="M:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GetCategoricalSplitFeaturesAt(System.Int32)"/>, we call it a
less-than-threshold event and therefore <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.LteChild"/>[nodeIndex] is the child node that input
should go next. The returned value is valid only if <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags"/>[nodeIndex] is true.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GetCategoricalCategoricalSplitFeatureRangeAt(System.Int32)">
<summary>
Return categorical thresholds' range used at node indexed by nodeIndex. A categorical split at node indexed
by nodeIndex can consider multiple consecutive input features at one time; their range is specified by
<see cref="M:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.GetCategoricalCategoricalSplitFeatureRangeAt(System.Int32)"/>. The returned value is always a 2-element
array; its 1st element is the starting index and its 2nd element is the ending index of a feature segment.
The returned value is valid only if <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.CategoricalSplitFlags"/>[nodeIndex] is true.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.SplitGains">
<summary>
The gains obtained by splitting data at nodes. Its i-th value is computed from the split at the i-th node.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumLeaves">
<summary>
Number of leaves in the tree. Note that <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumLeaves"/> does not take non-leaf nodes into account.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumNodes">
<summary>
Number of nodes in the tree. This doesn't include any leaves. For example, a tree with node0->node1,
node0->leaf3, node1->leaf1, node1->leaf2, <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumNodes"/> and <see cref="P:Microsoft.ML.Trainers.FastTree.RegressionTreeBase.NumLeaves"/> should
be 2 and 3, respectively.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.RegressionTree">
<summary>
A container class for exposing <see cref="T:Microsoft.ML.Trainers.FastTree.InternalRegressionTree"/>'s attributes to users.
This class should not be mutable, so it contains a lot of read-only members. Note that
<see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTree"/> is identical to <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase"/> but in
another derived class <see cref="T:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree"/> some attributes are added.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree">
<summary>
A container class for exposing <see cref="T:Microsoft.ML.Trainers.FastTree.InternalQuantileRegressionTree"/>'s attributes to users.
This class should not be mutable, so it contains a lot of read-only members. In addition to
things inherited from <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase"/>, we add <see cref="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSamplesAt(System.Int32)"/>
and <see cref="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSampleWeightsAt(System.Int32)"/> to expose (sub-sampled) training labels falling into
the leafIndex-th leaf and their weights.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree._leafSamples">
<summary>
Sample labels from training data. <see cref="F:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree._leafSamples"/>[i] stores the labels falling into the
i-th leaf.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree._leafSampleWeights">
<summary>
Sample labels' weights from training data. <see cref="F:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree._leafSampleWeights"/>[i] stores the weights for
labels falling into the i-th leaf. <see cref="F:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree._leafSampleWeights"/>[i][j] is the weight of
<see cref="F:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree._leafSamples"/>[i][j].
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSamplesAt(System.Int32)">
<summary>
Return the training labels falling into the specified leaf.
</summary>
<param name="leafIndex">The index of the specified leaf.</param>
<returns>Training labels</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSampleWeightsAt(System.Int32)">
<summary>
Return the weights for training labels falling into the specified leaf. If <see cref="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSamplesAt(System.Int32)"/>
and <see cref="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSampleWeightsAt(System.Int32)"/> use the same input, the i-th returned value of this function is
the weight of the i-th label in <see cref="M:Microsoft.ML.Trainers.FastTree.QuantileRegressionTree.GetLeafSamplesAt(System.Int32)"/>.
</summary>
<param name="leafIndex">The index of the specified leaf.</param>
<returns>Training labels' weights</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SumupPerformanceCommand">
<summary>
This is an internal utility command to measure the performance of the IntArray sumup operation.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.TrivialGradientWrapper">
<summary>
Trivial weights wrapper. Creates proxy class holding the targets.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.BestStepRegressionGradientWrapper">
<summary>
Provides weights used when best regression step option is on.
</summary>
Second derivatives are used as weights in a leaf when one makes a Newton-Raphson step (taken into account when the regression tree is trained).
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.QueryWeightsGradientWrapper">
<summary>
Wraps targets with query weights. Regression tree is built for weighted data, and weights are used for mean
calculation at Newton-Raphson step.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.QueryWeightsBestResressionStepGradientWrapper">
<summary>
Wraps targets when both query weights and best step regression options are active.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.DcgCalculator._secondLevelcomparers">
<summary>
Contains the instances for a second Level comparer, which gets applied after the initial rank
based ordering has happened. The array stores one second level comparer per thread.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.#ctor(System.Int32,System.String,System.Int32)">
<summary>
Constructs a DCG calculator
</summary>
<param name="maxDocsPerQuery">the maximum number of documents per query</param>
<param name="sortingAlgorithm">a string describing the sorting algorithm to use</param>
<param name="topNDocsForIdealDcg">specifies the truncation level used for the ideal DCG computation.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.MaxDcg(System.Int16[],System.Int32[],System.Int32,System.Int32[][])">
<summary>
Calculates the natural-based max DCG at a given truncation
</summary>
<param name="labels">vector of labels</param>
<param name="boundaries">vector of query boundaries</param>
<param name="trunc">truncation to use</param>
<param name="labelCounts"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.MaxDcgQuery(System.Int16[],System.Int32,System.Int32,System.Int32,System.Int32[])">
<summary>
Calculates the natural-based max DCG at a given truncation for a query
</summary>
<param name="labels">vector of labels</param>
<param name="begin">Index of the first document</param>
<param name="labelCounts"></param>
<param name="trunc">truncation to use</param>
<param name="numDocuments"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.Ndcg3(Microsoft.ML.Trainers.FastTree.Dataset,System.Int16[],System.Double[])">
<summary>
Efficient computation of average NDCG@3 for the entire dataset
Note that it is virtual and MPI provides faster implementations for MPI
</summary>
<param name="dataset">the dataset</param>
<param name="scores">vector of scores</param>
<param name="labels"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.DCG3(System.Double[],System.Int16[],System.Int32,System.Int32)">
<summary>
Efficient computation of natural-based pessimistic DCG@3 for a given query
</summary>
<param name="scores">vector of scores</param>
<param name="labels">vector of labels</param>
<param name="begin">index of first document in query</param>
<param name="end">index of first document in next query</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.Ndcg1(Microsoft.ML.Trainers.FastTree.Dataset,System.Int16[],System.Double[])">
<summary>
Efficient computation of average NDCG@1 for the entire dataset
Note that it is virtual and MPI provides faster implementations for MPI
</summary>
<param name="dataset">the dataset</param>
<param name="labels"></param>
<param name="scores">the vector of score from previous rounds</param>
<returns>average NDCG@1 for an entire dataset</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.DCG1(System.Double[],System.Int16[],System.Int32,System.Int32)">
<summary>
Calculates the natural-based pessimistic DCG@1 of scores(query)
</summary>
<param name="scores">vector of scores</param>
<param name="labels">vector of labels</param>
<param name="begin">index of first document in query</param>
<param name="end">index of first document in next query</param>
<returns>DCG@1</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.NdcgRangeFromScores(Microsoft.ML.Trainers.FastTree.Dataset,System.Int16[],System.Double[])">
<summary>
calculates the average NDCG given the scores array
For performance reasons it duplicates some logic
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.SortRankingResults(System.Int16[],System.Int32,System.Int32,System.Int32,System.Int32,System.Int32[],System.Double[])">
<summary>
Orders the queries based on the given comparer.
</summary>
<param name="labels">The label for all query URL pairs</param>
<param name="threadIndex">Specifies the thread which is executing this code</param>
<param name="scoreBegin">position of the first query-URL pair to sort in the score array</param>
<param name="labelBegin">position of the first query-URL pair to sort in the label array</param>
<param name="count">number of query-URL pairs</param>
<param name="permutation">resulting query order array</param>
<param name="scores">The scores for all query URL pairs</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DcgCalculator.OrderingFromScores(Microsoft.ML.Trainers.FastTree.Dataset,System.Double[])">
<summary>
Calculates the order of documents. This returns an array with as many elements
as there are documents, where the subarray in a query's boundary will contain
elements from 0 up to but not including the number of documents in the query.
The first value in this subarray will contain the index of the document in the
subarray at top position (highest ranked), and the last value the index of the
document with bottom position (lowest ranked).
</summary>
<param name="dataset">The dataset over which to calculate the DCG.</param>
<param name="scores">The scores for all documents within the dataset.</param>
<returns></returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DescendingStablePessimisticPermutationComparer">
<summary>
Compares two integers that are indices into a vector of doubles.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DescendingStablePermutationComparer">
<summary>
Compares two integers that are indices into a vector of doubles.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.DescendingStableIdealComparer">
<summary>
Implements an HRS based comparer to sort the ranking results for the first N results.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DescendingStableIdealComparer.#ctor(System.Int32)">
<summary>
Creates an instance of the DescendingStableIdealComparer for the TOP N query/URL pairs
</summary>
<param name="comparefirstN">Specifies the TOP N query/URL pairs which should be used for sorting</param>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.DescendingStableIdealComparer.CompareFirstN">
<summary>
Specifies the TOP N query/URL pairs which should be used for sorting
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.DescendingStableIdealComparer.Labels">
<summary>
The HRS labels for all query/URL pairs
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.DescendingStableIdealComparer.LabelsOffset">
<summary>
The position inside the Labels where this query starts, for which the URLs should be reshuffled.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DescendingStableIdealComparer.System#Collections#Generic#IComparer{System#Int32}#Compare(System.Int32,System.Int32)">
<summary>
Compare two HRS ratings for query/URL pairs
</summary>
<param name="x">position for query/URL pair 1</param>
<param name="y">position for query/URL pair 2</param>
<returns></returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.#ctor(System.Int32,System.Int32)">
<summary>
Constructor
</summary>
<param name="numDocuments">number of documents</param>
<param name="maxLeaves">number of leaves</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.#ctor(Microsoft.ML.Trainers.FastTree.InternalRegressionTree,Microsoft.ML.Trainers.FastTree.Dataset)">
<summary>
Constructs partitioning object based on the documents and RegressionTree splits
NOTE: It has been optimized for speed and multiprocs with 10x gain on naive LINQ implementation
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.NumDocs">
<summary>
Returns the total number of documents handled by the partitioning
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.Initialize">
<summary>
Resets the data structure, as if it was newly created
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.GetDistribution(System.Double[],System.Double[],System.Int32,System.Random,System.Int32,System.Double[]@)">
<summary>
Does sampling with replacement on each leaf node and returns leaf count times of sample labels.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.Split(System.Int32,Microsoft.ML.Trainers.FastTree.IIntArrayForwardIndexer,System.UInt32,System.Int32)">
<summary>
Splits the documents of a specified leaf to its two children based on a feature and a threshold value
</summary>
<param name="leaf">the leaf being split</param>
<param name="indexer"></param>
<param name="threshold">the threshold</param>
<param name="gtChildIndex">Index of child node that contains documents whose split
feature value is greater than the split threshold</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.Split(System.Int32,Microsoft.ML.Trainers.FastTree.IntArray,System.Collections.Generic.HashSet{System.Int32},System.Int32)">
<summary>
Splits the documents of a specified leaf to its two children based on a feature and a threshold value
</summary>
<param name="leaf">the leaf being split</param>
<param name="bins">Split feature flock's bin</param>
<param name="categoricalIndices">Categorical feature indices</param>
<param name="gtChildIndex">Index of child node that contains documents whose split
feature value is greater than the split threshold</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.GetLeafDocumentPartitions(System.Int32,Microsoft.ML.Trainers.FastTree.IIntArrayForwardIndexer,System.UInt32,System.Int32[]@,System.Int32@,System.Int32[]@,System.Int32@)">
<summary>
Get the document partitions of a specified leaf if it is split based on a feature and a threshold value.
</summary>
<param name="leaf">the leaf being split</param>
<param name="indexer">the indexer to access the feature value</param>
<param name="threshold">the threshold</param>
<param name="leftDocuments">[out] the left documents split from the leaf</param>
<param name="leftDocumentSize">[out] the size of left documents</param>
<param name="rightDocuments">[out] the right documents split from the leaf</param>
<param name="rightDocumentSize">[out] the size of right documents</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.DocumentsInLeaf(System.Int32)">
<summary>
Returns an enumeration of the document indices associated with a specified leaf, in ascending order
</summary>
<param name="leaf">the leaf index</param>
<returns>the enumeration</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.NumDocsInLeaf(System.Int32)">
<summary>
How many documents are associated with a specified leaf
</summary>
<param name="leaf">the leaf</param>
<returns>the number of documents</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.Mean(System.Double[],System.Int32,System.Boolean)">
<summary>
Calculates the mean of a double array only on the elements that correspond to a specified leaf in the tree
</summary>
<param name="array">the double array</param>
<param name="leaf">the leaf index</param>
<param name="filterZeros"></param>
<returns>the mean</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.DocumentPartitioning.Mean(System.Double[],System.Double[],System.Int32,System.Boolean)">
<summary>
Calculates the weighted mean of a double array only on the elements that correspond to a specified leaf in the tree
</summary>
<param name="array">the double array</param>
<param name="sampleWeights">Weights of array elements</param>
<param name="leaf">the leaf index</param>
<param name="filterZeros"></param>
<returns>the mean</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IEarlyStoppingCriterion.CheckScore(System.Single,System.Single,System.Boolean@)">
<summary>
Check if the learning should stop or not.
</summary>
<param name="validationScore">A non negative number. Higher score means better result unless "_lowerIsBetter" is true.</param>
<param name="trainingScore">A non negative number. Higher score means better result unless "_lowerIsBetter" is true.</param>
<param name="isBestCandidate">True if the current result is the best ever.</param>
<returns>If true, the learning should stop.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.EarlyStoppingCriterion`1.CheckBestScore(System.Single)">
<summary>
Check if the given score is the best ever. The best score will be stored at this._bestScore.
</summary>
<param name="score">The latest score</param>
<returns>True if the given score is the best ever.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.MovingWindowEarlyStoppingCriterion.GetRecentAvg(System.Collections.Generic.Queue{System.Single})">
<summary>
Calculate the average score in the given list of scores.
</summary>
<returns>The moving average.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.MovingWindowEarlyStoppingCriterion.GetRecentBest(System.Collections.Generic.IEnumerable{System.Single})">
<summary>
Get the best score in the given list of scores.
</summary>
<param name="recentScores">The list of scores.</param>
<returns>The best score.</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.GLEarlyStoppingCriterion">
<summary>
Loss of Generality (GL).
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.LPEarlyStoppingCriterion">
<summary>
Low Progress (LP).
This rule fires when the improvements on the score stall.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.PQEarlyStoppingCriterion">
<summary>
Generality to Progress Ratio (PQ).
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.UPEarlyStoppingCriterion">
<summary>
Consecutive Loss in Generality (UP).
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.LassoBasedEnsembleCompressor">
<summary>
This implementation is based on:
Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization
Paths for Generalized Linear Models via Coordinate Descent.
https://www-stat.stanford.edu/~hastie/Papers/glmnet.pdf
</summary>
<remarks>Author was Yasser Ganjisaffar during his internship.</remarks>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.GradientDescent._numberOfDroppedTrees">
number of trees dropped in this iteration
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IGradientAdjuster">
<summary>
Interface for wrapping with weights of gradient target values
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IGradientAdjuster.AdjustTargetAndSetWeights(System.Double[],Microsoft.ML.Trainers.FastTree.ObjectiveFunctionBase,System.Double[]@)">
<summary>
Create wrapping of gradient target values
</summary>
<param name="gradient"></param>
<param name="objFunction">Objective functions can be used for constructing weights</param>
<param name="targetWeights"></param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.RandomForestOptimizer">
<summary>
This is a dummy optimizer. As random forest does not have any boosting-based optimization, this is a placeholder to be consistent
with other fast tree based applications
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.OptimizationAlgorithm.SmoothTree(Microsoft.ML.Trainers.FastTree.InternalRegressionTree,System.Double)">
<summary>
Regularize a regression tree with smoothing parameter alpha.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.SignatureParallelTrainer">
<summary>
Signature of Parallel trainer.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FindBestThresholdFromRawArrayFun">
<summary>
delegate function. This function is implemented in TLC, and called by TLC++. It will find best threshold
from raw histogram data (countByBin, sumTargetsByBin, sumWeightsByBin, numDocsInLeaf, sumTargets, sumWeights)
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IParallelTraining">
<summary>
Interface used for parallel training.
Mainly contains three parts:
1. interacts with IO: <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GetLocalBinConstructionFeatures(System.Int32)" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.SyncGlobalBoundary(System.Int32,System.Int32,System.Double[][])" />.
Data will be partitioned by rows in Data parallel and Voting Parallel.
To speed up the bin-finding process, it lets different workers find bins for different features.
Then perform global sync up.
In Feature parallel, every machine holds all data, so this is unneeded.
2. interacts with TreeLearner: <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.InitIteration(System.Boolean[]@)" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.CacheHistogram(System.Boolean,System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.SufficientStatsBase,System.Boolean)" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.IsNeedFindLocalBestSplit" />,
<see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.IsSkipNonSplittableHistogram" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.FindGlobalBestSplit(Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,Microsoft.ML.Trainers.FastTree.FindBestThresholdFromRawArrayFun,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo[])" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GetGlobalDataCountInLeaf(System.Int32,System.Int32@)" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.PerformGlobalSplit(System.Int32,System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo)" />.
A full process is:
Use <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.InitIteration(System.Boolean[]@)" /> to alter local active features.
Use <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GetGlobalDataCountInLeaf(System.Int32,System.Int32@)" /> to check smaller leaf and larger leaf.
Use <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.CacheHistogram(System.Boolean,System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.SufficientStatsBase,System.Boolean)" />, <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.IsNeedFindLocalBestSplit" /> and <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.IsSkipNonSplittableHistogram" /> to interactive with Feature histograms.
Use <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.FindGlobalBestSplit(Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,Microsoft.ML.Trainers.FastTree.FindBestThresholdFromRawArrayFun,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo[])" /> to sync up global best split
Use <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.PerformGlobalSplit(System.Int32,System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo)" /> to record global num_data in leaves.
3. interacts with Application: <see cref="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GlobalMean(Microsoft.ML.Trainers.FastTree.Dataset,Microsoft.ML.Trainers.FastTree.InternalRegressionTree,Microsoft.ML.Trainers.FastTree.DocumentPartitioning,System.Double[],System.Boolean)" />.
Output of leaves is calculated by newton step ( - sum(first_order_gradients) / sum(second_order_gradients)).
If data is partitioned by row, a sync up is needed for these sum results.
So it needs to call this to get the real output of leaves.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.InitEnvironment">
<summary>
Initialize the network connection.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.FinalizeEnvironment">
<summary>
Finalize the network.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.InitTreeLearner(Microsoft.ML.Trainers.FastTree.Dataset,System.Int32,System.Int32,System.Int32@)">
<summary>
Initialize once while construct tree learner.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.FinalizeTreeLearner">
<summary>
Finalize while tree learner is freed.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.InitIteration(System.Boolean[]@)">
<summary>
Initialize every time before training a tree.
Will alter activeFeatures in Feature parallel,
because it only needs to find thresholds for part of the features in feature parallel.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.FinalizeIteration">
<summary>
Finalize after trained one tree.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.CacheHistogram(System.Boolean,System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.SufficientStatsBase,System.Boolean)">
<summary>
Cache Histogram, it will be used for global aggregate.
Only used in Data parallel and Voting Parallel
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.IsNeedFindLocalBestSplit">
<summary>
Only return False in Data parallel.
Data parallel finds the best threshold after merging global histograms.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.IsSkipNonSplittableHistogram">
<summary>
True if need to skip non-splittable histogram.
Only will return False in Voting parallel.
That is because the local worker doesn't have global histograms in Voting parallel,
so the information about NonSplittable is not correct, and we cannot skip it.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.FindGlobalBestSplit(Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,Microsoft.ML.Trainers.FastTree.FindBestThresholdFromRawArrayFun,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo[])">
<summary>
Find best split among machines.
will save result in bestSplits.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GetGlobalDataCountInLeaf(System.Int32,System.Int32@)">
<summary>
Get global num_data on specific leaf.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.PerformGlobalSplit(System.Int32,System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo)">
<summary>
Used to record the global num_data on leaves.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GlobalMean(Microsoft.ML.Trainers.FastTree.Dataset,Microsoft.ML.Trainers.FastTree.InternalRegressionTree,Microsoft.ML.Trainers.FastTree.DocumentPartitioning,System.Double[],System.Boolean)">
<summary>
Get Global mean on different machines for data partitioning in tree.
Used for calculating leaf output value.
Will return an array that is the mean output of all leaves.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.GetLocalBinConstructionFeatures(System.Int32)">
<summary>
Get indices of features whose bins should be found locally.
After construct local boundary, should call <see href="SyncGlobalBoundary" />
to get boundaries for all features.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.IParallelTraining.SyncGlobalBoundary(System.Int32,System.Int32,System.Double[][])">
<summary>
Sync Global feature bucket.
used in Data parallel and Voting parallel.
Data are partitioned by row. To speed up the Global find bin process,
we can let different workers construct Bin Boundary for different features,
then perform a global sync up.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TestResult.RawValue">
<summary>
Raw value used for calculating final test result value.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TestResult.Factor">
<summary>
The factor used for calculating final test result value.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TestResult.Operator">
<summary>
The operator used for calculating final test result value.
Final value = Operator(RawValue, Factor)
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TestResult.LowerIsBetter">
<summary>
Indicates that the lower value of this metric is better
This is used for early stopping (with TestHistory and TestWindowWithTolerance)
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.RegressionTest.#ctor(Microsoft.ML.Trainers.FastTree.ScoreTracker,System.Nullable{System.Int32})">
<param name="scoreTracker"></param>
<param name="resultType">1: L1, 2: L2. Otherwise, return all.</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner">
<summary>
Trains regression trees.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.#ctor(Microsoft.ML.Trainers.FastTree.Dataset,System.Int32,System.Int32,System.Double,System.Double,System.Double,System.Double,System.Int32,System.Int32,System.Double,System.Boolean,System.Boolean,System.Double,System.Int32,System.Int32,System.Double,Microsoft.ML.Trainers.FastTree.IParallelTraining,System.Double,Microsoft.ML.Trainers.FastTree.Bundle,System.Int32,System.Double)">
<summary>
Creates a new LeastSquaresRegressionTreeLearner
</summary>
<param name="trainData">Data to train from</param>
<param name="numLeaves">Maximum leaves in tree</param>
<param name="minDocsInLeaf">Minimum allowable documents in leaf</param>
<param name="entropyCoefficient">Add the information gain of a split to the gain
times this value. Practically, this will favor more balanced splits</param>
<param name="featureFirstUsePenalty">Features never used before effectively
have this amount subtracted from their gain</param>
<param name="featureReusePenalty">Features used before effectively have
this amount subtracted from their gain</param>
<param name="softmaxTemperature">Regularization parameter, where we become
increasingly likely to select a non-optimal split feature the higher the
temperature is</param>
<param name="histogramPoolSize">Number of feature histograms to cache</param>
<param name="randomSeed">The seed to use for sampling</param>
<param name="splitFraction"></param>
<param name="filterZeros">Whether we should ignore zero lambdas for the
purpose of tree learning (generally a bad idea except for when zero indicates
that the document should be ignored)</param>
<param name="allowEmptyTrees">If false, failure to split the root will result in error, or if
true will result in null being returned when we try to fit a tree</param>
<param name="gainConfidenceLevel">Only consider a gain if its likelihood versus a random
choice gain is above a certain value (so 0.95 would mean restricting to gains that have less
than a 0.05 chance of being generated randomly through choice of a random split).</param>
<param name="maxCategoricalGroupsPerNode">Maximum categorical split points to consider when splitting on a
categorical feature.</param>
<param name="maxCategoricalSplitPointPerNode"></param>
<param name="bsrMaxTreeOutput">-1 if best step ranking is to be disabled, otherwise it
is interpreted as being similar to the maximum output for a leaf</param>
<param name="parallelTraining"></param>
<param name="minDocsPercentageForCategoricalSplit"></param>
<param name="bundling"></param>
<param name="minDocsForCategoricalSplit"></param>
<param name="bias"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.FitTargets(Microsoft.ML.IChannel,System.Boolean[],System.Double[])">
<summary>
Learns a new tree for the current outputs
</summary>
<returns>A regression tree</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.Initialize(System.Boolean[])">
<summary>
Clears data structures
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.FindBestSplitOfRoot(System.Double[])">
<summary>
finds best feature/threshold split of root node. Fills in BestSplitInfoPerLeaf[0]
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.FindBestSplitOfSiblings(System.Int32,System.Int32,Microsoft.ML.Trainers.FastTree.DocumentPartitioning,System.Double[])">
<summary>
Finds best feature/threshold split of <paramref name="lteChild"/> and <paramref name="gtChild"/>,
and fills in the corresponding elements of <see cref="F:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.BestSplitInfoPerLeaf"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.FindAndSetBestFeatureForLeaf(Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates)">
<summary>
After the gain for each feature has been computed, this function chooses the gain maximizing feature
and sets its info in the right places
This method is overridden in the MPI version of the code
</summary>
<param name="leafSplitCandidates">the FindBestThreshold leafSplitCandidates data structure that contains the best split information</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.FindBestThresholdForFlockThreadWorker(System.Int32)">
<summary>
The multithreading entry-point: finds the best threshold for a given flock at a given leaf.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.GetActiveFeatures(System.Int32,System.Int32)">
<summary>
Returns the set of features that are active within a particular range.
</summary>
<param name="min">The inclusive lower bound of the feature indices</param>
<param name="lim">The exclusive upper bound of the feature indices</param>
<returns>The feature indices within the range that are active</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.GetLeafSplitGain(System.Int32,System.Double,System.Double)">
<summary>
Returns the split gain for a particular leaf. Used on two leaves to calculate
the squared error gain for a particular leaf.
</summary>
<param name="count">Number of documents in this leaf</param>
<param name="sumTargets">Sum of the target values for this leaf</param>
<param name="sumWeights">Sum of the weights for this leaf, not meaningful if
<see cref="P:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.HasWeights"/> is <c>false</c></param>
<returns>The gain in least squared error</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.CalculateSplittedLeafOutput(System.Int32,System.Double,System.Double)">
<summary>
Calculates the output value for a leaf after splitting.
</summary>
<param name="count">Number of documents in this leaf</param>
<param name="sumTargets">Sum of the target values for this leaf</param>
<param name="sumWeights">Sum of the weights for this leaf, not meaningful if
<see cref="P:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.HasWeights"/> is <c>false</c></param>
<returns>The output value for a leaf</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.FindBestThresholdFromHistogram(Microsoft.ML.Trainers.FastTree.SufficientStatsBase,Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates,System.Int32)">
<summary>
Finds the best threshold to split on, and sets the appropriate values in the LeafSplitCandidates data structure.
</summary>
<param name="histogram">The sufficient stats accumulated for the flock</param>
<param name="leafSplitCandidates">The LeafSplitCandidates data structure</param>
<param name="flock">The index of the flock containing this feature</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates">
<summary>
Contains the memory data structures required for finding the best threshold for a given
feature at a given leaf.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates.Initialize(System.Int32,Microsoft.ML.Trainers.FastTree.DocumentPartitioning,System.Double[],System.Double[],System.Boolean)">
<summary>
Initializes the object for a specific leaf, with a certain subset of documents.
</summary>
<param name="leafIndex">The leaf index</param>
<param name="partitioning">The partitioning object that knows which documents have reached that leaf</param>
<param name="targets">The array of targets, which the regression tree is trying to fit</param>
<param name="weights">The array of weights for the document</param>
<param name="filterZeros">Whether filtering of zero gradients was turned on or not</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates.Initialize(System.Double[],System.Double[],System.Boolean)">
<summary>
Initializes the object for computing the root node split
</summary>
<param name="targets">the array of targets, which the regression tree is trying to fit</param>
<param name="weights"></param>
<param name="filterZeros"></param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.LeafSplitCandidates.Initialize">
<summary>
Initializes the object to do nothing
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.LeastSquaresRegressionTreeLearner.SplitInfo">
<summary>
A struct to store information about each leaf for splitting
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.TreeLearner.GetSizeOfReservedMemory">
<summary>
Get size of reserved memory for the tree learner.
The default implementation returns 0 directly, and the subclasses can return
different value if it reserves memory for training.
</summary>
<returns>size of reserved memory</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.TreeLearnerException">
<summary>
An exception class for an error which occurs in the midst of learning a tree.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.WinLossCalculator.#ctor(System.Int32,System.String)">
<summary>
Constructs a WinLoss calculator
</summary>
<param name="maxDocsPerQuery">the maximum number of documents per query</param>
<param name="sortingAlgorithm">a string describing the sorting algorithm to use</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.WinLossCalculator.WinLossRangeFromScores(Microsoft.ML.Trainers.FastTree.Dataset,System.Int16[],System.Double[])">
<summary>
Calculates the average WinLoss given the scores array.
For performance reasons it duplicates some code.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1">
<summary>
A list of <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase"/>'s derived class. To compute the output value of a
<see cref="T:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1"/>, we need to compute the output values of all trees in <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Trees"/>,
scale those values via <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.TreeWeights"/>, and finally sum the scaled values and <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Bias"/> up.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Bias">
<summary>
When doing prediction, this is a value added to the weighted sum of all <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Trees"/>' outputs.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.TreeWeights">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.TreeWeights"/>[i] is the i-th <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase"/>'s weight in <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Trees"/>.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Trees">
<summary>
<see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Trees"/>[i] is the i-th <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionTreeBase"/> in <see cref="P:Microsoft.ML.Trainers.FastTree.TreeEnsemble`1.Trees"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.InternalQuantileRegressionTree._labelsDistribution">
<summary>
Holds the labels of sampled instances for this tree. This value can be null when training, for example, random forest (FastForest).
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.InternalQuantileRegressionTree._instanceWeights">
<summary>
Holds the weights of sampled instances for this tree. This value can be null when training, for example, random forest (FastForest).
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalQuantileRegressionTree.LoadSampledLabels(Microsoft.ML.Data.VBuffer{System.Single}@,System.Single[],System.Single[],System.Int32,System.Int32)">
<summary>
Loads the sampled labels of this tree to the distribution array for the sparse instance type.
By calling for all the trees, the distribution array will have all the samples from all the trees
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalQuantileRegressionTree.ExtractLeafSamplesAndTheirWeights(System.Double[][]@,System.Double[][]@)">
<summary>
Copy training examples' labels and their weights to external variables.
</summary>
<param name="leafSamples">List of label collections. The type of a collection is a double array. The i-th label collection contains training examples' labels falling into the i-th leaf.</param>
<param name="leafSampleWeights">List of labels' weight collections. The type of a collection is a double array. The i-th collection contains weights of labels falling into the i-th leaf.
Specifically, leafSampleWeights[i][j] is the weight of leafSamples[i][j].</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.InternalRegressionTree">
Note that <see cref="T:Microsoft.ML.Trainers.FastTree.InternalRegressionTree"/> is shared between FastTree and LightGBM assemblies,
so <see cref="T:Microsoft.ML.Trainers.FastTree.InternalRegressionTree"/> has <see cref="T:Microsoft.ML.BestFriendAttribute"/>.
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.InternalRegressionTree._previousLeafValue">
<summary>
The value of this non-leaf node, prior to split when it was a leaf.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.CategoricalSplit">
<summary>
Indicates if a node's split feature was categorical.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.CategoricalSplitFeatures">
<summary>
Array of categorical values for the categorical feature that might be chosen as
a split feature for a node.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.CategoricalSplitFeatureRanges">
<summary>
For a given categorical feature that is chosen as a split feature for a node, this
array contains its start and end range in the input feature vector at prediction time.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.TreeType">
<summary>
Code to identify the type of tree in binary serialization. These values are
persisted, so they should remain consistent for the sake of deserialization
backwards compatibility.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.#ctor(System.Int32)">
<summary>
constructs a regression tree with an upper bound on depth
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.Create(System.Int32,System.Int32[],System.Double[],System.Single[],System.Single[],System.Int32[],System.Int32[],System.Double[],System.Int32[][],System.Boolean[])">
<summary>
Create a Regression Tree object from raw tree contents.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.NumLeaves">
<summary>
The current number of leaves in the tree.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.NumNodes">
<summary>
The current number of nodes in the tree. This doesn't include the number of leaves. For example, a tree,
node0
/ \
node1 leaf3
/ \
leaf1 leaf2
<see cref="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.NumNodes"/> and <see cref="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.NumLeaves"/> should be 2 and 3, respectively.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.MaxNumLeaves">
<summary>
The maximum number of leaves the internal structure of this tree can support.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.MaxNumNodes">
<summary>
The maximum number of nodes this tree can support.
</summary>
</member>
<member name="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.Weight">
<summary>
Weight of this tree in an <see cref="T:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.GetLteChildForNode(System.Int32)">
<summary>
Retrieve the less-than-threshold child node of the specified parent node.
</summary>
<param name="node">A 0-based index to specify a parent node. This value should be smaller than <see cref="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.NumNodes"/>.</param>
<returns>A child node's index of the specified node.</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.LeafValue(System.Int32)">
<summary>
Return the prediction value learned at the specified leaf node.
</summary>
<param name="leaf">A 0-based index to specify a leaf node. This value should be smaller than <see cref="P:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.NumLeaves"/>.</param>
<returns>The value associated with the specified leaf</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.ScaleOutputsBy(System.Double)">
<summary>
Scales all of the output values at the leaves of the tree by a given scalar
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.GetOutput(Microsoft.ML.Trainers.FastTree.Dataset.RowForwardIndexer.Row)">
<summary>
Evaluates the regression tree on a given document.
</summary>
<param name="featureBins">The row of binned feature values for the document</param>
<returns>the real-valued regression tree output</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.GetOutput(System.Int32[])">
<summary>
Evaluates the regression tree on a given binned instance.
</summary>
<param name="binnedInstance">A previously binned instance/document</param>
<returns>the real-valued regression tree output</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.GetLeafFrom(Microsoft.ML.Data.VBuffer{System.Single}@,System.Int32)">
<summary>
Returns leaf index the instance falls into, if we start the search from the <paramref name="root"/> node.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.GetLeaf(Microsoft.ML.Data.VBuffer{System.Single}@,System.Collections.Generic.List{System.Int32}@)">
<summary>
Returns the leaf node for the given feature vector, and populates 'path' with the list of internal nodes in the
path from the root to that leaf. If 'path' is null a new list is initialized. All elements in 'path' are cleared
before filling in the current path nodes.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.GetOutputs(Microsoft.ML.Trainers.FastTree.Dataset)">
<summary>
Returns the hypothesis output for an entire dataset.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.Split(System.Int32,System.Int32,System.Int32[],System.Int32[],System.Boolean,System.UInt32,System.Double,System.Double,System.Double,System.Double)">
<summary>
Turns a leaf of the tree into an interior node with two leaf-children.
</summary>
<param name="leaf">The index of the leaf to split.</param>
<param name="feature">The index of the feature used to split this leaf (as
it indexes the array of DerivedFeature instances passed to the tree ensemble format).</param>
<param name="categoricalSplitFeatures">Thresholds for categorical split.</param>
<param name="categoricalSplitRange">The start and end range of the categorical split features for this node.</param>
<param name="categoricalSplit">Whether the split is on a categorical feature.</param>
<param name="threshold">The threshold of the split.</param>
<param name="lteValue">The value of the leaf on the LTE side.</param>
<param name="gtValue">The value of the leaf on the GT side.</param>
<param name="gain">The splitgain of this split. This does not
affect the logic of the tree evaluation.</param>
<param name="gainPValue">The p-value associated with this split,
indicating confidence that this is a better than random split.
This does not affect the logic of the tree evaluation.</param>
<returns>Returns the node index</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.ToTreeEnsembleFormat(System.Text.StringBuilder,System.Text.StringBuilder,Microsoft.ML.Trainers.FastTree.FeaturesToContentMap,System.Int32@,System.Collections.Generic.Dictionary{System.Int32,System.Int32})">
<summary>
Returns a representation of the tree in the production
decision tree format (SHAREDDYNAMICRANKROOT\TreeEnsembleRanker\Tree.h).
The intent is that this representation can be consumed by that production format.
</summary>
<param name="sbEvaluator">Append the new evaluator to this stringbuilder.</param>
<param name="sbInput">Append any hitherto unused [Input:#] sections
to this stringbuilder.</param>
<param name="featureContents">The feature to content map.</param>
<param name="evaluatorCounter">A running count of evaluators. When
this method returns it should have one more entry.</param>
<param name="featureToId">A map of feature index (in the features array)
to the ID as it will be written in the file. This instance should be
used for all trees in the ensemble.</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.OptimizePathToLeaf(System.Int32)">
<summary>
Sets the path to a leaf to be indexed by 0,1,2,3,... and sets the leaf index to 0
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalRegressionTree.SwapNodePositions(System.Int32,System.Int32)">
<summary>
swaps the positions of two nodes in the tree, without any functional change to the tree
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble._firstInputInitializationContent">
<summary>
String appended to the text representation of <see cref="T:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble"/>. This is mainly used in <see cref="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.ToTreeEnsembleIni(Microsoft.ML.Trainers.FastTree.FeaturesToContentMap,System.String,System.Boolean,System.Boolean)"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.PopulateRawThresholds(Microsoft.ML.Trainers.FastTree.Dataset)">
<summary>
Converts the bin based thresholds to the raw real-valued thresholds.
To be called after training the ensemble.
</summary>
<param name="dataset">The dataset from which to get the bin upper bounds per feature</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.RemapFeatures(System.Int32[])">
<summary>
Remaps the features, to a new feature space. Is called in the event that the features
in the training <see cref="T:Microsoft.ML.Trainers.FastTree.Dataset"/> structure are different from the ones in the
original pipeline (possibly due to trivialization of input features), and so need to
be remapped back to the original space. Note that the tree once modified in this way
will no longer have features pointing to the original training <see cref="T:Microsoft.ML.Trainers.FastTree.Dataset"/>,
so this should be called only after <see cref="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.PopulateRawThresholds(Microsoft.ML.Trainers.FastTree.Dataset)"/> is called.
</summary>
<param name="oldToNewFeatures">The mapping from old original features, into the new features</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.ToTreeEnsembleIni(Microsoft.ML.Trainers.FastTree.FeaturesToContentMap,System.String,System.Boolean,System.Boolean)">
<summary>
returns the ensemble in the production TreeEnsemble format
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.GetFeatureContributions(Microsoft.ML.Data.VBuffer{System.Single}@,Microsoft.ML.Data.VBuffer{System.Single}@,Microsoft.ML.Data.BufferBuilder{System.Single}@)">
<summary>
Returns a vector of feature contributions for a given example.
<paramref name="builder"/> is used as a buffer to accumulate the contributions across trees.
If <paramref name="builder"/> is null, it will be created, otherwise it will be reused.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.InternalTreeEnsemble.GetMaxFeatureIndex">
<summary>
Retrieve the max feature index used across all node's split functions.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.FeaturesToContentMap">
<summary>
A class that, given a <see cref="T:Microsoft.ML.Data.RoleMappedSchema"/>,
provides a mechanism for getting the corresponding input INI content for the features.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FeaturesToContentMap.#ctor(Microsoft.ML.Data.RoleMappedSchema)">
<summary>
Maps input features names to their input INI content based on the metadata of the
features column. If the <c>IniContent</c> slotwise string metadata is present, that
is used, or else default content is derived from the slot names.
</summary>
<seealso cref="F:Microsoft.ML.Data.AnnotationUtils.Kinds.SlotNames"/>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Algorithms.FindFirstGE``1(``0[],``0)">
<summary>
Returns the index of the first array position that is larger than or equal to val
</summary>
<typeparam name="T">an IComparable type</typeparam>
<param name="array">a sorted array of values</param>
<param name="val">the value to search for</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Algorithms.FindLastLE``1(``0[],``0)">
<summary>
Returns the index of the last array position that is less than or equal to val
</summary>
<typeparam name="T">an IComparable type</typeparam>
<param name="array">a sorted array of values</param>
<param name="val">the value to search for</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Algorithms.TopK``1(``0[],System.Int32,System.Int32,``0[],System.Int32[])">
<summary>
Finds the largest k entries in an array (with offset and length)
</summary>
<typeparam name="T">An IComparable type</typeparam>
<param name="array">The array being searched</param>
<param name="offset">An offset into the array</param>
<param name="length">The length of the search</param>
<param name="topK">The values of the top K</param>
<param name="topKPositions">The positions of the top K</param>
<returns>The number of entries set in topK and topKPositions (length could be less than K)</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Algorithms.Min``1(``0[],System.Int32@)">
<summary>
Finds the minimum and the argmin in an array of values
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Algorithms.MergeSortedUniqued``1(``0[][])">
<summary>
Takes an arbitrary array of sorted uniqued IComparables and returns a sorted uniqued merge
</summary>
<typeparam name="T">An IComparable </typeparam>
<param name="arrays">An array of sorted uniqued arrays</param>
<returns>A sorted and uniqued merge</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.BlockingThreadPool">
<summary>
This class wraps the standard .NET ThreadPool and adds the following functionality:
1) the user explicitly defines a maximum of concurrently running threads
2) if the maximum is k, and k work items are already running, a call to RunOrBlock will block until a
thread is available
3) a work item can be any function with 6 or less arguments
4) a work item knows the index of the thread it is running on - this can be used if the threads share
common resources
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BlockingThreadPool.Initialize(System.Int32)">
<summary>
constructor
</summary>
<param name="numThreads">the maximal number of concurrent threads</param>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.BufferPoolManager">
<summary>
This class enables basic buffer pooling.
It supports different types of buffers and returns buffers of the requested size or larger.
This class was implemented to reduce frequent allocation/deallocation of large buffers which caused fragmentation of the large object heap.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BufferPoolManager.MinBufferSizeInBytes">
<summary>
The minimum size in bytes for a buffer to be stored in the pool
This is the minimum size in bytes for an object to be stored in the large object heap
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.FastTree.BufferPoolManager._bufferPools">
<summary>
A dictionary containing all buffer pool types
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BufferPoolManager.TakeBuffer``1(System.Int32)">
<summary>
Gets a buffer from the pool with at least the same size as passed as input parameter
</summary>
<typeparam name="T">Pool type</typeparam>
<param name="size">Minimum size required</param>
<returns>The buffer requested</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BufferPoolManager.ReturnBuffer``1(``0[]@)">
<summary>
Returns a buffer back to the pool.
It only keeps buffers of at least MinBufferSizeInBytes = 85K bytes
</summary>
<param name="buffer">The buffer array to add to the pool of buffers</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BufferPoolManager.ReleaseAllAvailableBuffers(System.Type)">
<summary>
Releases all available buffers in a specific pool
</summary>
<param name="type">Buffer pool type</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BufferPoolManager.ReleaseAllAvailableBuffers">
<summary>
Releases all available buffers in all pools
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BufferPoolManager.InitializeBufferPool``1">
<summary>
Initializes a new buffer pool of a specific type
</summary>
<typeparam name="T">Type of buffer to initialize</typeparam>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.BufferPoolManager.InitializeBufferPool(System.Type)">
<summary>
Initializes a new buffer pool of a specific type
</summary>
<param name="type">Type of buffer to initialize</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.FastTreeIniFileUtils.AddCalibrationToIni(Microsoft.ML.IHost,System.String,Microsoft.ML.Calibrators.ICalibrator)">
<summary>
Get the calibration summary in INI format
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.LinqExtensions.CumulativeSum``1(System.Collections.Generic.IEnumerable{System.Int32})">
<summary>
Running sum. Converts a sequence like 1, 2, 3, 4
to 1, 3, 6, 10
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.MappedObjectPool`1">
<summary>
Implements a paging mechanism on indexed objects.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.MappedObjectPool`1.#ctor(`0[],System.Int32)">
<summary>
Initializes a new instance of the <see cref="T:Microsoft.ML.Trainers.FastTree.MappedObjectPool`1"/> class.
</summary>
<param name="pool">A pool of objects on top of which the paging mechanism is built</param>
<param name="maxIndex">The maximal index</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.MappedObjectPool`1.Get(System.Int32,`0@)">
<summary>
If the given index maps to a cached object, that object is retrieved and the return value is true.
If the index is not cached, an object from the pool is retrieved (possibly paging-out the least-recently used) and the return value is false.
</summary>
<param name="index">The requested index</param>
<param name="obj">The retrieved object</param>
<returns>true if the index was found, false if a new object was assigned from the pool</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.MappedObjectPool`1.Reset">
<summary>
Resets the MappedObjectPool
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.PseudorandomFunction">
<summary>
This class defines a pseudorandom function, mapping a number to
a hard to predict but deterministic other number, through some
nefarious means.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.StreamExtensions.UndisposableStream">
<summary>
A stream class that suppresses the dispose signal
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.StreamExtensions.ReadCompressed(System.IO.Stream)">
<summary>
Reads a compressed array of bytes from the stream (written by WriteCompressed)
</summary>
<param name="stream">The stream to read from</param>
<returns>The decompressed bytes</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.StreamExtensions.WriteCompressed(System.IO.Stream,System.Byte[],System.Int32,System.Int32)">
<summary>
Writes an array of bytes to the stream with compression
</summary>
<param name="stream">Stream to write to</param>
<param name="array">Array to write</param>
<param name="offset">The byte offset into the array to write</param>
<param name="count">The number of bytes from the array to write</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.StreamExtensions.WriteCompressed(System.IO.Stream,System.Byte[])">
<summary>
Writes an array of bytes to the stream with compression
</summary>
<param name="stream">Stream to write to</param>
<param name="array">Array to write</param>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.ThreadTaskManager.MakeTask(System.Collections.Generic.IEnumerable{System.Action})">
<summary>
Makes a new task using the subtasks
</summary>
<param name="subTasks">subtasks composing the task</param>
<returns>An IThreadTask to run the tasks</returns>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.ThreadTaskManager.MakeTask(System.Action{System.Int32},System.Int32)">
<summary>
Makes a new task from the supplied action that takes an integer argument, from 0...max
</summary>
<param name="subTaskAction">Action to run</param>
<param name="maxArgument">The max range of the argument</param>
<returns>A task that runs the action using each value of the argument from 0...max</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.IThreadTask">
<summary>
Interface for a decomposable task that runs on many threads
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Timer">
<summary>
Static class for timing events.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Timer.TimerState.ToString">
<summary>
Gets a string summary of the total times.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Timer.GetTicks(Microsoft.ML.Trainers.FastTree.TimerEvent)">
<summary>
Returns the total number of CPU ticks spent in the specified timer so far.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Timer.Time(Microsoft.ML.Trainers.FastTree.TimerEvent)">
<summary>
Creates a timed event which, when disposed, adds to the total time of that event type.
</summary>
<param name="e">The type of event</param>
<returns>A timed event</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.Timer.TimedEvent">
<summary>
An object which, when disposed, adds to the total time of that event type.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.FastTree.Timer.GetString">
<summary>
Gets a string summary of the total times.
</summary>
<returns>A string summary of the total times.</returns>
</member>
<member name="T:Microsoft.ML.Trainers.FastTree.ToByteArrayExtensions">
<summary>
This class contains extension methods that support binary serialization of some base C# types
and arrays of these types.
SizeInBytes - the number of bytes in the binary representation
type.ToByteArray(buffer, ref position) - will write the binary representation of the type to
the byte buffer at the given position, and will increment the position to the end of
the representation
byte[].ToXXX(ref position) - converts the binary representation back into the original type
</summary>
</member>
<member name="T:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper">
<summary>
A bindable mapper wrapper for tree ensembles, that creates a bound mapper with three outputs:
1. A vector containing the individual tree outputs of the tree ensemble.
2. An indicator vector for the leaves that the feature vector falls on in the tree ensemble.
3. An indicator vector for the internal nodes on the paths that the feature vector falls on in the tree ensemble.
</summary>
</member>
<member name="F:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.TreeValuesColumnId">
<summary>
Column index of values predicted by all trees in an ensemble in <see cref="P:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.OutputSchema"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.LeafIdsColumnId">
<summary>
Column index of leaf IDs containing the considered example in <see cref="P:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.OutputSchema"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.PathIdsColumnId">
<summary>
Column index of path IDs which specify the paths the considered example passing through per tree in <see cref="P:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.OutputSchema"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Data.TreeEnsembleFeaturizerBindableMapper.BoundMapper.GetDependenciesForNewColumns(System.Collections.Generic.IEnumerable{Microsoft.Data.DataView.DataViewSchema.Column})">
<summary>
Given a set of columns, return the input columns that are needed to generate those output columns.
</summary>
</member>
<member name="T:Microsoft.ML.Data.TreeEnsembleFeaturizerTransform">
<member name="TreeEnsembleFeaturizerTransform">
<summary>
Trains a tree ensemble, or loads it from a file, then maps a numeric feature vector to outputs.
</summary>
<remarks>
In machine learning it is a pretty common and powerful approach to utilize the already trained model in the process of defining features.
<para>One such example would be the use of model's scores as features to downstream models. For example, we might run clustering on the original features,
and use the cluster distances as the new feature set.
Instead of consuming the model's output, we could go deeper, and extract the 'intermediate outputs' that are used to produce the final score. </para>
There are a number of famous or popular examples of this technique:
<list type="bullet">
<item><description>A deep neural net trained on the ImageNet dataset, with the last layer removed, is commonly used to compute the 'projection' of the image into the 'semantic feature space'.
It is observed that the Euclidean distance in this space often correlates with the 'semantic similarity': that is, all pictures of pizza are located close together,
and far away from pictures of kittens. </description></item>
<item><description>A matrix factorization and/or LDA model is also often used to extract the 'latent topics' or 'latent features' associated with users and items.</description></item>
<item><description>The weights of the linear model are often used as a crude indicator of 'feature importance'. At the very minimum, the 0-weight features are not needed by the model,
and there's no reason to compute them. </description></item>
</list>
<para>
Tree featurizer uses the decision tree ensembles for feature engineering in the same fashion as above.
It trains a tree ensemble, or loads it from a file, then maps a numeric feature vector to three outputs:
</para>
<list type="number">
<item><description>A vector containing the individual tree outputs of the tree ensemble.</description></item>
<item><description>A vector indicating the leaves that the feature vector falls on in the tree ensemble.</description></item>
<item><description>A vector indicating the paths that the feature vector falls on in the tree ensemble.</description></item>
</list>
If both a model file and a trainer are specified, the model file will be used. If neither is specified,
will train a default FastTree model.
This can handle key labels by training a regression model towards their optionally permuted indices.
<para>Let's assume that we've built a tree ensemble of 100 trees with 100 leaves each (it doesn't matter whether boosting was used or not in training).
If we associate each leaf of each tree with a sequential integer, we can, for every incoming example x,
produce an indicator vector L(x), where Li(x) = 1 if the example x 'falls' into the leaf #i, and 0 otherwise.</para>
<para>Thus, for every example x, we produce a 10000-valued vector L, with exactly 100 1s and the rest zeroes.
This 'leaf indicator' vector can be considered the ensemble-induced 'footprint' of the example.</para>
<para>The 'distance' between two examples in the L-space is actually a Hamming distance, and is equal to the number of trees that do not distinguish the two examples.</para>
<para>We could repeat the same thought process for the non-leaf, or internal, nodes of the trees (we know that each tree has exactly 99 of them in our 100-leaf example),
and produce another indicator vector, N (size 9900), for each example, indicating the 'trajectory' of each example through each of the trees.</para>
<para>The distance in the combined 19900-dimensional LN-space will be equal to the number of 'decisions' in all trees that 'agree' on the given pair of examples.</para>
<para>The TreeLeafFeaturizer also produces a third vector, T, which is defined as Ti(x) = output of tree #i on example x.</para>
</remarks>
<example>
<code language="csharp">
pipeline.Add(new TreeLeafFeaturizer())
</code>
</example>
</member>
</member>
<member name="T:Microsoft.ML.Data.TreeEnsembleFeaturizerTransform.ArgumentsForEntryPoint">
<summary>
REVIEW: Ideally we should have only one arguments class by using IComponentFactory for the model.
For now it probably warrants a REVIEW comment here in case we'd like to merge these two arguments in the future.
Also, it might be worthwhile to extract the common arguments to a base class.
</summary>
</member>
<member name="T:Microsoft.ML.TreeExtensions">
<summary>
Tree <see cref="T:Microsoft.ML.TrainCatalogBase"/> extension methods.
</summary>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTree(Microsoft.ML.RegressionCatalog.RegressionTrainers,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Predict a target using a decision tree regression model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="numLeaves">The maximum number of leaves per decision tree.</param>
<param name="minDatapointsInLeaves">The minimal number of datapoints allowed in a leaf of a regression tree, out of the subsampled data.</param>
<param name="learningRate">The learning rate.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTree(Microsoft.ML.RegressionCatalog.RegressionTrainers,Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer.Options)">
<summary>
Predict a target using a decision tree regression model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTree(Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Predict a target using a decision tree binary classification model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.BinaryClassificationCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="numLeaves">The maximum number of leaves per decision tree.</param>
<param name="minDatapointsInLeaves">The minimal number of datapoints allowed in a leaf of the tree, out of the subsampled data.</param>
<param name="learningRate">The learning rate.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTree(Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers,Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer.Options)">
<summary>
Predict a target using a decision tree binary classification model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.BinaryClassificationCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTree(Microsoft.ML.RankingCatalog.RankingTrainers,System.String,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Ranks a series of inputs based on their relevance, training a decision tree ranking model through the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RankingCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="rowGroupColumnName">The name of the group column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="numLeaves">The maximum number of leaves per decision tree.</param>
<param name="minDatapointsInLeaves">The minimal number of datapoints allowed in a leaf of the tree, out of the subsampled data.</param>
<param name="learningRate">The learning rate.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTree(Microsoft.ML.RankingCatalog.RankingTrainers,Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer.Options)">
<summary>
Ranks a series of inputs based on their relevance, training a decision tree ranking model through the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RankingCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.GeneralizedAdditiveModels(Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers,System.String,System.String,System.String,System.Int32,System.Double,System.Int32)">
<summary>
Predict a target using generalized additive models trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.BinaryClassificationCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numIterations">The number of iterations to use in learning the features.</param>
<param name="learningRate">The learning rate. GAMs work best with a small learning rate.</param>
<param name="maxBins">The maximum number of bins to use to approximate features.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.GeneralizedAdditiveModels(Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers,Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer.Options)">
<summary>
Predict a target using generalized additive models trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.BinaryClassificationGamTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.BinaryClassificationCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.GeneralizedAdditiveModels(Microsoft.ML.RegressionCatalog.RegressionTrainers,System.String,System.String,System.String,System.Int32,System.Double,System.Int32)">
<summary>
Predict a target using generalized additive models trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionGamTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numIterations">The number of iterations to use in learning the features.</param>
<param name="learningRate">The learning rate. GAMs work best with a small learning rate.</param>
<param name="maxBins">The maximum number of bins to use to approximate features.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.GeneralizedAdditiveModels(Microsoft.ML.RegressionCatalog.RegressionTrainers,Microsoft.ML.Trainers.FastTree.RegressionGamTrainer.Options)">
<summary>
Predict a target using generalized additive models trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.RegressionGamTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTreeTweedie(Microsoft.ML.RegressionCatalog.RegressionTrainers,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32,System.Double)">
<summary>
Predict a target using a decision tree regression model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="numLeaves">The maximum number of leaves per decision tree.</param>
<param name="minDatapointsInLeaves">The minimal number of datapoints allowed in a leaf of the tree, out of the subsampled data.</param>
<param name="learningRate">The learning rate.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastTreeTweedie(Microsoft.ML.RegressionCatalog.RegressionTrainers,Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer.Options)">
<summary>
Predict a target using a decision tree regression model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastForest(Microsoft.ML.RegressionCatalog.RegressionTrainers,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32)">
<summary>
Predict a target using a decision tree regression model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestRegression"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="numLeaves">The maximum number of leaves per decision tree.</param>
<param name="minDatapointsInLeaves">The minimal number of datapoints allowed in a leaf of the tree, out of the subsampled data.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastForest(Microsoft.ML.RegressionCatalog.RegressionTrainers,Microsoft.ML.Trainers.FastTree.FastForestRegression.Options)">
<summary>
Predict a target using a decision tree regression model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestRegression"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.RegressionCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastForest(Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers,System.String,System.String,System.String,System.Int32,System.Int32,System.Int32)">
<summary>
Predict a target using a decision tree binary classification model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestClassification"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.BinaryClassificationCatalog"/>.</param>
<param name="labelColumnName">The name of the label column.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="numTrees">Total number of decision trees to create in the ensemble.</param>
<param name="numLeaves">The maximum number of leaves per decision tree.</param>
<param name="minDatapointsInLeaves">The minimal number of datapoints allowed in a leaf of the tree, out of the subsampled data.</param>
</member>
<member name="M:Microsoft.ML.TreeExtensions.FastForest(Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers,Microsoft.ML.Trainers.FastTree.FastForestClassification.Options)">
<summary>
Predict a target using a decision tree binary classification model trained with the <see cref="T:Microsoft.ML.Trainers.FastTree.FastForestClassification"/>.
</summary>
<param name="catalog">The <see cref="T:Microsoft.ML.BinaryClassificationCatalog"/>.</param>
<param name="options">Algorithm advanced settings.</param>
</member>
</members>
</doc>