author     Patrick Simianer <p@simianer.de>          2012-02-21 09:34:59 +0100
committer  Patrick Simianer <p@simianer.de>          2012-02-21 09:34:59 +0100
commit     00e37f970923b54904c8d1abc72ff738968bf66d
tree       ae040f93fc80f1e9bac89f99fd0fbb6b3dcc9e1a  /dtrain
parent     564b14a16c798c82609f4bca1092c1a84614b6d3
fixed output, removed obsolete files
Diffstat (limited to 'dtrain')
-rwxr-xr-x  dtrain/binning.rb                       35
-rw-r--r--  dtrain/countofcounts                  3894
-rw-r--r--  dtrain/dtrain.cc                         9
-rw-r--r--  dtrain/dtrain.h                          2
-rw-r--r--  dtrain/test/mira_update/Hildreth.cpp   187
-rw-r--r--  dtrain/test/mira_update/Hildreth.h      10
-rw-r--r--  dtrain/test/mira_update/dtrain.cc      532
-rw-r--r--  dtrain/test/mira_update/sample.h       101
8 files changed, 838 insertions, 3932 deletions
```diff
diff --git a/dtrain/binning.rb b/dtrain/binning.rb
deleted file mode 100755
index 674f3246..00000000
--- a/dtrain/binning.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-bins = []
-bin_sz = 0
-1.upto(3).each { |i|
-  bin_sz = STDIN.gets.strip.split(",")[1].to_i
-  bins.push [[i], bin_sz]
-}
-
-cur_bin = []
-cur_bin_sz = 0
-while line = STDIN.gets
-  count, countcount = line.strip.split ","
-  count = count.to_i
-  countcount = countcount.to_i
-  if (cur_bin_sz + countcount) > bin_sz
-    bins.push [cur_bin, cur_bin_sz]
-    cur_bin = []
-    cur_bin_sz = countcount
-  else
-    cur_bin.push count
-    cur_bin_sz += countcount
-  end
-end
-bins.push [cur_bin, cur_bin_sz]
-
-c = 0
-e = 0
-bins.each { |i|
-  puts "#{e} | #{i[0].size}: #{i[0][0]}.. #{i[1]}" if i[0].size > 0
-  c += 1 if i[0].size > 0
-  e += 1
-}
-puts "#{c} bins (#{bins.size})"
-puts "bin sz #{bin_sz}"
-
-
```

```diff
diff --git a/dtrain/countofcounts b/dtrain/countofcounts
deleted file mode 100644
index 8752b430..00000000
--- a/dtrain/countofcounts
+++ /dev/null
@@ -1,3894 +0,0 @@
-1,3109341
-2,1653564
-3,1047846
 [3,891 more deleted lines of "count,count-of-counts" pairs omitted]
```
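The two deletions above belong together: `binning.rb` consumed the `countofcounts` file (lines of `count,count-of-counts`) and grouped the counts into bins of roughly equal total mass, with the third header line fixing the target mass. For reference, a minimal C++ sketch of that binning idea; it loosely mirrors the deleted script and is illustrative only (names and I/O handling are not part of the repository):

```cpp
// Illustrative re-sketch of the deleted binning.rb (not part of the repo).
// Reads "count,count-of-counts" lines from stdin; the first three lines
// each form their own bin, and the last of them fixes the target bin mass.
#include <cstdio>
#include <utility>
#include <vector>

int main() {
  long bin_sz = 0, count, cc;
  std::vector<std::pair<long, long> > bins;  // (first count in bin, bin mass)
  for (int i = 1; i <= 3 && std::scanf("%ld,%ld", &count, &cc) == 2; ++i) {
    bin_sz = cc;                     // last header line sets the target mass
    bins.push_back(std::make_pair(count, cc));
  }
  long cur_first = -1, cur_mass = 0;
  while (std::scanf("%ld,%ld", &count, &cc) == 2) {
    if (cur_mass + cc > bin_sz) {    // close the current bin, start a new one
      bins.push_back(std::make_pair(cur_first, cur_mass));
      cur_first = count;
      cur_mass = cc;
    } else {
      if (cur_first < 0) cur_first = count;
      cur_mass += cc;
    }
  }
  bins.push_back(std::make_pair(cur_first, cur_mass));
  for (size_t i = 0; i < bins.size(); ++i)
    std::printf("%ld.. %ld\n", bins[i].first, bins[i].second);
  std::printf("%zu bins (target mass %ld)\n", bins.size(), bin_sz);
  return 0;
}
```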
```diff
diff --git a/dtrain/dtrain.cc b/dtrain/dtrain.cc
index 18024bae..cdf95c95 100644
--- a/dtrain/dtrain.cc
+++ b/dtrain/dtrain.cc
@@ -216,6 +216,7 @@ main(int argc, char** argv)
   score_t max_score = 0.;
   unsigned best_it = 0;
   float overall_time = 0.;
+  unsigned pair_count = 0, feature_count = 0;
 
   // output cfg
   if (!quiet) {
@@ -400,10 +401,12 @@ main(int argc, char** argv)
     if (pair_sampling == "PRO")
       PROsampling(samples, pairs);
     npairs += pairs.size();
+    pair_count += 2*pairs.size();
 
     for (vector<pair<ScoredHyp,ScoredHyp> >::iterator it = pairs.begin();
          it != pairs.end(); it++) {
       score_t rank_error = it->second.score - it->first.score;
+      feature_count += it->first.f.size() + it->second.f.size();
       if (!gamma) {
         // perceptron
         if (rank_error > 0) {
@@ -534,9 +537,11 @@ if (false) {
     cerr << " avg #rank err: ";
     cerr << rank_errors/(float)in_sz << endl;
     cerr << " avg #margin viol: ";
-    cerr << margin_violations/float(in_sz) << endl;
-    cerr << " non0 feature count: ";
+    cerr << margin_violations/(float)in_sz << endl;
+    cerr << " non0 feature count: " << endl;
     cerr << nonz << endl;
+    cerr << " avg f count: ";
+    cerr << feature_count/(float)pair_count;
   }
 
   if (hstreaming) {
```
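The new counters above make the reported statistic explicit: `pair_count` counts hypotheses (two per sampled pair) and `feature_count` sums the nonzero features of both pair members, so `avg f count` is the average number of active features per hypothesis. A standalone sketch of the arithmetic, with made-up numbers:

```cpp
// Illustrative only: how the new "avg f count" statistic is formed.
#include <cstdio>

int main() {
  unsigned pair_count = 0, feature_count = 0;
  // Suppose one pass sampled 3 pairs whose members carry these
  // nonzero-feature counts (hypothetical numbers):
  unsigned first_f[]  = {12,  9, 15};
  unsigned second_f[] = {10, 11, 14};
  pair_count += 2*3;                       // two hypotheses per pair
  for (int i = 0; i < 3; ++i)
    feature_count += first_f[i] + second_f[i];
  std::printf(" avg f count: %g\n", feature_count/(float)pair_count);
  // prints 71/6 = 11.8333..., i.e. active features per hypothesis
  return 0;
}
```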
```diff
diff --git a/dtrain/dtrain.h b/dtrain/dtrain.h
index 3d76bd7f..14ef410e 100644
--- a/dtrain/dtrain.h
+++ b/dtrain/dtrain.h
@@ -13,7 +13,7 @@
 
 #include "filelib.h"
 
-//#define DTRAIN_LOCAL
+#define DTRAIN_LOCAL
 
 #define DTRAIN_DOTS 100 // when to display a '.'
 #define DTRAIN_GRAMMAR_DELIM "########EOS########"
```

```diff
diff --git a/dtrain/test/mira_update/Hildreth.cpp b/dtrain/test/mira_update/Hildreth.cpp
new file mode 100644
index 00000000..0e67eb15
--- /dev/null
+++ b/dtrain/test/mira_update/Hildreth.cpp
@@ -0,0 +1,187 @@
+#include "Hildreth.h"
+#include "sparse_vector.h"
+
+using namespace std;
+
+namespace Mira {
+  vector<double> Hildreth::optimise (vector< SparseVector<double> >& a, vector<double>& b) {
+
+    size_t i;
+    int max_iter = 10000;
+    double eps = 0.00000001;
+    double zero = 0.000000000001;
+
+    vector<double> alpha ( b.size() );
+    vector<double> F ( b.size() );
+    vector<double> kkt ( b.size() );
+
+    double max_kkt = -1e100;
+
+    size_t K = b.size();
+
+    double A[K][K];
+    bool is_computed[K];
+    for ( i = 0; i < K; i++ )
+    {
+      A[i][i] = a[i].dot(a[i]);
+      is_computed[i] = false;
+    }
+
+    int max_kkt_i = -1;
+
+
+    for ( i = 0; i < b.size(); i++ )
+    {
+      F[i] = b[i];
+      kkt[i] = F[i];
+      if ( kkt[i] > max_kkt )
+      {
+        max_kkt = kkt[i];
+        max_kkt_i = i;
+      }
+    }
+
+    int iter = 0;
+    double diff_alpha;
+    double try_alpha;
+    double add_alpha;
+
+    while ( max_kkt >= eps && iter < max_iter )
+    {
+
+      diff_alpha = A[max_kkt_i][max_kkt_i] <= zero ? 0.0 : F[max_kkt_i]/A[max_kkt_i][max_kkt_i];
+      try_alpha = alpha[max_kkt_i] + diff_alpha;
+      add_alpha = 0.0;
+
+      if ( try_alpha < 0.0 )
+        add_alpha = -1.0 * alpha[max_kkt_i];
+      else
+        add_alpha = diff_alpha;
+
+      alpha[max_kkt_i] = alpha[max_kkt_i] + add_alpha;
+
+      if ( !is_computed[max_kkt_i] )
+      {
+        for ( i = 0; i < K; i++ )
+        {
+          A[i][max_kkt_i] = a[i].dot(a[max_kkt_i] ); // for version 1
+          //A[i][max_kkt_i] = 0; // for version 1
+          is_computed[max_kkt_i] = true;
+        }
+      }
+
+      for ( i = 0; i < F.size(); i++ )
+      {
+        F[i] -= add_alpha * A[i][max_kkt_i];
+        kkt[i] = F[i];
+        if ( alpha[i] > zero )
+          kkt[i] = abs ( F[i] );
+      }
+      max_kkt = -1e100;
+      max_kkt_i = -1;
+      for ( i = 0; i < F.size(); i++ )
+        if ( kkt[i] > max_kkt )
+        {
+          max_kkt = kkt[i];
+          max_kkt_i = i;
+        }
+
+      iter++;
+    }
+
+    return alpha;
+  }
+
+  vector<double> Hildreth::optimise (vector< SparseVector<double> >& a, vector<double>& b, double C) {
+
+    size_t i;
+    int max_iter = 10000;
+    double eps = 0.00000001;
+    double zero = 0.000000000001;
+
+    vector<double> alpha ( b.size() );
+    vector<double> F ( b.size() );
+    vector<double> kkt ( b.size() );
+
+    double max_kkt = -1e100;
+
+    size_t K = b.size();
+
+    double A[K][K];
+    bool is_computed[K];
+    for ( i = 0; i < K; i++ )
+    {
+      A[i][i] = a[i].dot(a[i]);
+      is_computed[i] = false;
+    }
+
+    int max_kkt_i = -1;
+
+
+    for ( i = 0; i < b.size(); i++ )
+    {
+      F[i] = b[i];
+      kkt[i] = F[i];
+      if ( kkt[i] > max_kkt )
+      {
+        max_kkt = kkt[i];
+        max_kkt_i = i;
+      }
+    }
+
+    int iter = 0;
+    double diff_alpha;
+    double try_alpha;
+    double add_alpha;
+
+    while ( max_kkt >= eps && iter < max_iter )
+    {
+
+      diff_alpha = A[max_kkt_i][max_kkt_i] <= zero ? 0.0 : F[max_kkt_i]/A[max_kkt_i][max_kkt_i];
+      try_alpha = alpha[max_kkt_i] + diff_alpha;
+      add_alpha = 0.0;
+
+      if ( try_alpha < 0.0 )
+        add_alpha = -1.0 * alpha[max_kkt_i];
+      else if (try_alpha > C)
+        add_alpha = C - alpha[max_kkt_i];
+      else
+        add_alpha = diff_alpha;
+
+      alpha[max_kkt_i] = alpha[max_kkt_i] + add_alpha;
+
+      if ( !is_computed[max_kkt_i] )
+      {
+        for ( i = 0; i < K; i++ )
+        {
+          A[i][max_kkt_i] = a[i].dot(a[max_kkt_i] ); // for version 1
+          //A[i][max_kkt_i] = 0; // for version 1
+          is_computed[max_kkt_i] = true;
+        }
+      }
+
+      for ( i = 0; i < F.size(); i++ )
+      {
+        F[i] -= add_alpha * A[i][max_kkt_i];
+        kkt[i] = F[i];
+        if (alpha[i] > C - zero)
+          kkt[i]=-kkt[i];
+        else if (alpha[i] > zero)
+          kkt[i] = abs(F[i]);
+
+      }
+      max_kkt = -1e100;
+      max_kkt_i = -1;
+      for ( i = 0; i < F.size(); i++ )
+        if ( kkt[i] > max_kkt )
+        {
+          max_kkt = kkt[i];
+          max_kkt_i = i;
+        }
+
+      iter++;
+    }
+
+    return alpha;
+  }
+}
```

```diff
diff --git a/dtrain/test/mira_update/Hildreth.h b/dtrain/test/mira_update/Hildreth.h
new file mode 100644
index 00000000..8d791085
--- /dev/null
+++ b/dtrain/test/mira_update/Hildreth.h
@@ -0,0 +1,10 @@
+#include "sparse_vector.h"
+
+namespace Mira {
+  class Hildreth {
+    public :
+      static std::vector<double> optimise(std::vector< SparseVector<double> >& a, std::vector<double>& b);
+      static std::vector<double> optimise(std::vector< SparseVector<double> >& a, std::vector<double>& b, double C);
+  };
+}
+
```
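`Hildreth::optimise` solves the small quadratic program behind a MIRA update: given constraint feature-difference vectors `a` and margin requirements `b`, it returns the dual weights `alpha`, optionally capped at a slack `C`. A minimal usage sketch against the header above, assuming cdec's `SparseVector` and `FD` utilities; the feature names and numbers are made up:

```cpp
// Hypothetical driver for Mira::Hildreth::optimise; feature names and
// values are invented, and "fdict.h" is assumed to provide FD::Convert
// as elsewhere in cdec.
#include <iostream>
#include <vector>
#include "Hildreth.h"
#include "sparse_vector.h"
#include "fdict.h"

int main() {
  std::vector<SparseVector<double> > a;  // feature-value differences
  std::vector<double> b;                 // loss minus model-score diffs

  SparseVector<double> d1, d2;
  d1.add_value(FD::Convert("LanguageModel"), 0.5);
  d1.add_value(FD::Convert("__bias"), -1.0);
  d2.add_value(FD::Convert("PhraseModel_0"), -0.3);
  a.push_back(d1); a.push_back(d2);
  b.push_back(0.8); b.push_back(0.2);

  // Slack-capped variant, as used by the MIRA branch of the driver below.
  std::vector<double> alphas = Mira::Hildreth::optimise(a, b, 0.01);

  SparseVector<double> lambdas;
  for (size_t k = 0; k < a.size(); ++k)
    lambdas += a[k] * alphas[k];         // apply the weighted constraints
  std::cout << lambdas << std::endl;
  return 0;
}
```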
```diff
diff --git a/dtrain/test/mira_update/dtrain.cc b/dtrain/test/mira_update/dtrain.cc
new file mode 100644
index 00000000..933417a4
--- /dev/null
+++ b/dtrain/test/mira_update/dtrain.cc
@@ -0,0 +1,532 @@
+#include "common.h"
+#include "kbestget.h"
+#include "util.h"
+#include "sample.h"
+#include "Hildreth.h"
+
+#include "ksampler.h"
+
+// boost compression
+#include <boost/iostreams/device/file.hpp>
+#include <boost/iostreams/filtering_stream.hpp>
+#include <boost/iostreams/filter/gzip.hpp>
+//#include <boost/iostreams/filter/zlib.hpp>
+//#include <boost/iostreams/filter/bzip2.hpp>
+using namespace boost::iostreams;
+
+
+#ifdef DTRAIN_DEBUG
+#include "tests.h"
+#endif
+
+
+/*
+ * init
+ *
+ */
+bool
+init(int argc, char** argv, po::variables_map* cfg)
+{
+  po::options_description conff( "Configuration File Options" );
+  size_t k, N, T, stop, n_pairs;
+  string s, f, update_type;
+  conff.add_options()
+    ( "decoder_config", po::value<string>(), "configuration file for cdec" )
+    ( "kbest", po::value<size_t>(&k)->default_value(DTRAIN_DEFAULT_K), "k for kbest" )
+    ( "ngrams", po::value<size_t>(&N)->default_value(DTRAIN_DEFAULT_N), "N for Ngrams" )
+    ( "filter", po::value<string>(&f)->default_value("unique"), "filter kbest list" )
+    ( "epochs", po::value<size_t>(&T)->default_value(DTRAIN_DEFAULT_T), "# of iterations T" )
+    ( "input", po::value<string>(), "input file" )
+    ( "scorer", po::value<string>(&s)->default_value(DTRAIN_DEFAULT_SCORER), "scoring metric" )
+    ( "output", po::value<string>(), "output weights file" )
+    ( "stop_after", po::value<size_t>(&stop)->default_value(0), "stop after X input sentences" )
+    ( "weights_file", po::value<string>(), "input weights file (e.g. from previous iteration)" )
+    ( "wprint", po::value<string>(), "weights to print on each iteration" )
+    ( "noup", po::value<bool>()->zero_tokens(), "do not update weights" );
+
+  po::options_description clo("Command Line Options");
+  clo.add_options()
+    ( "config,c", po::value<string>(), "dtrain config file" )
+    ( "quiet,q", po::value<bool>()->zero_tokens(), "be quiet" )
+    ( "update-type", po::value<string>(&update_type)->default_value("mira"), "perceptron or mira" )
+    ( "n-pairs", po::value<size_t>(&n_pairs)->default_value(10), "number of pairs used to compute update" )
+    ( "verbose,v", po::value<bool>()->zero_tokens(), "be verbose" )
+#ifndef DTRAIN_DEBUG
+    ;
+#else
+    ( "test", "run tests and exit");
+#endif
+  po::options_description config_options, cmdline_options;
+
+  config_options.add(conff);
+  cmdline_options.add(clo);
+  cmdline_options.add(conff);
+
+  po::store( parse_command_line(argc, argv, cmdline_options), *cfg );
+  if ( cfg->count("config") ) {
+    ifstream config( (*cfg)["config"].as<string>().c_str() );
+    po::store( po::parse_config_file(config, config_options), *cfg );
+  }
+  po::notify(*cfg);
+
+  if ( !cfg->count("decoder_config") || !cfg->count("input") ) {
+    cerr << cmdline_options << endl;
+    return false;
+  }
+  if ( cfg->count("noup") && cfg->count("decode") ) {
+    cerr << "You can't use 'noup' and 'decode' at once." << endl;
+    return false;
+  }
+  if ( cfg->count("filter") && (*cfg)["filter"].as<string>() != "unique"
+       && (*cfg)["filter"].as<string>() != "no" ) {
+    cerr << "Wrong 'filter' type: '" << (*cfg)["filter"].as<string>() << "'." << endl;
+  }
+  #ifdef DTRAIN_DEBUG
+  if ( !cfg->count("test") ) {
+    cerr << cmdline_options << endl;
+    return false;
+  }
+  #endif
+  return true;
+}
+
+
+// output formatting
+ostream& _nopos( ostream& out ) { return out << resetiosflags( ios::showpos ); }
+ostream& _pos( ostream& out ) { return out << setiosflags( ios::showpos ); }
+ostream& _prec2( ostream& out ) { return out << setprecision(2); }
+ostream& _prec5( ostream& out ) { return out << setprecision(5); }
+
+
+
+
+/*
+ * dtrain
+ *
+ */
+int
+main( int argc, char** argv )
+{
+  cout << setprecision( 5 );
+  // handle most parameters
+  po::variables_map cfg;
+  if ( ! init(argc, argv, &cfg) ) exit(1); // something is wrong
+#ifdef DTRAIN_DEBUG
+  if ( cfg.count("test") ) run_tests(); // run tests and exit
+#endif
+  bool quiet = false;
+  if ( cfg.count("quiet") ) quiet = true;
+  bool verbose = false;
+  if ( cfg.count("verbose") ) verbose = true;
+  bool noup = false;
+  if ( cfg.count("noup") ) noup = true;
+  const size_t k = cfg["kbest"].as<size_t>();
+  const size_t N = cfg["ngrams"].as<size_t>();
+  const size_t T = cfg["epochs"].as<size_t>();
+  const size_t stop_after = cfg["stop_after"].as<size_t>();
+  const string filter_type = cfg["filter"].as<string>();
+  const string update_type = cfg["update-type"].as<string>();
+  const size_t n_pairs = cfg["n-pairs"].as<size_t>();
+  const string output_file = cfg["output"].as<string>();
+  if ( !quiet ) {
+    cout << endl << "dtrain" << endl << "Parameters:" << endl;
+    cout << setw(25) << "k " << k << endl;
+    cout << setw(25) << "N " << N << endl;
+    cout << setw(25) << "T " << T << endl;
+    if ( cfg.count("stop-after") )
+      cout << setw(25) << "stop_after " << stop_after << endl;
+    if ( cfg.count("weights") )
+      cout << setw(25) << "weights " << cfg["weights"].as<string>() << endl;
+    cout << setw(25) << "input " << "'" << cfg["input"].as<string>() << "'" << endl;
+    cout << setw(25) << "filter " << "'" << filter_type << "'" << endl;
+  }
+
+  vector<string> wprint;
+  if ( cfg.count("wprint") ) {
+    boost::split( wprint, cfg["wprint"].as<string>(), boost::is_any_of(" ") );
+  }
+
+  // setup decoder, observer
+  register_feature_functions();
+  SetSilent(true);
+  ReadFile ini_rf( cfg["decoder_config"].as<string>() );
+  if ( !quiet )
+    cout << setw(25) << "cdec cfg " << "'" << cfg["decoder_config"].as<string>() << "'" << endl;
+  Decoder decoder( ini_rf.stream() );
+  //KBestGetter observer( k, filter_type );
+  MT19937 rng;
+  KSampler observer( k, &rng );
+
+  // scoring metric/scorer
+  string scorer_str = cfg["scorer"].as<string>();
+  double (*scorer)( NgramCounts&, const size_t, const size_t, size_t, vector<float> );
+  if ( scorer_str == "bleu" ) {
+    scorer = &bleu;
+  } else if ( scorer_str == "stupid_bleu" ) {
+    scorer = &stupid_bleu;
+  } else if ( scorer_str == "smooth_bleu" ) {
+    scorer = &smooth_bleu;
+  } else if ( scorer_str == "approx_bleu" ) {
+    scorer = &approx_bleu;
+  } else {
+    cerr << "Don't know scoring metric: '" << scorer_str << "', exiting." << endl;
+    exit(1);
+  }
+  // for approx_bleu
+  NgramCounts global_counts( N ); // counts for 1 best translations
+  size_t global_hyp_len = 0; // sum hypothesis lengths
+  size_t global_ref_len = 0; // sum reference lengths
+  // this is all BLEU implmentations
+  vector<float> bleu_weights; // we leave this empty -> 1/N; TODO?
+  if ( !quiet ) cout << setw(26) << "scorer '" << scorer_str << "'" << endl << endl;
+
+  // init weights
+  Weights weights;
+  if ( cfg.count("weights") ) weights.InitFromFile( cfg["weights"].as<string>() );
+  SparseVector<double> lambdas;
+  weights.InitSparseVector( &lambdas );
+  vector<double> dense_weights;
+
+  // input
+  if ( !quiet && !verbose )
+    cout << "(a dot represents " << DTRAIN_DOTS << " lines of input)" << endl;
+  string input_fn = cfg["input"].as<string>();
+  ifstream input;
+  if ( input_fn != "-" ) input.open( input_fn.c_str() );
+  string in;
+  vector<string> in_split; // input: src\tref\tpsg
+  vector<string> ref_tok; // tokenized reference
+  vector<WordID> ref_ids; // reference as vector of WordID
+  string grammar_str;
+
+  // buffer input for t > 0
+  vector<string> src_str_buf; // source strings, TODO? memory
+  vector<vector<WordID> > ref_ids_buf; // references as WordID vecs
+  filtering_ostream grammar_buf; // written to compressed file in /tmp
+  // this is for writing the grammar buffer file
+  grammar_buf.push( gzip_compressor() );
+  char grammar_buf_tmp_fn[] = DTRAIN_TMP_DIR"/dtrain-grammars-XXXXXX";
+  mkstemp( grammar_buf_tmp_fn );
+  grammar_buf.push( file_sink(grammar_buf_tmp_fn, ios::binary | ios::trunc) );
+
+  size_t sid = 0, in_sz = 99999999; // sentence id, input size
+  double acc_1best_score = 0., acc_1best_model = 0.;
+  vector<vector<double> > scores_per_iter;
+  double max_score = 0.;
+  size_t best_t = 0;
+  bool next = false, stop = false;
+  double score = 0.;
+  size_t cand_len = 0;
+  double overall_time = 0.;
+
+  // for the perceptron/SVM; TODO as params
+  double eta = 0.0005;
+  double gamma = 0.;//01; // -> SVM
+  lambdas.add_value( FD::Convert("__bias"), 0 );
+
+  // for random sampling
+  srand ( time(NULL) );
+
+
+  for ( size_t t = 0; t < T; t++ ) // T epochs
+  {
+
+  time_t start, end;
+  time( &start );
+
+  // actually, we need only need this if t > 0 FIXME
+  ifstream grammar_file( grammar_buf_tmp_fn, ios_base::in | ios_base::binary );
+  filtering_istream grammar_buf_in;
+  grammar_buf_in.push( gzip_decompressor() );
+  grammar_buf_in.push( grammar_file );
+
+  // reset average scores
+  acc_1best_score = acc_1best_model = 0.;
+
+  // reset sentence counter
+  sid = 0;
+
+  if ( !quiet ) cout << "Iteration #" << t+1 << " of " << T << "." << endl;
+
+  while( true )
+  {
+
+    // get input from stdin or file
+    in.clear();
+    next = stop = false; // next iteration, premature stop
+    if ( t == 0 ) {
+      if ( input_fn == "-" ) {
+        if ( !getline(cin, in) ) next = true;
+      } else {
+        if ( !getline(input, in) ) next = true;
+      }
+    } else {
+      if ( sid == in_sz ) next = true; // stop if we reach the end of our input
+    }
+    // stop after X sentences (but still iterate for those)
+    if ( stop_after > 0 && stop_after == sid && !next ) stop = true;
+
+    // produce some pretty output
+    if ( !quiet && !verbose ) {
+      if ( sid == 0 ) cout << " ";
+      if ( (sid+1) % (DTRAIN_DOTS) == 0 ) {
+        cout << ".";
+        cout.flush();
+      }
+      if ( (sid+1) % (20*DTRAIN_DOTS) == 0) {
+        cout << " " << sid+1 << endl;
+        if ( !next && !stop ) cout << " ";
+      }
+      if ( stop ) {
+        if ( sid % (20*DTRAIN_DOTS) != 0 ) cout << " " << sid << endl;
+        cout << "Stopping after " << stop_after << " input sentences." << endl;
+      } else {
+        if ( next ) {
+          if ( sid % (20*DTRAIN_DOTS) != 0 ) {
+            cout << " " << sid << endl;
+          }
+        }
+      }
+    }
+
+    // next iteration
+    if ( next || stop ) break;
+
+    // weights
+    dense_weights.clear();
+    weights.InitFromVector( lambdas );
+    weights.InitVector( &dense_weights );
+    decoder.SetWeights( dense_weights );
+
+    if ( t == 0 ) {
+      // handling input
+      in_split.clear();
+      boost::split( in_split, in, boost::is_any_of("\t") ); // in_split[0] is id
+      // getting reference
+      ref_tok.clear(); ref_ids.clear();
+      boost::split( ref_tok, in_split[2], boost::is_any_of(" ") );
+      register_and_convert( ref_tok, ref_ids );
+      ref_ids_buf.push_back( ref_ids );
+      // process and set grammar
+      bool broken_grammar = true;
+      for ( string::iterator ti = in_split[3].begin(); ti != in_split[3].end(); ti++ ) {
+        if ( !isspace(*ti) ) {
+          broken_grammar = false;
+          break;
+        }
+      }
+      if ( broken_grammar ) continue;
+      grammar_str = boost::replace_all_copy( in_split[3], " __NEXT__RULE__ ", "\n" ) + "\n"; // FIXME copy, __
+      grammar_buf << grammar_str << DTRAIN_GRAMMAR_DELIM << endl;
+      decoder.SetSentenceGrammarFromString( grammar_str );
+      // decode, kbest
+      src_str_buf.push_back( in_split[1] );
+      decoder.Decode( in_split[1], &observer );
+    } else {
+      // get buffered grammar
+      grammar_str.clear();
+      int i = 1;
+      while ( true ) {
+        string g;
+        getline( grammar_buf_in, g );
+        if ( g == DTRAIN_GRAMMAR_DELIM ) break;
+        grammar_str += g+"\n";
+        i += 1;
+      }
+      decoder.SetSentenceGrammarFromString( grammar_str );
+      // decode, kbest
+      decoder.Decode( src_str_buf[sid], &observer );
+    }
+
+    // get kbest list
+    KBestList* kb;
+    //if ( ) { // TODO get from forest
+    kb = observer.GetKBest();
+    //}
+
+    // scoring kbest
+    if ( t > 0 ) ref_ids = ref_ids_buf[sid];
+    for ( size_t i = 0; i < kb->GetSize(); i++ ) {
+      NgramCounts counts = make_ngram_counts( ref_ids, kb->sents[i], N );
+      // this is for approx bleu
+      if ( scorer_str == "approx_bleu" ) {
+        if ( i == 0 ) { // 'context of 1best translations'
+          global_counts += counts;
+          global_hyp_len += kb->sents[i].size();
+          global_ref_len += ref_ids.size();
+          counts.reset();
+          cand_len = 0;
+        } else {
+          cand_len = kb->sents[i].size();
+        }
+        NgramCounts counts_tmp = global_counts + counts;
+        // TODO as param
+        score = 0.9 * scorer( counts_tmp,
+                              global_ref_len,
+                              global_hyp_len + cand_len, N, bleu_weights );
+      } else {
+        // other scorers
+        cand_len = kb->sents[i].size();
+        score = scorer( counts,
+                        ref_ids.size(),
+                        kb->sents[i].size(), N, bleu_weights );
+      }
+
+      kb->scores.push_back( score );
+
+      if ( i == 0 ) {
+        acc_1best_score += score;
+        acc_1best_model += kb->model_scores[i];
+      }
+
+      if ( verbose ) {
+        if ( i == 0 ) cout << "'" << TD::GetString( ref_ids ) << "' [ref]" << endl;
+        cout << _prec5 << _nopos << "[hyp " << i << "] " << "'" << TD::GetString( kb->sents[i] ) << "'";
+        cout << " [SCORE=" << score << ",model="<< kb->model_scores[i] << "]" << endl;
+        cout << kb->feats[i] << endl; // this is maybe too verbose
+      }
+    } // Nbest loop
+
+    if ( verbose ) cout << endl;
+
+
+    // UPDATE WEIGHTS
+    if ( !noup ) {
+
+      TrainingInstances pairs;
+      sample_all( kb, pairs, n_pairs );
+
+      vector< SparseVector<double> > featureValueDiffs;
+      vector<double> lossMinusModelScoreDiffs;
+      for ( TrainingInstances::iterator ti = pairs.begin();
+            ti != pairs.end(); ti++ ) {
+
+        SparseVector<double> dv;
+        if ( ti->first_score - ti->second_score < 0 ) {
+          dv = ti->second - ti->first;
+          dv.add_value( FD::Convert("__bias"), -1 );
+
+          featureValueDiffs.push_back(dv);
+          double lossMinusModelScoreDiff = ti->loss_diff - ti->model_score_diff;
+          lossMinusModelScoreDiffs.push_back(lossMinusModelScoreDiff);
+
+          if (update_type == "perceptron") {
+            lambdas += dv * eta;
+            cerr << "after perceptron update: " << lambdas << endl << endl;
+          }
+
+          if ( verbose ) {
+            cout << "{{ f("<< ti->first_rank <<") > f(" << ti->second_rank << ") but g(i)="<< ti->first_score <<" < g(j)="<< ti->second_score << " so update" << endl;
+            cout << " i " << TD::GetString(kb->sents[ti->first_rank]) << endl;
+            cout << "   " << kb->feats[ti->first_rank] << endl;
+            cout << " j " << TD::GetString(kb->sents[ti->second_rank]) << endl;
+            cout << "   " << kb->feats[ti->second_rank] << endl;
+            cout << " diff vec: " << dv << endl;
+            cout << " lambdas after update: " << lambdas << endl;
+            cout << "}}" << endl;
+          }
+        } else {
+          //SparseVector<double> reg;
+          //reg = lambdas * ( 2 * gamma );
+          //lambdas += reg * ( -eta );
+        }
+      }
+      cerr << "Collected " << featureValueDiffs.size() << " constraints." << endl;
+
+      double slack = 0.01;
+      if (update_type == "mira") {
+        if (featureValueDiffs.size() > 0) {
+          vector<double> alphas;
+          if (slack != 0) {
+            alphas = Mira::Hildreth::optimise(featureValueDiffs, lossMinusModelScoreDiffs, slack);
+          } else {
+            alphas = Mira::Hildreth::optimise(featureValueDiffs, lossMinusModelScoreDiffs);
+          }
+
+          for (size_t k = 0; k < featureValueDiffs.size(); ++k) {
+            lambdas += featureValueDiffs[k] * alphas[k];
+          }
+          // cerr << "after mira update: " << lambdas << endl << endl;
+        }
+      }
+    }
+
+    ++sid;
+
+  } // input loop
+
+  if ( t == 0 ) in_sz = sid; // remember size (lines) of input
+
+  // print some stats
+  double avg_1best_score = acc_1best_score/(double)in_sz;
+  double avg_1best_model = acc_1best_model/(double)in_sz;
+  double avg_1best_score_diff, avg_1best_model_diff;
+  if ( t > 0 ) {
+    avg_1best_score_diff = avg_1best_score - scores_per_iter[t-1][0];
+    avg_1best_model_diff = avg_1best_model - scores_per_iter[t-1][1];
+  } else {
+    avg_1best_score_diff = avg_1best_score;
+    avg_1best_model_diff = avg_1best_model;
+  }
+  cout << _prec5 << _pos << "WEIGHTS" << endl;
+  for (vector<string>::iterator it = wprint.begin(); it != wprint.end(); it++) {
+    cout << setw(16) << *it << " = " << dense_weights[FD::Convert( *it )] << endl;
+  }
+
+  cout << "        ---" << endl;
+  cout << _nopos << "      avg score: " << avg_1best_score;
+  cout << _pos << " (" << avg_1best_score_diff << ")" << endl;
+  cout << _nopos << "avg model score: " << avg_1best_model;
+  cout << _pos << " (" << avg_1best_model_diff << ")" << endl;
+  vector<double> remember_scores;
+  remember_scores.push_back( avg_1best_score );
+  remember_scores.push_back( avg_1best_model );
+  scores_per_iter.push_back( remember_scores );
+  if ( avg_1best_score > max_score ) {
+    max_score = avg_1best_score;
+    best_t = t;
+  }
+
+  // close open files
+  if ( input_fn != "-" ) input.close();
+  close( grammar_buf );
+  grammar_file.close();
+
+  time ( &end );
+  double time_dif = difftime( end, start );
+  overall_time += time_dif;
+  if ( !quiet ) {
+    cout << _prec2 << _nopos << "(time " << time_dif/60. << " min, ";
+    cout << time_dif/(double)in_sz << " s/S)" << endl;
+  }
+
+  if ( t+1 != T ) cout << endl;
+
+  if ( noup ) break;
+
+  // write weights after every epoch
+  std::string s;
+  std::stringstream out;
+  out << t;
+  s = out.str();
+  string weights_file = output_file + "." + s;
+  weights.WriteToFile(weights_file, true );
+
+  } // outer loop
+
+  unlink( grammar_buf_tmp_fn );
+  if ( !noup ) {
+    if ( !quiet ) cout << endl << "writing weights file '" << cfg["output"].as<string>() << "' ...";
+    weights.WriteToFile( cfg["output"].as<string>(), true );
+    if ( !quiet ) cout << "done" << endl;
+  }
+
+  if ( !quiet ) {
+    cout << _prec5 << _nopos << endl << "---" << endl << "Best iteration: ";
+    cout << best_t+1 << " [SCORE '" << scorer_str << "'=" << max_score << "]." << endl;
+    cout << _prec2 << "This took " << overall_time/60. << " min." << endl;
+  }
+
+  return 0;
+}
+
```
```diff
diff --git a/dtrain/test/mira_update/sample.h b/dtrain/test/mira_update/sample.h
new file mode 100644
index 00000000..5c331bba
--- /dev/null
+++ b/dtrain/test/mira_update/sample.h
@@ -0,0 +1,101 @@
+#ifndef _DTRAIN_SAMPLE_H_
+#define _DTRAIN_SAMPLE_H_
+
+
+#include "kbestget.h"
+
+
+namespace dtrain
+{
+
+
+struct TPair
+{
+  SparseVector<double> first, second;
+  size_t first_rank, second_rank;
+  double first_score, second_score;
+  double model_score_diff;
+  double loss_diff;
+};
+
+typedef vector<TPair> TrainingInstances;
+
+
+void
+sample_all( KBestList* kb, TrainingInstances &training, size_t n_pairs )
+{
+  std::vector<double> loss_diffs;
+  TrainingInstances training_tmp;
+  for ( size_t i = 0; i < kb->GetSize()-1; i++ ) {
+    for ( size_t j = i+1; j < kb->GetSize(); j++ ) {
+      TPair p;
+      p.first = kb->feats[i];
+      p.second = kb->feats[j];
+      p.first_rank = i;
+      p.second_rank = j;
+      p.first_score = kb->scores[i];
+      p.second_score = kb->scores[j];
+
+      bool conservative = 1;
+      if ( kb->scores[i] - kb->scores[j] < 0 ) {
+        // j=hope, i=fear
+        p.model_score_diff = kb->model_scores[j] - kb->model_scores[i];
+        p.loss_diff = kb->scores[j] - kb->scores[i];
+        training_tmp.push_back(p);
+        loss_diffs.push_back(p.loss_diff);
+      }
+      else if (!conservative) {
+        // i=hope, j=fear
+        p.model_score_diff = kb->model_scores[i] - kb->model_scores[j];
+        p.loss_diff = kb->scores[i] - kb->scores[j];
+        training_tmp.push_back(p);
+        loss_diffs.push_back(p.loss_diff);
+      }
+    }
+  }
+
+  if (training_tmp.size() > 0) {
+    double threshold;
+    std::sort(loss_diffs.begin(), loss_diffs.end());
+    std::reverse(loss_diffs.begin(), loss_diffs.end());
+    threshold = loss_diffs.size() >= n_pairs ? loss_diffs[n_pairs-1] : loss_diffs[loss_diffs.size()-1];
+    cerr << "threshold: " << threshold << endl;
+    size_t constraints = 0;
+    for (size_t i = 0; (i < training_tmp.size() && constraints < n_pairs); ++i) {
+      if (training_tmp[i].loss_diff >= threshold) {
+        training.push_back(training_tmp[i]);
+        constraints++;
+      }
+    }
+  }
+  else {
+    cerr << "No pairs selected." << endl;
+  }
+}
+
+void
+sample_rand( KBestList* kb, TrainingInstances &training )
+{
+  srand( time(NULL) );
+  for ( size_t i = 0; i < kb->GetSize()-1; i++ ) {
+    for ( size_t j = i+1; j < kb->GetSize(); j++ ) {
+      if ( rand() % 2 ) {
+        TPair p;
+        p.first = kb->feats[i];
+        p.second = kb->feats[j];
+        p.first_rank = i;
+        p.second_rank = j;
+        p.first_score = kb->scores[i];
+        p.second_score = kb->scores[j];
+        training.push_back( p );
+      }
+    }
+  }
+}
+
+
+} // namespace
+
+
+#endif
```
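Taken together, `sample_all` reduces a scored k-best list to the `n_pairs` pairs with the largest loss difference, which the test driver above feeds to the perceptron or MIRA update. A hedged sketch of the call pattern, assuming a `KBestList` whose `feats`, `scores`, and `model_scores` are already filled in (the wrapper function is hypothetical):

```cpp
// Hypothetical call pattern for sample_all; kb is assumed to be a k-best
// list already filled by the observer and scored (see the driver above).
#include <iostream>
#include "kbestget.h"
#include "sample.h"

using namespace dtrain;

void update_from_kbest(KBestList* kb, size_t n_pairs)
{
  TrainingInstances pairs;
  sample_all(kb, pairs, n_pairs);  // keep the n_pairs largest loss diffs
  for (TrainingInstances::iterator ti = pairs.begin();
       ti != pairs.end(); ti++) {
    // Pairs are kept only when the lower-ranked hypothesis scores better,
    // so loss_diff > 0 (second = hope, first = fear).
    std::cout << ti->first_rank << " vs " << ti->second_rank
              << ", loss diff " << ti->loss_diff << std::endl;
  }
}
```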