本文整理汇总了Python中sklearn.metrics.cluster.normalized_mutual_info_score函数的典型用法代码示例。如果您正苦于以下问题:Python normalized_mutual_info_score函数的具体用法?Python normalized_mutual_info_score怎么用?Python normalized_mutual_info_score使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了normalized_mutual_info_score函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: load
def load():
data = genfromtxt('../dataset/' + dataFile + '.csv', delimiter=',')
label = genfromtxt('../dataset/' + labelFile + '.csv', delimiter=',')
res = pickle.load(open(FN,'rb'))
AE = res['autoencoder']
encodedX = AE.encoder(AE.X)
X = encodedX.data.numpy()
#X = preprocessing.scale(encodedX.data.numpy())
d_matrix = sklearn.metrics.pairwise.pairwise_distances(X, Y=None, metric='euclidean')
s = np.median(d_matrix)
Vgamma = 1/(2*s*s)
spAlloc = SpectralClustering(2, gamma=Vgamma).fit_predict(X)
nmi_sp = np.around(normalized_mutual_info_score(label, spAlloc), 3)
kmAlloc = KMeans(2).fit_predict(X)
nmi_km = np.around(normalized_mutual_info_score(label, kmAlloc), 3)
print X
print nmi_sp
print nmi_km
print res['loss']
#print res['autoencoder']
txt = dataFile + ' nmiSP : ' + str(nmi_sp) + ' , nmiKM : ' + str(nmi_km) + ' , num_of_layers:' + str(num_of_layers) + ' , num_of_output:' + str(num_of_output) + '\n'
fin = open('auto_out.txt','a')
fin.write(txt)
fin.close()
开发者ID:juliaprocess,项目名称:ml_examples,代码行数:34,代码来源:autoencoder.py
示例2: test_exactly_zero_info_score
def test_exactly_zero_info_score():
    """Check numerical stability when information is exactly zero."""
    # Fix: np.int was only a deprecated alias of the builtin int and was
    # removed in NumPy 1.24; using it crashes on modern NumPy.
    for i in np.logspace(1, 4, 4).astype(int):
        labels_a, labels_b = np.ones(i, dtype=int), np.arange(i, dtype=int)
        assert_equal(normalized_mutual_info_score(labels_a, labels_b), 0.0)
        assert_equal(v_measure_score(labels_a, labels_b), 0.0)
        assert_equal(adjusted_mutual_info_score(labels_a, labels_b), 0.0)
        assert_equal(normalized_mutual_info_score(labels_a, labels_b), 0.0)
开发者ID:JinguoGao,项目名称:scikit-learn,代码行数:8,代码来源:test_supervised.py
示例3: test_single_linkage_clustering
def test_single_linkage_clustering():
    """Single-linkage agglomerative clustering should perfectly recover the
    two-cluster structure of the moons and circles toy datasets (NMI == 1)."""
    toy_datasets = [
        make_moons(noise=0.05, random_state=42),
        make_circles(factor=0.5, noise=0.025, random_state=42),
    ]
    for X, true_labels in toy_datasets:
        model = AgglomerativeClustering(n_clusters=2, linkage='single')
        model.fit(X)
        assert_almost_equal(
            normalized_mutual_info_score(model.labels_, true_labels), 1)
示例4: test_exactly_zero_info_score
def test_exactly_zero_info_score():
# Check numerical stability when information is exactly zero
for i in np.logspace(1, 4, 4).astype(np.int):
labels_a, labels_b = (np.ones(i, dtype=np.int),
np.arange(i, dtype=np.int))
assert_equal(normalized_mutual_info_score(labels_a, labels_b), 0.0)
assert_equal(v_measure_score(labels_a, labels_b), 0.0)
assert_equal(adjusted_mutual_info_score(labels_a, labels_b), 0.0)
assert_equal(normalized_mutual_info_score(labels_a, labels_b), 0.0)
for method in ["min", "geometric", "arithmetic", "max"]:
assert adjusted_mutual_info_score(labels_a, labels_b,
method) == 0.0
assert normalized_mutual_info_score(labels_a, labels_b,
method) == 0.0
开发者ID:MartinThoma,项目名称:scikit-learn,代码行数:14,代码来源:test_supervised.py
示例5: __eval_lda_clustering
def __eval_lda_clustering(lda_model, mm_corpus, gold_labels):
# lda_model = gensim.models.ldamodel.LdaModel.load(model_file)
sys_labels = list()
for i, doc in enumerate(mm_corpus):
topic_dist = lda_model[doc]
# print topic_dist
cluster_idx = 0
max_dist = 0
for tup in topic_dist:
if tup[1] > max_dist:
cluster_idx = tup[0]
max_dist = tup[1]
sys_labels.append(cluster_idx)
if len(sys_labels) % 5000 == 0:
print len(sys_labels)
# if i > 10:
# break
# print len(sys_labels)
# print len(gold_labels)
nmi_score = normalized_mutual_info_score(gold_labels, sys_labels)
purity_score = purity(gold_labels, sys_labels)
ri_score = rand_index(gold_labels, sys_labels)
# print 'NMI: %f' % normalized_mutual_info_score(gold_labels, sys_labels)
# print 'Purity: %f' % purity(gold_labels, sys_labels)
# print 'Accuracy: %f' % cluster_accuracy(gold_labels, sys_labels)
print 'NMI: %f Purity: %f Rand index: %f' % (nmi_score, purity_score, ri_score)
return nmi_score, purity_score, ri_score
开发者ID:hldai,项目名称:emadr-exp,代码行数:30,代码来源:lda.py
示例6: evaluation
def evaluation(X_selected, n_clusters, y):
    """
    Cluster the selected features with k-means and score the result against
    the true labels.

    Note: the original header said "ARI, ACC and NMI", but only NMI and ACC
    are actually computed and returned.

    Input
    -----
    X_selected: {numpy array}, shape (n_samples, n_selected_features)
        input data on the selected features
    n_clusters: {int}
        number of clusters
    y: {numpy array}, shape (n_samples,)
        true labels

    Output
    ------
    nmi: {float}
        Normalized Mutual Information
    acc: {float}
        Accuracy
    """
    # NOTE(review): precompute_distances and n_jobs are deprecated/removed
    # KMeans parameters in recent scikit-learn -- confirm the pinned version
    # before upgrading.
    k_means = KMeans(n_clusters=n_clusters, init='k-means++', n_init=10, max_iter=300,
                     tol=0.0001, precompute_distances=True, verbose=0,
                     random_state=None, copy_x=True, n_jobs=1)
    k_means.fit(X_selected)
    y_predict = k_means.labels_
    # calculate NMI
    nmi = normalized_mutual_info_score(y, y_predict)
    # calculate ACC: permute the predicted labels to best match y first,
    # since cluster ids are arbitrary.
    y_permuted_predict = best_map(y, y_predict)
    acc = accuracy_score(y, y_permuted_predict)
    return nmi, acc
示例7: NMI
def NMI(groundTruth, predictionResult):
    """Flatten both labelings with _labelPreprocessing and return their
    normalized mutual information score."""
    gt_labels = _labelPreprocessing(groundTruth)
    pred_labels = _labelPreprocessing(predictionResult)
    return normalized_mutual_info_score(gt_labels, pred_labels)
# This show how NMI can produce high result while it should not
# I5SIM3DatasetTrueClassification = [[1, 22, 52, 67, 84, 88, 106, 124, 138, 156, 167, 172, 204, 228, 240, 245, 256, 283, 313, 322, 337, 355, 367, 375, 380, 382, 405, 421, 422, 449, 451, 452, 464, 468, 469, 519, 520, 539, 566, 596, 612, 627, 628, 642, 656, 683, 718, 780, 808, 817, 830, 831, 833, 835, 852, 853, 854, 870, 876, 878, 927, 948, 952, 958, 968, 972, 976, 1005, 1016, 1024, 1058, 1108, 1122, 1123, 1149,1152, 1190, 1217, 1236, 1243, 1244, 1257, 1260, 1325, 1331, 1346, 1348, 1375, 1382, 1390, 1393, 1416, 1433, 1445, 1451, 1491, 1521, 1557, 1580, 1588],
# [36, 71,76, 80, 85, 155, 157, 171, 182, 211, 215, 224, 237, 238, 239, 292, 298, 311, 315, 329, 342, 361, 370, 384, 403, 415, 416, 419, 425, 437, 483, 485, 488, 497, 522, 528, 545, 561, 569, 571, 572, 574, 621, 645, 653, 664, 674, 699, 712, 732, 734, 740, 755, 773, 774, 802, 828, 841, 846, 872, 896, 903, 906, 940, 991, 1006, 1013, 1042, 1074, 1085, 1100, 1125, 1148, 1170, 1179, 1180, 1203, 1237, 1239, 1252, 1256, 1274, 1279, 1289, 1297, 1307, 1353, 1358, 1383, 1415, 1417, 1431, 1438,1449, 1457, 1506, 1542, 1544, 1547, 1579],
# [2, 8, 18, 32, 45, 59, 64, 79, 86, 96, 103, 110, 140, 166, 174, 199, 201, 202, 214, 235, 242, 247, 250, 262, 273, 278, 279, 286, 290, 326, 344, 345, 350, 363, 364, 424, 426, 427, 447, 470, 493, 501, 523, 526, 548, 563, 586, 599, 601, 602, 605, 632, 643, 661, 662, 677, 694, 702, 706, 751, 768, 815, 832, 871, 892, 930, 932, 934, 973, 984, 994, 1010, 1064,1080, 1115, 1133, 1136, 1166, 1173, 1258, 1298, 1302, 1313, 1319, 1320, 1321, 1326, 1328, 1333, 1341, 1349, 1385, 1403, 1478, 1489, 1512, 1556, 1559, 1565, 1572],
# [12, 25, 29, 97, 102, 107, 116, 119, 132, 141, 158, 178, 181, 252, 253, 309,332, 356, 359, 373, 374, 393, 402, 420, 433, 438, 448, 459, 472, 480, 482, 491,515, 517, 529, 530, 543, 581, 618, 623, 636, 650, 676, 687, 727, 728, 737, 742,787, 793, 807, 857, 858, 863, 882, 904, 907, 957, 969, 970, 971, 975, 1000, 1007, 1011, 1043, 1050, 1056, 1069, 1112, 1141, 1145, 1147, 1151, 1157, 1163, 1172,1246, 1316, 1335, 1352, 1360, 1376, 1387, 1399, 1412, 1468, 1505, 1509, 1510, 1513, 1514, 1517, 1529, 1567, 1576, 1578, 1581, 1583, 1591],
# [6, 21, 23, 34, 42, 53, 77, 90, 154, 168, 195, 264, 267, 269, 282, 284, 335, 336, 352, 354, 358, 379,400, 434, 444, 453, 463, 509, 511, 554, 556, 559, 580, 591, 594, 595, 615, 640,707, 714, 749, 756, 763, 764, 765, 792, 821, 843, 855, 860, 875, 894, 897, 920,954, 1002, 1012, 1025, 1028, 1057, 1076, 1083, 1109, 1127, 1130, 1134, 1144, 1153, 1160, 1194, 1195, 1198, 1207, 1209, 1242, 1245, 1262, 1265, 1284, 1373, 1392, 1407, 1429, 1446, 1461, 1470, 1477, 1479, 1485, 1511, 1519, 1520, 1534, 1539,1554, 1560, 1566, 1577, 1590, 1599],
# [14, 50, 55, 73, 94, 100, 109, 112, 144, 149, 183, 200, 207, 234, 249, 251, 258, 275, 297, 301, 305, 323, 346, 357, 386, 404, 409, 436, 440, 458, 496, 504, 525, 538, 570, 583, 608, 634, 635, 637, 665, 668, 673, 679, 682, 688, 689, 693, 711, 716, 752, 760, 783, 794, 819, 823, 836, 847, 881, 891, 893, 943, 946, 961, 974, 988, 995, 1001, 1003, 1040, 1075, 1155, 1171, 1228, 1259, 1264, 1269, 1291, 1304, 1305, 1308, 1332, 1343, 1356, 1357, 1398, 1401, 1427, 1428, 1435, 1482, 1483, 1484, 1496, 1533, 1535, 1555, 1574, 1586,1589],
# [5, 19, 46, 63, 68, 82, 111, 120, 129, 131, 145, 151, 152, 170, 176, 184,203, 208, 216, 220, 227, 246, 263, 302, 308, 331, 334, 351, 376, 378, 387, 396,408, 412, 428, 489, 500, 502, 555, 577, 622, 631, 651, 652, 686, 705, 745, 747,782, 798, 827, 874, 884, 885, 928, 937, 939, 956, 983, 1009, 1018, 1030, 1039,1062, 1063, 1070, 1072, 1073, 1084, 1093, 1103, 1107, 1128, 1158, 1168, 1169, 1224, 1227, 1230, 1272, 1315, 1327, 1336, 1388, 1394, 1395, 1400, 1409, 1432, 1447, 1448, 1452, 1453, 1465, 1476, 1503, 1515, 1523, 1525, 1582],
# [9, 13, 15, 41, 44, 93, 113, 122, 134, 136, 139, 173, 185, 189, 205, 241, 259, 260, 268, 272, 299, 318, 327, 353, 389, 394, 413, 430, 479, 492, 499, 503, 516, 531, 535, 573, 579, 590, 609, 614, 638, 660, 666, 670, 691, 717, 724, 757, 762, 796, 801, 803, 809, 811, 838, 844, 873, 888, 899, 905, 908, 913, 918, 929, 953, 977, 982, 990, 992, 1020, 1021, 1051, 1094, 1116, 1124, 1164, 1183, 1184, 1199, 1208, 1219, 1226,1303, 1306, 1323, 1334, 1345, 1354, 1364, 1374, 1377, 1402, 1422, 1501, 1531, 1540, 1553, 1585, 1592, 1597],
# [3, 20, 26, 39, 40, 57, 70, 78, 87, 92, 126, 133, 143, 147, 153, 160, 190, 198, 206, 210, 254, 270, 280, 295, 314, 321, 338, 339, 347, 360, 365, 399, 401, 429, 435, 471, 475, 487, 490, 505, 506, 507, 565, 600, 624, 625, 649, 654, 659, 675, 709, 713, 719, 722, 766, 775, 784, 790, 820, 849, 887, 914, 938, 941, 960, 962, 999, 1054, 1060, 1095, 1099, 1106, 1139, 1142, 1154, 1165, 1186, 1189, 1197, 1273, 1276, 1295, 1301, 1309, 1314, 1324, 1366, 1368,1379, 1436, 1440, 1450, 1459, 1473, 1500, 1528, 1532, 1558, 1563, 1568],
# [0, 4,27, 56, 66, 91, 117, 118, 121, 142, 194, 209, 221, 236, 243, 244, 248, 277, 293,317, 333, 348, 371, 481, 510, 514, 532, 551, 568, 575, 585, 604, 620, 629, 644,681, 700, 704, 720, 726, 743, 748, 770, 779, 795, 850, 851, 859, 867, 889, 911,924, 933, 949, 955, 967, 989, 998, 1019, 1052, 1098, 1105, 1117, 1126, 1131, 1132, 1138, 1156, 1162, 1167, 1175, 1176, 1200, 1238, 1268, 1277, 1278, 1283, 1292, 1310, 1330, 1339, 1350, 1372, 1391, 1404, 1406, 1419, 1463, 1466, 1480, 1481,1490, 1498, 1526, 1538, 1549, 1562, 1570, 1598],
# [24, 51, 54, 74, 108, 130, 148,186, 196, 226, 230, 261, 281, 294, 304, 307, 349, 362, 372, 383, 417, 465, 477,537, 546, 553, 560, 578, 592, 593, 611, 613, 616, 619, 630, 658, 669, 685, 692,696, 791, 805, 806, 834, 837, 840, 845, 856, 868, 869, 898, 915, 945, 951, 964,987, 1004, 1008, 1015, 1036, 1041, 1045, 1071, 1081, 1082, 1088, 1111, 1119, 1120, 1135, 1143, 1159, 1174, 1177, 1193, 1202, 1205, 1221, 1248, 1253, 1280, 1281, 1290, 1293, 1311, 1312, 1340, 1378, 1397, 1405, 1474, 1475, 1486, 1487, 1516,1518, 1537, 1552, 1573, 1596],
# [16, 28, 37, 104, 128, 159, 164, 175, 187, 188, 212, 217, 223, 255, 312, 341, 343, 392, 397, 398, 406, 410, 418, 454, 455, 461, 462, 478, 494, 495, 512, 540, 550, 558, 597, 626, 633, 729, 735, 741, 750, 761, 781, 797, 799, 814, 822, 824, 879, 900, 910, 966, 979, 981, 993, 996, 1037, 1044,1046, 1053, 1061, 1065, 1077, 1079, 1096, 1097, 1113, 1121, 1146, 1150, 1182, 1185, 1196, 1210, 1235, 1241, 1254, 1263, 1275, 1285, 1287, 1338, 1355, 1359, 1363, 1380, 1381, 1396, 1437, 1441, 1467, 1493, 1494, 1495, 1522, 1541, 1546, 1551,1564, 1575],
# [11, 17, 31, 48, 75, 89, 95, 98, 115, 123, 125, 135, 137, 161, 179, 191, 219, 222, 257, 266, 276, 291, 300, 310, 320, 324, 411, 473, 476, 498, 518, 534, 557, 576, 582, 587, 639, 646, 648, 663, 701, 710, 723, 731, 744, 759, 772, 776, 812, 813, 816, 818, 839, 861, 864, 866, 883, 886, 916, 921, 922, 965, 980, 985, 997, 1031, 1032, 1038, 1047, 1059, 1114, 1137, 1191, 1201, 1206, 1215, 1218, 1223, 1234, 1249, 1251, 1266, 1288, 1317, 1318, 1370, 1371, 1414, 1420, 1430, 1439, 1444, 1460, 1464, 1507, 1508, 1524, 1536, 1543, 1548],
# [7, 10, 35, 58, 61, 69, 99, 146, 163, 165, 192, 213, 231, 233, 274, 287, 328, 330, 366, 377, 390,395, 445, 446, 450, 456, 460, 508, 536, 541, 547, 549, 564, 567, 598, 606, 617,657, 671, 672, 695, 703, 725, 733, 736, 754, 769, 771, 778, 785, 786, 800, 810,829, 865, 877, 880, 909, 919, 931, 935, 942, 963, 986, 1023, 1026, 1055, 1068,1086, 1089, 1101, 1102, 1104, 1110, 1140, 1181, 1212, 1229, 1267, 1270, 1286, 1322, 1337, 1347, 1361, 1362, 1369, 1384, 1411, 1413, 1423, 1426, 1456, 1469, 1472, 1488, 1499, 1530, 1561, 1595],
# [38, 43, 60, 62, 101, 225, 229, 232, 285, 288,289, 306, 316, 319, 340, 368, 381, 423, 439, 441, 457, 467, 474, 521, 533, 542,544, 552, 562, 588, 589, 603, 610, 641, 647, 698, 708, 721, 730, 746, 753, 767,788, 825, 862, 890, 895, 902, 912, 917, 925, 926, 947, 959, 1017, 1022, 1029, 1048, 1049, 1066, 1078, 1091, 1092, 1129, 1187, 1192, 1204, 1213, 1216, 1222, 1225, 1231, 1232, 1233, 1247, 1250, 1261, 1271, 1294, 1296, 1299, 1344, 1367, 1389,1410, 1421, 1424, 1425, 1434, 1442, 1443, 1454, 1455, 1458, 1471, 1492, 1550, 1584, 1587, 1593],
# [30, 33, 47, 49, 65, 72, 81, 83, 105, 114, 127, 150, 162, 169,177, 180, 193, 197, 218, 265, 271, 296, 303, 325, 369, 385, 388, 391, 407, 414,431, 432, 442, 443, 466, 484, 486, 513, 524, 527, 584, 607, 655, 667, 678, 680,684, 690, 697, 715, 738, 739, 758, 777, 789, 804, 826, 842, 848, 901, 923, 936,944, 950, 978, 1014, 1027, 1033, 1034, 1035, 1067, 1087, 1090, 1118, 1161, 1178,1188, 1211, 1214, 1220, 1240, 1255, 1282, 1300, 1329, 1342, 1351, 1365, 1386, 1408, 1418, 1462, 1497, 1502, 1504, 1527, 1545, 1569, 1571, 1594]]
# I5SIMTestDatasetTrueClassification = [[1, 22, 52, 67, 84, 88, 106, 124, 138, 156, 167, 172, 204, 228, 240, 245, 256, 283, 313, 322, 337, 355, 367, 375, 380, 382, 405, 421, 422, 449, 451, 452, 464, 468, 469, 519, 520, 539, 566, 596, 612, 627, 628, 642, 656, 683, 718, 780, 808, 817, 830, 831, 833, 835, 852, 853, 854, 870, 876, 878, 927, 948, 952, 958, 968, 972, 976, 1005, 1016, 1024, 1058, 1108, 1122, 1123, 1149,1152, 1190, 1217, 1236, 1243, 1244, 1257, 1260, 1325, 1331, 1346, 1348, 1375, 1382, 1390, 1393, 1416, 1433, 1445, 1451, 1491, 1521, 1557, 1580, 1588,36, 71,76, 80, 85, 155, 157, 171, 182, 211, 215, 224, 237, 238, 239, 292, 298, 311, 315, 329, 342, 361, 370, 384, 403, 415, 416, 419, 425, 437, 483, 485, 488, 497, 522, 528, 545, 561, 569, 571, 572, 574, 621, 645, 653, 664, 674, 699, 712, 732, 734, 740, 755, 773, 774, 802, 828, 841, 846, 872, 896, 903, 906, 940, 991, 1006, 1013, 1042, 1074, 1085, 1100, 1125, 1148, 1170, 1179, 1180, 1203, 1237, 1239, 1252, 1256, 1274, 1279, 1289, 1297, 1307, 1353, 1358, 1383, 1415, 1417, 1431, 1438,1449, 1457, 1506, 1542, 1544, 1547, 1579],
# [2, 8, 18, 32, 45, 59, 64, 79, 86, 96, 103, 110, 140, 166, 174, 199, 201, 202, 214, 235, 242, 247, 250, 262, 273, 278, 279, 286, 290, 326, 344, 345, 350, 363, 364, 424, 426, 427, 447, 470, 493, 501, 523, 526, 548, 563, 586, 599, 601, 602, 605, 632, 643, 661, 662, 677, 694, 702, 706, 751, 768, 815, 832, 871, 892, 930, 932, 934, 973, 984, 994, 1010, 1064,1080, 1115, 1133, 1136, 1166, 1173, 1258, 1298, 1302, 1313, 1319, 1320, 1321, 1326, 1328, 1333, 1341, 1349, 1385, 1403, 1478, 1489, 1512, 1556, 1559, 1565, 1572, 12, 25, 29, 97, 102, 107, 116, 119, 132, 141, 158, 178, 181, 252, 253, 309,332, 356, 359, 373, 374, 393, 402, 420, 433, 438, 448, 459, 472, 480, 482, 491,515, 517, 529, 530, 543, 581, 618, 623, 636, 650, 676, 687, 727, 728, 737, 742,787, 793, 807, 857, 858, 863, 882, 904, 907, 957, 969, 970, 971, 975, 1000, 1007, 1011, 1043, 1050, 1056, 1069, 1112, 1141, 1145, 1147, 1151, 1157, 1163, 1172,1246, 1316, 1335, 1352, 1360, 1376, 1387, 1399, 1412, 1468, 1505, 1509, 1510, 1513, 1514, 1517, 1529, 1567, 1576, 1578, 1581, 1583, 1591],
# [6, 21, 23, 34, 42, 53, 77, 90, 154, 168, 195, 264, 267, 269, 282, 284, 335, 336, 352, 354, 358, 379,400, 434, 444, 453, 463, 509, 511, 554, 556, 559, 580, 591, 594, 595, 615, 640,707, 714, 749, 756, 763, 764, 765, 792, 821, 843, 855, 860, 875, 894, 897, 920,954, 1002, 1012, 1025, 1028, 1057, 1076, 1083, 1109, 1127, 1130, 1134, 1144, 1153, 1160, 1194, 1195, 1198, 1207, 1209, 1242, 1245, 1262, 1265, 1284, 1373, 1392, 1407, 1429, 1446, 1461, 1470, 1477, 1479, 1485, 1511, 1519, 1520, 1534, 1539,1554, 1560, 1566, 1577, 1590, 1599, 14, 50, 55, 73, 94, 100, 109, 112, 144, 149, 183, 200, 207, 234, 249, 251, 258, 275, 297, 301, 305, 323, 346, 357, 386, 404, 409, 436, 440, 458, 496, 504, 525, 538, 570, 583, 608, 634, 635, 637, 665, 668, 673, 679, 682, 688, 689, 693, 711, 716, 752, 760, 783, 794, 819, 823, 836, 847, 881, 891, 893, 943, 946, 961, 974, 988, 995, 1001, 1003, 1040, 1075, 1155, 1171, 1228, 1259, 1264, 1269, 1291, 1304, 1305, 1308, 1332, 1343, 1356, 1357, 1398, 1401, 1427, 1428, 1435, 1482, 1483, 1484, 1496, 1533, 1535, 1555, 1574, 1586,1589],
# [5, 19, 46, 63, 68, 82, 111, 120, 129, 131, 145, 151, 152, 170, 176, 184,203, 208, 216, 220, 227, 246, 263, 302, 308, 331, 334, 351, 376, 378, 387, 396,408, 412, 428, 489, 500, 502, 555, 577, 622, 631, 651, 652, 686, 705, 745, 747,782, 798, 827, 874, 884, 885, 928, 937, 939, 956, 983, 1009, 1018, 1030, 1039,1062, 1063, 1070, 1072, 1073, 1084, 1093, 1103, 1107, 1128, 1158, 1168, 1169, 1224, 1227, 1230, 1272, 1315, 1327, 1336, 1388, 1394, 1395, 1400, 1409, 1432, 1447, 1448, 1452, 1453, 1465, 1476, 1503, 1515, 1523, 1525, 1582, 9, 13, 15, 41, 44, 93, 113, 122, 134, 136, 139, 173, 185, 189, 205, 241, 259, 260, 268, 272, 299, 318, 327, 353, 389, 394, 413, 430, 479, 492, 499, 503, 516, 531, 535, 573, 579, 590, 609, 614, 638, 660, 666, 670, 691, 717, 724, 757, 762, 796, 801, 803, 809, 811, 838, 844, 873, 888, 899, 905, 908, 913, 918, 929, 953, 977, 982, 990, 992, 1020, 1021, 1051, 1094, 1116, 1124, 1164, 1183, 1184, 1199, 1208, 1219, 1226,1303, 1306, 1323, 1334, 1345, 1354, 1364, 1374, 1377, 1402, 1422, 1501, 1531, 1540, 1553, 1585, 1592, 1597],
# [3, 20, 26, 39, 40, 57, 70, 78, 87, 92, 126, 133, 143, 147, 153, 160, 190, 198, 206, 210, 254, 270, 280, 295, 314, 321, 338, 339, 347, 360, 365, 399, 401, 429, 435, 471, 475, 487, 490, 505, 506, 507, 565, 600, 624, 625, 649, 654, 659, 675, 709, 713, 719, 722, 766, 775, 784, 790, 820, 849, 887, 914, 938, 941, 960, 962, 999, 1054, 1060, 1095, 1099, 1106, 1139, 1142, 1154, 1165, 1186, 1189, 1197, 1273, 1276, 1295, 1301, 1309, 1314, 1324, 1366, 1368,1379, 1436, 1440, 1450, 1459, 1473, 1500, 1528, 1532, 1558, 1563, 1568, 0, 4,27, 56, 66, 91, 117, 118, 121, 142, 194, 209, 221, 236, 243, 244, 248, 277, 293,317, 333, 348, 371, 481, 510, 514, 532, 551, 568, 575, 585, 604, 620, 629, 644,681, 700, 704, 720, 726, 743, 748, 770, 779, 795, 850, 851, 859, 867, 889, 911,924, 933, 949, 955, 967, 989, 998, 1019, 1052, 1098, 1105, 1117, 1126, 1131, 1132, 1138, 1156, 1162, 1167, 1175, 1176, 1200, 1238, 1268, 1277, 1278, 1283, 1292, 1310, 1330, 1339, 1350, 1372, 1391, 1404, 1406, 1419, 1463, 1466, 1480, 1481,1490, 1498, 1526, 1538, 1549, 1562, 1570, 1598],
# [24, 51, 54, 74, 108, 130, 148,186, 196, 226, 230, 261, 281, 294, 304, 307, 349, 362, 372, 383, 417, 465, 477,537, 546, 553, 560, 578, 592, 593, 611, 613, 616, 619, 630, 658, 669, 685, 692,696, 791, 805, 806, 834, 837, 840, 845, 856, 868, 869, 898, 915, 945, 951, 964,987, 1004, 1008, 1015, 1036, 1041, 1045, 1071, 1081, 1082, 1088, 1111, 1119, 1120, 1135, 1143, 1159, 1174, 1177, 1193, 1202, 1205, 1221, 1248, 1253, 1280, 1281, 1290, 1293, 1311, 1312, 1340, 1378, 1397, 1405, 1474, 1475, 1486, 1487, 1516,1518, 1537, 1552, 1573, 1596, 16, 28, 37, 104, 128, 159, 164, 175, 187, 188, 212, 217, 223, 255, 312, 341, 343, 392, 397, 398, 406, 410, 418, 454, 455, 461, 462, 478, 494, 495, 512, 540, 550, 558, 597, 626, 633, 729, 735, 741, 750, 761, 781, 797, 799, 814, 822, 824, 879, 900, 910, 966, 979, 981, 993, 996, 1037, 1044,1046, 1053, 1061, 1065, 1077, 1079, 1096, 1097, 1113, 1121, 1146, 1150, 1182, 1185, 1196, 1210, 1235, 1241, 1254, 1263, 1275, 1285, 1287, 1338, 1355, 1359, 1363, 1380, 1381, 1396, 1437, 1441, 1467, 1493, 1494, 1495, 1522, 1541, 1546, 1551,1564, 1575],
# [11, 17, 31, 48, 75, 89, 95, 98, 115, 123, 125, 135, 137, 161, 179, 191, 219, 222, 257, 266, 276, 291, 300, 310, 320, 324, 411, 473, 476, 498, 518, 534, 557, 576, 582, 587, 639, 646, 648, 663, 701, 710, 723, 731, 744, 759, 772, 776, 812, 813, 816, 818, 839, 861, 864, 866, 883, 886, 916, 921, 922, 965, 980, 985, 997, 1031, 1032, 1038, 1047, 1059, 1114, 1137, 1191, 1201, 1206, 1215, 1218, 1223, 1234, 1249, 1251, 1266, 1288, 1317, 1318, 1370, 1371, 1414, 1420, 1430, 1439, 1444, 1460, 1464, 1507, 1508, 1524, 1536, 1543, 1548, 7, 10, 35, 58, 61, 69, 99, 146, 163, 165, 192, 213, 231, 233, 274, 287, 328, 330, 366, 377, 390,395, 445, 446, 450, 456, 460, 508, 536, 541, 547, 549, 564, 567, 598, 606, 617,657, 671, 672, 695, 703, 725, 733, 736, 754, 769, 771, 778, 785, 786, 800, 810,829, 865, 877, 880, 909, 919, 931, 935, 942, 963, 986, 1023, 1026, 1055, 1068,1086, 1089, 1101, 1102, 1104, 1110, 1140, 1181, 1212, 1229, 1267, 1270, 1286, 1322, 1337, 1347, 1361, 1362, 1369, 1384, 1411, 1413, 1423, 1426, 1456, 1469, 1472, 1488, 1499, 1530, 1561, 1595],
# [38, 43, 60, 62, 101, 225, 229, 232, 285, 288,289, 306, 316, 319, 340, 368, 381, 423, 439, 441, 457, 467, 474, 521, 533, 542,544, 552, 562, 588, 589, 603, 610, 641, 647, 698, 708, 721, 730, 746, 753, 767,788, 825, 862, 890, 895, 902, 912, 917, 925, 926, 947, 959, 1017, 1022, 1029, 1048, 1049, 1066, 1078, 1091, 1092, 1129, 1187, 1192, 1204, 1213, 1216, 1222, 1225, 1231, 1232, 1233, 1247, 1250, 1261, 1271, 1294, 1296, 1299, 1344, 1367, 1389,1410, 1421, 1424, 1425, 1434, 1442, 1443, 1454, 1455, 1458, 1471, 1492, 1550, 1584, 1587, 1593, 30, 33, 47, 49, 65, 72, 81, 83, 105, 114, 127, 150, 162, 169,177, 180, 193, 197, 218, 265, 271, 296, 303, 325, 369, 385, 388, 391, 407, 414,431, 432, 442, 443, 466, 484, 486, 513, 524, 527, 584, 607, 655, 667, 678, 680,684, 690, 697, 715, 738, 739, 758, 777, 789, 804, 826, 842, 848, 901, 923, 936,944, 950, 978, 1014, 1027, 1033, 1034, 1035, 1067, 1087, 1090, 1118, 1161, 1178,1188, 1211, 1214, 1220, 1240, 1255, 1282, 1300, 1329, 1342, 1351, 1365, 1386, 1408, 1418, 1462, 1497, 1502, 1504, 1527, 1545, 1569, 1571, 1594],[],[],[],[],[],[],[],[]]
# print NMI(I5SIMTestDatasetTrueClassification,I5SIM3DatasetTrueClassification)
开发者ID:PointwiseLSH,项目名称:PointwiseLSHProject,代码行数:32,代码来源:measurements.py
示例8: pairwise_MI
def pairwise_MI(data):
    """Return a symmetric DataFrame of pairwise normalized mutual information
    between every pair of columns in data.

    Rows containing NaN are dropped per column pair; the diagonal is left
    unfilled (NaN)."""
    cols = data.columns
    mi_matrix = pd.DataFrame(index=cols, columns=cols)
    for first, second in combinations(cols, 2):
        pair = data[[first, second]].dropna()
        score = normalized_mutual_info_score(pair[first], pair[second])
        mi_matrix.loc[first, second] = score
        mi_matrix.loc[second, first] = score
    return mi_matrix.astype(float)
示例9: calcNMI
def calcNMI():
dataset = readARFF();
subSet = dataset[['class', 'cluster']]
#print subSet
NMI = normalized_mutual_info_score(subSet['class'], subSet['cluster'])
print NMI
开发者ID:av-7,项目名称:Decision-Tree-Classifier-and-Clustering,代码行数:9,代码来源:external_evaluation_measures.py
示例10: main
def main():
file1 = sys.argv[1]
file2 = sys.argv[2]
c_true = {}
c_pred = {}
#read data from file
with open(file1) as fd1, open(file2) as fd2:
c_true = eval(fd1.readline())
c_pred = eval(fd2.readline())
#order the data in dictionary data structure
c_true_order = collections.OrderedDict(sorted(c_true.items()))
c_pred_order = collections.OrderedDict(sorted(c_pred.items()))
c_true_label = []
c_pred_label = []
print c_true_order
#make list with community label
for k, v in c_true_order.items():
c_true_label.append(v)
for k, v in c_pred_order.items():
c_pred_label.append(v)
simi = normalized_mutual_info_score(c_true_label,c_pred_label)
DATA_FILE = sys.argv[3].split("/")
FILE_LOG_NAME = "LOG_File_"+(DATA_FILE[-1])+ ".xlsx"
Kcore_Value = int(sys.argv[4])
if(not os.path.exists(FILE_LOG_NAME)):
wb = openpyxl.Workbook()
sheet = wb.active
sheet.title = "Sheet1"
sheet['A1'] = 'K/R Value'
sheet['B1'] = 'NMI Similarity'
sheet['A2'] = 'v=10%'
sheet['A3'] = 'v=20%'
sheet['A4'] = 'v=30%'
sheet['A5'] = 'v=40%'
sheet['A6'] = 'v=50%'
sheet['A7'] = 'v=60%'
sheet['A8'] = 'v=70%'
sheet['A9'] = 'v=80%'
sheet['A10'] = 'v=90%'
sheet['A11'] = 'v=100%'
else:
wb = openpyxl.load_workbook(FILE_LOG_NAME)
sheet = wb.get_sheet_by_name('Sheet1')
sheet['B'+str(Kcore_Value + 1)] = simi
wb.save(FILE_LOG_NAME)
开发者ID:hoduan,项目名称:SU-Community-Detection,代码行数:52,代码来源:similarity.py
示例11: get_loss
def get_loss(ckernel_net, data_loader):
    # Compute final average loss
    # NOTE(review): the return statement sits inside the batch loop, so only
    # the FIRST batch of data_loader is ever evaluated -- despite the
    # "average" in the comment above. Confirm this is intended.
    for idx, (data, target) in enumerate(data_loader):
        # Cast the batch to the configured tensor type; 'db' is a
        # module-level config dict not visible here -- presumably holds a
        # torch dtype under 'dataType' (TODO confirm).
        data = Variable(data.type(db['dataType']))
        loss = ckernel_net.CAE_compute_loss(data)
        dataOut = ckernel_net(data)
        dataOut = dataOut.cpu().data.numpy()
        # Cluster the embedded batch into 10 groups and score against the
        # true targets with NMI.
        allocation = KMeans(10).fit_predict(dataOut)
        nmi = normalized_mutual_info_score(allocation, target.numpy())
        # Returns [scalar loss value, nmi].
        return [loss.cpu().data.numpy()[0], nmi]
示例12: __eval_lda_clustering_20ng
def __eval_lda_clustering_20ng():
    """Train and evaluate LDA-based clustering on the 20-newsgroups-bydate
    corpus: build the MM corpus, train the model, assign each test document
    to its dominant topic, and print NMI and cluster accuracy against the
    gold labels.

    All paths are hard-coded to a local e:/dc/... layout.
    """
    text_doc_file = 'e:/dc/20ng_bydate/twe/docs-nl.txt'
    dict_file = 'e:/dc/20ng_bydate/lda/all-docs.dict'
    mm_file = 'e:/dc/20ng_bydate/lda/all-docs.mm'
    lda_model_file = 'e:/dc/20ng_bydate/lda/lda-model'
    dataset_label_file = 'e:/dc/20ng_bydate/doc_split_labels.bin'
    test_label_file = 'e:/dc/20ng_bydate/test_labels.bin'
    # Build the corpus and train the model (module-level helpers, not
    # visible here).
    __text_file_to_mm_corpus(text_doc_file, dict_file, mm_file)
    __train_lda_model(dict_file, mm_file, lda_model_file)
    dataset_labels = ioutils.load_labels_file(dataset_label_file)
    lda_model = gensim.models.ldamodel.LdaModel.load(lda_model_file)
    mm_corpus = gensim.corpora.MmCorpus(mm_file)
    sys_labels = list()
    for i, doc in enumerate(mm_corpus):
        # dataset_labels marks the train/test split; 0 presumably means
        # "training document" and is skipped -- TODO confirm against
        # the doc_split_labels producer.
        if dataset_labels[i] == 0:
            continue
        topic_dist = lda_model[doc]
        # print topic_dist
        # Dominant topic: largest weight wins, first on ties, topic 0 if
        # no weight exceeds zero.
        cluster_idx = 0
        max_dist = 0
        for tup in topic_dist:
            if tup[1] > max_dist:
                cluster_idx = tup[0]
                max_dist = tup[1]
        sys_labels.append(cluster_idx)
        # Progress indicator every 1000 test documents.
        if len(sys_labels) % 1000 == 0:
            print len(sys_labels)
        # if i > 10:
        #     break
    print len(sys_labels)
    gold_labels = ioutils.load_labels_file(test_label_file)
    print len(gold_labels)
    print normalized_mutual_info_score(gold_labels, sys_labels)
    print cluster_accuracy(gold_labels, sys_labels)
示例13: main
def main():
file1 = sys.argv[1]
file2 = sys.argv[2]
c_true = {}
c_pred = {}
#read data from file
with open(file1) as fd1, open(file2) as fd2:
c_true = eval(fd1.readline())
c_pred = eval(fd2.readline())
#order the data in dictionary data structure
c_true_order = collections.OrderedDict(sorted(c_true.items()))
c_pred_order = collections.OrderedDict(sorted(c_pred.items()))
c_true_label = []
c_pred_label = []
#make list with community label
for k, v in c_true_order.items():
c_true_label.append(v)
for k, v in c_pred_order.items():
c_pred_label.append(v)
print normalized_mutual_info_score(c_true_label,c_pred_label)
开发者ID:AltmerX,项目名称:SUCD,代码行数:23,代码来源:similarity.py
示例14: test_v_measure_and_mutual_information
def test_v_measure_and_mutual_information(seed=36):
    # Check relation between v_measure, entropy and mutual information
    # Fix: np.int was only a deprecated alias of the builtin int and was
    # removed in NumPy 1.24; using it crashes on modern NumPy.
    for i in np.logspace(1, 4, 4).astype(int):
        random_state = np.random.RandomState(seed)
        labels_a, labels_b = (random_state.randint(0, 10, i),
                              random_state.randint(0, 10, i))
        # V-measure == 2*MI / (H(a)+H(b)) (checked to 0 decimal places).
        assert_almost_equal(v_measure_score(labels_a, labels_b),
                            2.0 * mutual_info_score(labels_a, labels_b) /
                            (entropy(labels_a) + entropy(labels_b)), 0)
        # V-measure equals NMI under the arithmetic averaging method.
        avg = 'arithmetic'
        assert_almost_equal(v_measure_score(labels_a, labels_b),
                            normalized_mutual_info_score(labels_a, labels_b,
                                                         average_method=avg)
                            )
示例15: evaluate
def evaluate(self, partition, clustered_ids):
    """Score a partition against the ground-truth classes with external
    validation measures (NMI, AMI, ARI).

    Returns an empty dict when no class information is available."""
    # no class info?
    if not self.has_class_info():
        return {}
    # Build the ground-truth label vector aligned with clustered_ids.
    n = len(clustered_ids)
    classes_subset = np.zeros(n)
    for idx in range(n):
        classes_subset[idx] = self.class_map[clustered_ids[idx]]
    return {
        "external-nmi": normalized_mutual_info_score(classes_subset, partition),
        "external-ami": adjusted_mutual_info_score(classes_subset, partition),
        "external-ari": adjusted_rand_score(classes_subset, partition),
    }
示例16: sklearn_measures
def sklearn_measures(U, V):
# http://scikit-learn.org/stable/modules/classes.html#clustering-metrics
import sklearn.metrics.cluster as sym
U_labels = np.nonzero(U)[1]
V_labels = np.nonzero(V)[1]
print U_labels, V_labels
# V2_labels = np.nonzero(V2)[1]
print 'entro(U)=',sym.entropy(U_labels),'entro(V)=',sym.entropy(V_labels), 'entro(U,V)=',sym.mutual_info_score(U_labels, V_labels)
res = [ ['ari', 'nmi', 'ami', 'vm' ], \
[ sym.adjusted_rand_score(U_labels, V_labels),\
sym.normalized_mutual_info_score(U_labels, V_labels),\
sym.adjusted_mutual_info_score(U_labels, V_labels),\
sym.v_measure_score(U_labels, V_labels)]]
print res
return res
开发者ID:Bigxiaofeng,项目名称:CommunityEvaluation,代码行数:15,代码来源:cluster_agreement_examples.py
示例17: checkout_CAE
def checkout_CAE():
    """Load the pickled MNIST validation split and a pretrained kernel net,
    embed the data, k-means it into 10 clusters, and print the NMI against
    the true labels.

    Relies on the module-level 'db' config dict for the tensor dtype.
    """
    X = pickle.load( open( 'mnist_60000_validation.pk', "rb" ) )
    Y = pickle.load( open( 'mnist_60000_label_validation.pk', "rb" ) )
    Y = torch.from_numpy(Y)
    kinfo = pickle.load( open( 'kernel_mnist.p', "rb" ) )
    cnn = kinfo['kernel_net']
    X_var = Variable(X.type(db['dataType']))
    xout = cnn(X_var)
    xout = xout.cpu().data.numpy()
    allocation = KMeans(10).fit_predict(xout)
    nmi = normalized_mutual_info_score(allocation, Y.numpy())
    # Fix: the original was print('nmi : %.3f', nmi), which prints the tuple
    # ('nmi : %.3f', <value>) instead of formatting the number; use %
    # interpolation.
    print('nmi : %.3f' % nmi)
示例18: bow_kmeans
def bow_kmeans(bow_vecs, gold_labels, num_clusters):
print 'performing kmeans ...'
model = KMeans(n_clusters=num_clusters, n_jobs=4, n_init=20)
model.fit(bow_vecs)
# print len(gold_labels), 'samples'
nmi_score = normalized_mutual_info_score(gold_labels, model.labels_)
purity_score = purity(gold_labels, model.labels_)
ri_score = rand_index(gold_labels, model.labels_)
# print 'NMI: %f' % normalized_mutual_info_score(gold_labels, model.labels_)
# print 'Purity: %f' % purity(gold_labels, model.labels_)
# print 'Accuracy: %f' % cluster_accuracy(gold_labels, model.labels_)
print 'NMI: %f Purity: %f Rand index: %f' % (nmi_score, purity_score, ri_score)
return nmi_score, purity_score, ri_score
开发者ID:hldai,项目名称:emadr-exp,代码行数:16,代码来源:bow.py
示例19: nimSimilarity
def nimSimilarity(c_true, c_pred):
    '''Return the Normalized Mutual Information between two clusterings.

    Parameters:
        c_true: communities detected without k-core; a dict mapping community
            node to community label
        c_pred: communities detected with k-core; a dict mapping community
            node to community label

    Example:
        x = {1: 1, 2: 1, 3: 0, 4: 0}
        y = {1: 0, 2: 0, 3: 1, 4: 1}
        print nimSimilarity(x, y)'''
    # Put the community labels (duplicates expected) into flat lists,
    # in the dicts' value-iteration order.
    true_labels = list(c_true.values())
    pred_labels = list(c_pred.values())
    return normalized_mutual_info_score(true_labels, pred_labels)
示例20: mutualinfo
def mutualinfo(df):
    """Return the feature columns of df whose normalized mutual information
    with the label column 'L' exceeds 0.02."""
    label = df['L']
    # Columns excluded from the candidate feature set.
    VALUES = ['sentiment_polarity','sentiment_subjectivity','absPolarity','Clean tweet', 'L']
    candidates = [name for name in list(df.columns.values) if name not in VALUES]
    selected = []
    for column_name in candidates:
        score = normalized_mutual_info_score(df[column_name], label)
        if score > 0.02:
            selected.append(column_name)
    return selected
注:本文中的sklearn.metrics.cluster.normalized_mutual_info_score函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论