In [1]:
import os
import csv
import time
import platform
import datetime
import pandas as pd
import networkx as nx
from graph_partitioning import GraphPartitioning, utils

cols = ["WASTE", "CUT RATIO", "EDGES CUT", "TOTAL COMM VOLUME", "Qds", "CONDUCTANCE", "MAXPERM", "RBSE", "NMI", "FSCORE", "FSCORE RELABEL IMPROVEMENT", "LONELINESS"]

pwd = %pwd


ORDERED_ARRIVALS_DIR = os.path.join(pwd, "data", "ideal_node_ordering", "ordered_centralities")

analysisOnly = True


# [] 15 rankings - minimal binning


# parametrized config
parametrized_config = {
    "DATA_FILENAME": os.path.join(pwd, "data", "ideal_node_ordering", "edgelist", "nn#networkID#.txt"),
    "OUTPUT_DIRECTORY": os.path.join(pwd, "output", "ideal_node_ordering"),

    # Set which algorithm is run for the PREDICTION MODEL.
    # One of: 'FENNEL', 'SCOTCH' or 'PATOH'
    "PREDICTION_MODEL_ALGORITHM": "PATOH",

    # Alternatively, read an input file for the prediction model.
    # Set to empty to generate the prediction model using the algorithm above.
    "PREDICTION_MODEL": "",

    
    "PARTITIONER_ALGORITHM": "PATOH",

    # File containing simulated arrivals. This is used to simulate nodes
    # arriving at the shelter. Nodes are represented by line number; a value of
    # 1 means the node has arrived; a value of 0 means the node has not
    # arrived or does not need a shelter.
    "SIMULATED_ARRIVAL_FILE": os.path.join(pwd,
                                           "data",
                                           "predition_model_tests",
                                           "dataset_1_shift_rotate",
                                           "simulated_arrival_list",
                                           "percentage_of_prediction_correct_#correctedness#",
                                           "arrival_#correctedness#_#networkID#.txt"
                                          ),
    
    # File containing the prediction of a node arriving. This differs from the
    # simulated arrivals: the values in this file are known before the disaster.
    "PREDICTION_LIST_FILE": os.path.join(pwd,
                                         "data",
                                         "predition_model_tests",
                                         "dataset_1_shift_rotate",
                                         "prediction_list",
                                         "prediction_#networkID#.txt"
                                        ),

    # File containing the geographic location of each node, in "x,y" format.
    "POPULATION_LOCATION_FILE": os.path.join(pwd,
                                             "data",
                                             "predition_model_tests",
                                             "coordinates",
                                             "coordinates_#networkID#.txt"
                                            ),

    # Number of shelters
    "num_partitions": 4,

    # The number of iterations when making prediction model
    "num_iterations": 12,

    # Percentage of the prediction model to use before discarding it.
    # When set to 0, the prediction model is discarded immediately (useful for one-shot).
    "prediction_model_cut_off": 0.0,

    # Alpha value used in one-shot (when restream_batches set to 1)
    "one_shot_alpha": 0.5,

    "use_one_shot_alpha" : False,

    # Number of arrivals to batch before recalculating alpha and restreaming.
    # When set to 1, one-shot is used with the alpha value above.
    "restream_batches": 50,

    # When the batch size is reached: if set to True, each node is assigned
    # individually, first in first out. If set to False, the entire batch
    # is processed and emptied before working on the next batch.
    "sliding_window": False,

    # Create virtual nodes based on prediction model
    "use_virtual_nodes": False,

    # Virtual nodes: edge weight
    "virtual_edge_weight": 1.0,

    # Loneliness score parameter. Used when scoring a partition by how many
    # lonely nodes exist.
    "loneliness_score_param": 1.2,
    
    
    "compute_metrics_enabled": True,

    ####
    # GRAPH MODIFICATION FUNCTIONS

    # Also enables the edge calculation function.
    "graph_modification_functions": True,

    # If set, the node weight is set to 100 if the node arrives at the shelter,
    # otherwise the node is removed from the graph.
    "alter_arrived_node_weight_to_100": False,

    # Uses generalized additive models (GAMs) from R to generate predictions for
    # nodes that have not arrived. This sets the node weight on unarrived nodes
    # to the prediction given by a GAM.
    # Needs POPULATION_LOCATION_FILE to be set.
    "alter_node_weight_to_gam_prediction": False,

    # The value of 'k' used in the GAM is the number of arrived nodes, capped at
    # this maximum value.
    "gam_k_value": 100,

    # Alter the edge weight for nodes that haven't arrived. This is a way to
    # de-emphasise the prediction model for the unknown nodes.
    "prediction_model_emphasis": 1.0,
    
    # Applies the prediction_list_file node weights to the nodes in the graph
    # while the prediction model is being computed, then removes the weights
    # for the cutoff and batch-arrival modes.
    "apply_prediction_model_weights": True,
    
    # Path to the scotch shared library
    "SCOTCH_LIB_PATH": os.path.join(pwd, "libs/scotch/macOS/libscotch.dylib")
    if 'Darwin' in platform.system()
    else "/usr/local/lib/libscotch.so",
    
    # Path to the PaToH shared library
    "PATOH_LIB_PATH": os.path.join(pwd, "libs/patoh/lib/macOS/libpatoh.dylib")
    if 'Darwin' in platform.system()
    else os.path.join(pwd, "libs/patoh/lib/linux/libpatoh.so"),
    
    "PATOH_ITERATIONS": 5,
        
    # Expansion modes: 'avg_node_weight', 'total_node_weight', 'smallest_node_weight',
    # 'largest_node_weight'.
    # Add '_squared' or '_sqrt' at the end of any of the above for weight^2 or
    # sqrt(weight), e.g. 'avg_node_weight_squared'.
    "PATOH_HYPEREDGE_EXPANSION_MODE": 'no_expansion',
    
    # Edge Expansion: average, total, minimum, maximum, product, product_squared, sqrt_product
    "EDGE_EXPANSION_MODE" : 'total',
    
    # Whether nodes should be reordered using a centrality metric for optimal node
    # assignments in batch mode. This is specific to FENNEL; at the moment, Leverage
    # Centrality is used to compute the new node orders.
    "FENNEL_NODE_REORDERING_ENABLED": False,
    
    # Whether the Friend of a Friend scoring system is active during FENNEL partitioning.
    # FOAF uses information about a node's friends to determine the best partition when
    # the node arrives at a shelter and no shelter already has any of its friends.
    "FENNEL_FRIEND_OF_A_FRIEND_ENABLED": False,
    
    # Alters how much information is printed:
    # 0 - will print nothing, useful for batch operations.
    # 1 - prints basic information on assignments and operations.
    # 2 - prints more information as it batches arrivals.
    "verbose": 0
}
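
# Hedged sketch: the "#networkID#" and "#correctedness#" tokens in the paths above
# are placeholders; a concrete config can be produced by plain string substitution
# (the same replace() call used for dataFiles below). `materialise_config` is a
# hypothetical helper, not part of graph_partitioning.
def materialise_config(config, network_id, correctedness):
    out = dict(config)
    for k, v in out.items():
        if isinstance(v, str):
            out[k] = v.replace("#networkID#", str(network_id)) \
                      .replace("#correctedness#", str(correctedness))
    return out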

#gp = GraphPartitioning(config)

# Optional: shuffle the order of nodes arriving
# Arrival order should not be shuffled if using GAM to alter node weights
#random.shuffle(gp.arrival_order)

%pylab inline


Populating the interactive namespace from numpy and matplotlib

In [2]:
# load the centralities files
import scipy.stats as sstats
centralities = {}

class CentralitiesExperiment:
    def __init__(self, dirName, dataPath):
        self.dirName = dirName
        self.dataPath = dataPath
        self.outputPath = os.path.join(parametrized_config["OUTPUT_DIRECTORY"], dirName)
                
        parts = dirName.split("_")
        
        self.orderType = parts[len(parts) - 1]
        
        name = dirName.replace('_' + self.orderType, "")
        self.centralityType = name.replace("_", " ")

        self.experimentFileNames = []
        self.experimentFilePaths = []
        
        self.scores = []
        self.avgScores = []
        self.varScores = []
        self.stdScores = []
        self.skewnessScores = []
        self.modeScores = []
        
        self.totalScore = 0.0

    def computeStatsScore(self):
        if(len(self.scores) == 0):
            return
        
        scores = []
        
        self.avgScores = []
        self.varScores = []
        self.stdScores = []
        self.skewnessScores = []
        self.modeScores = []

        for i in range(0, len(self.scores[0])):
            scores.append([])
            self.avgScores.append(0.0)
            self.varScores.append(0.0)
            self.stdScores.append(0.0)
            self.skewnessScores.append(0.0)
            self.modeScores.append(0.0)
        
        for score in self.scores:
            for i, val in enumerate(score):
                scores[i].append(float(val))
        
        for i, data in enumerate(scores):
            data = np.array(data)
            self.avgScores[i] = sstats.tmean(data)
            self.varScores[i] = sstats.tvar(data)
            self.stdScores[i] = sstats.tstd(data)
            self.skewnessScores[i] = sstats.skew(data)
            # scipy's mode returns (modal values, counts); stored as "value:count"
            mode = sstats.mode(data)
            self.modeScores[i] = str(mode[0][0]) + ":" + str(mode[1][0])
                
    def _computeStatsScore(self):
        if(len(self.scores) == 0):
            return
        
        self.avgScores = []
        self.varScores = []
        self.stdScores = []
        
        for i in range(0, len(self.scores[0])):
            self.avgScores.append(0.0)
            self.varScores.append(0.0)
        
        for score in self.scores:
            for i, val in enumerate(score):
                self.avgScores[i] = self.avgScores[i] + float(val)
        
        for i, total in enumerate(self.avgScores):
            self.avgScores[i] = total / len(self.scores)
            
        # compute variance
        for score in self.scores:
            for i, val in enumerate(score):
                mean = self.avgScores[i]
                diffsquared = (float(val) - mean) ** 2
                self.varScores[i] = self.varScores[i] + diffsquared
        
        for i, total in enumerate(self.varScores):
            self.varScores[i] = total / len(self.scores)
            self.stdScores.append(self.varScores[i] ** 0.5)
            
    def printScoreline(self, scoreline):
        print("{0:.5f}\t{1:.10f}\t{2}\t{3}\t{4}\t{5}\t{6}".format(scoreline[0],scoreline[1],scoreline[2],scoreline[3],scoreline[4],scoreline[5],scoreline[6]))
            
    def saveScores(self):
        try:
            os.makedirs(self.outputPath)
        except Exception:
            # output directory may already exist
            pass
        fName = os.path.join(self.outputPath, "scores.txt")
        with open(fName, 'w+') as f:
            f.write(self.scoreStr(self.avgScores) + "\n")
            f.write(self.scoreStr(self.varScores) + "\n")
            f.write(self.scoreStr(self.stdScores) + "\n")
            f.write(self.scoreStr(self.modeScores) + "\n")
            f.write(self.scoreStr(self.skewnessScores) + "\n")
            
            for score in self.scores:
                f.write(self.scoreStr(score) + "\n")

    def loadScores(self):
        self.scores = []
        fName = os.path.join(self.outputPath, "scores.txt")
        with open(fName, 'r') as f:
            count = 0
            for line in f:
                if(count < 5):
                    count += 1
                    continue
                line = line.strip()
                parts = line.split(',')
                score = []
                for part in parts:
                    score.append(float(part))
                self.scores.append(score)
                
    def scoreStr(self, score):
        s = ""
        for val in score:
            if len(s) > 0:
                s = s + ","
            s = s + str(val)
        return s
            
    def centrality(self):
        return self.centralityType
    
    def ordering(self):
        return self.orderType
    
    def metadata(self):
        return self.centralityType + " centrality, " + self.orderType + " ordering n." + str(self.numExperiments()) + " experiments"
    
    def loadExperimentFiles(self):
        for root, dirs, files in os.walk(self.dataPath):
            for file in files:
                if(file.endswith(".txt")):
                    self.experimentFileNames.append(file.split(".txt")[0])
        self.sortExperiments()

    def sortExperiments(self):
        indices = {}
        for f in self.experimentFileNames:
            parts = f.split("_")
            index = int(parts[len(parts) - 1])
            indices[index] = f
        
        self.experimentFileNames = []

        for idx in sorted(indices.keys()):
            fn = indices[idx]
            self.experimentFileNames.append(fn)
            self.experimentFilePaths.append(os.path.join(self.dataPath, fn) + ".txt")

    def numExperiments(self):
        return len(self.experimentFilePaths)
            
    def getDataExperimentPath(self, experimentNumber):
        '''experimentNumber ranges from 0 to numExperiments() - 1'''
        if(experimentNumber >= 0 and experimentNumber < len(self.experimentFilePaths)):
            return self.experimentFilePaths[experimentNumber]
        return ""
    
    def getOutputExperimentPath(self, experimentNumber):
        '''experimentNumber ranges from 0 to numExperiments() - 1'''
        if(experimentNumber >= 0 and experimentNumber < len(self.experimentFileNames)):
            experiment = self.experimentFileNames[experimentNumber].split(".txt")
            outFile = experiment[0] + "_out.txt"
            return os.path.join(self.outputPath, outFile)
        return ""
        
    def print(self):
        print(self.dataPath)
        for i, f in enumerate(self.experimentFileNames):
            print(i," ", f)
            print(self.experimentFilePaths[i])

dataFiles = []
for i in range(1, 41):
    dataFiles.append(parametrized_config['DATA_FILENAME'].replace("#networkID#", str(i)))
# traverse root directory, and list directories as dirs and files as files
for root, dirs, files in os.walk(ORDERED_ARRIVALS_DIR):
    for directory in dirs:
        centrality = CentralitiesExperiment(directory, os.path.join(ORDERED_ARRIVALS_DIR, directory))
        centrality.loadExperimentFiles()
        centralities[directory] = centrality
        print(centrality.metadata())


AA centrality, random ordering n.40 experiments
Alpha centrality, HL ordering n.40 experiments
Alpha centrality, LH ordering n.40 experiments
Average distance centrality, HL ordering n.40 experiments
Average distance centrality, LH ordering n.40 experiments
Barycenter centrality centrality, HL ordering n.40 experiments
Barycenter centrality centrality, LH ordering n.40 experiments
Betweenness centrality, HL ordering n.40 experiments
Betweenness centrality, LH ordering n.40 experiments
BottleNeck centrality centrality, HL ordering n.40 experiments
BottleNeck centrality centrality, LH ordering n.40 experiments
Bridging centrality centrality, HL ordering n.40 experiments
Bridging centrality centrality, LH ordering n.40 experiments
Centroid centrality centrality, HL ordering n.40 experiments
Centroid centrality centrality, LH ordering n.40 experiments
Closeness Freeman centrality, HL ordering n.40 experiments
Closeness Freeman centrality, LH ordering n.40 experiments
Closeness VariantLatora centrality, HL ordering n.40 experiments
Closeness VariantLatora centrality, LH ordering n.40 experiments
ClusterRank centrality, HL ordering n.40 experiments
ClusterRank centrality, LH ordering n.40 experiments
Communicability betweenness centrality centrality, HL ordering n.40 experiments
Communicability betweenness centrality centrality, LH ordering n.40 experiments
Community centrality centrality, HL ordering n.40 experiments
Community centrality centrality, LH ordering n.40 experiments
Core decomposition centrality, HL ordering n.40 experiments
Core decomposition centrality, LH ordering n.40 experiments
Cross clique centrality centrality, LH ordering n.40 experiments
Cross clique connectivity centrality, HL ordering n.40 experiments
Current flow closeness centrality centrality, HL ordering n.40 experiments
Current flow closeness centrality centrality, LH ordering n.40 experiments
Dangalchev closeness centrality centrality, HL ordering n.40 experiments
Dangalchev closeness centrality centrality, LH ordering n.40 experiments
Decay centrality centrality, HL ordering n.40 experiments
Decay centrality centrality, LH ordering n.40 experiments
Degree centrality centrality, HL ordering n.40 experiments
Degree centrality centrality, LH ordering n.40 experiments
Diffusion degree centrality, HL ordering n.40 experiments
Diffusion degree centrality, LH ordering n.40 experiments
DMNC centrality centrality, HL ordering n.40 experiments
DMNC centrality centrality, LH ordering n.40 experiments
Eccentricity centrality, HL ordering n.40 experiments
Eccentricity centrality, LH ordering n.40 experiments
Effectiveness centrality centrality, HL ordering n.40 experiments
Effectiveness centrality centrality, LH ordering n.40 experiments
Eigenvector centrality, HL ordering n.40 experiments
Eigenvector centrality, LH ordering n.40 experiments
Entropy centrality centrality, HL ordering n.40 experiments
Entropy centrality centrality, LH ordering n.40 experiments
EPC centrality, HL ordering n.40 experiments
EPC centrality, LH ordering n.40 experiments
Flow betweenness centrality centrality, HL ordering n.40 experiments
Flow betweenness centrality centrality, LH ordering n.40 experiments
Information centrality centrality, HL ordering n.40 experiments
Information centrality centrality, LH ordering n.40 experiments
Kleinbergs centrality HITS centrality, HL ordering n.40 experiments
Kleinbergs centrality HITS centrality, LH ordering n.40 experiments
LAC centrality, HL ordering n.40 experiments
LAC centrality, LH ordering n.40 experiments
Lapacian centrality centrality, HL ordering n.40 experiments
Lapacian centrality centrality, LH ordering n.40 experiments
Leverage centrality centrality, HL ordering n.40 experiments
Leverage centrality centrality, LH ordering n.40 experiments
Lin centrality centrality, HL ordering n.40 experiments
Lin centrality centrality, LH ordering n.40 experiments
Load centrality centrality, HL ordering n.40 experiments
Load centrality centrality, LH ordering n.40 experiments
Lobby index centrality, HL ordering n.40 experiments
Lobby index centrality, LH ordering n.40 experiments
Local assortativity centrality, HL ordering n.40 experiments
Local assortativity centrality, LH ordering n.40 experiments
Local clustering coefficients centrality, HL ordering n.40 experiments
Local clustering coefficients centrality, LH ordering n.40 experiments
Markov centrality centrality, HL ordering n.40 experiments
Markov centrality centrality, LH ordering n.40 experiments
MCC centrality centrality, HL ordering n.40 experiments
MCC centrality centrality, LH ordering n.40 experiments
MNC centrality centrality, HL ordering n.40 experiments
MNC centrality centrality, LH ordering n.40 experiments
Neighborhood connectivity centrality, HL ordering n.40 experiments
Neighborhood connectivity centrality, LH ordering n.40 experiments
Network centrality centrality, HL ordering n.40 experiments
Network centrality centrality, LH ordering n.40 experiments
Network fragmentation GeodesicDistanceWeighted centrality, HL ordering n.40 experiments
Network fragmentation GeodesicDistanceWeighted centrality, LH ordering n.40 experiments
Network fragmentation centrality, HL ordering n.40 experiments
Network fragmentation centrality, LH ordering n.40 experiments
Path centrality centrality, HL ordering n.40 experiments
Path centrality centrality, LH ordering n.40 experiments
Political independence index centrality, HL ordering n.40 experiments
Political independence index centrality, LH ordering n.40 experiments
Radiality centrality centrality, HL ordering n.40 experiments
Radiality centrality centrality, LH ordering n.40 experiments
Random walk betweenness centrality, HL ordering n.40 experiments
Random walk betweenness centrality, LH ordering n.40 experiments
Random walk closeness centrality, HL ordering n.40 experiments
Random walk closeness centrality, LH ordering n.40 experiments
SALSA centrality, HL ordering n.40 experiments
SALSA centrality, LH ordering n.40 experiments
Semi local centrality centrality, HL ordering n.40 experiments
Semi local centrality centrality, LH ordering n.40 experiments
Shortest path betweenness centrality, HL ordering n.40 experiments
Shortest path betweenness centrality, LH ordering n.40 experiments
Shortest path closeness centrality, HL ordering n.40 experiments
Shortest path closeness centrality, LH ordering n.40 experiments
Shortest path degree centrality, HL ordering n.40 experiments
Shortest path degree centrality, LH ordering n.40 experiments
Strength weighted vertex degree centrality, HL ordering n.40 experiments
Strength weighted vertex degree centrality, LH ordering n.40 experiments
Stress centrality centrality, HL ordering n.40 experiments
Stress centrality centrality, LH ordering n.40 experiments
Subgraph centrality, HL ordering n.40 experiments
Subgraph centrality, LH ordering n.40 experiments
Topological coefficient centrality, HL ordering n.40 experiments
Topological coefficient centrality, LH ordering n.40 experiments

In [3]:
# run the experiments here
import pyximport
pyximport.install()
from graph_partitioning import fennel as fnl
from graph_partitioning import scotch_partitioner as sctch
from graph_partitioning import patoh_partitioner as ptoh
from graph_partitioning import GraphPartitioning, utils

fennel = fnl.FennelPartitioner(0.5)
scotch = sctch.ScotchPartitioner(parametrized_config['SCOTCH_LIB_PATH'])
patoh = ptoh.PatohPartitioner(parametrized_config['PATOH_LIB_PATH'], hyperedgeExpansionMode=parametrized_config['PATOH_HYPEREDGE_EXPANSION_MODE'])

def loadGraph(edgeFile):
    # Edge files are plain text: two space-separated integer node IDs per line.
    G = nx.Graph()
    edges = []
    with open(edgeFile, 'r') as f:
        for line in f:
            line = line.strip()
            line = line.split(" ")
            n1 = int(line[0])
            n2 = int(line[1])
            
            G.add_node(n1)
            G.add_node(n2)
            
            edges.append((n1, n2))
        for edge in edges:
            G.add_edge(edge[0], edge[1])
    # networkx 1.x-style calls (attribute name before values); the scalar weight
    # is applied to every node/edge.
    nx.set_node_attributes(G, 'weight', 1.0)
    nx.set_edge_attributes(G, 'weight', 1.0)
    return G

def loadArrivals(arrivalFile):
    # Arrival files are plain text: one integer node ID per line, in arrival order.
    arrivals = []
    with open(arrivalFile, 'r') as f:
        for line in f:
            line = line.strip()
            arrivals.append(int(line))
    return np.array(arrivals, dtype=np.int32)

def generateArray(num, value):
    # int32 array of length `num` filled with `value`
    return np.full(num, value, dtype=np.int32)

def checkInputData(graph, arrivals):
    if(graph.number_of_nodes() == 0):
        print("Error: graph has no nodes")
        return False
    if(graph.number_of_edges() == 0):
        print("Error: graph has no edges")
        return False
        
    arr = np.array(arrivals)
    if(np.min(arr) > 0):
        print("Error: arrival file has minimum node ID > 0:", np.min(arr))
        return False
    if(np.max(arr) >= graph.number_of_nodes()):
        print("Error: arrival file has maximum node ID >= number_of_nodes():", np.max(arr))
        return False

    return True

def computeAlpha(graph, num_partitions):
    numedges = graph.number_of_edges()
    if(graph.is_directed()):
        numedges = numedges * 0.5
    return numedges * (num_partitions / (graph.number_of_nodes()**2))
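
# Worked example (illustrative): for an undirected graph with 100 nodes, 300 edges
# and num_partitions = 4, alpha = 300 * (4 / 100**2) = 0.12.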
    
def printScore(graph, assignments, num_partitions, loneliness_score_param, verbose = 1):
    x = utils.score(graph, assignments, num_partitions)
    edges_cut, steps, cut_edges = utils.base_metrics(graph, assignments)

    q_qds_conductance = utils.infomapModularityComQuality(graph, assignments, num_partitions)
    #old: mod = utils.modularity_wavg(graph, assignments, num_partitions)
    loneliness = utils.loneliness_score_wavg(graph, loneliness_score_param, assignments, num_partitions)
    max_perm = utils.wavg_max_perm(graph, assignments, num_partitions)
    #old: max_perm = utils.run_max_perm(graph)

    if verbose > 1:
        print("{0:.5f}\t\t{1:.10f}\t{2}\t\t{3}\t\t\t{4}\t{5}\t{6}".format(x[0], x[1], edges_cut, steps, q_qds_conductance[0], loneliness, max_perm))
    # returns: waste, cut_ratio, edges_cut, TCV (steps), Qds, loneliness, max_perm
    return [x[0], x[1], edges_cut, steps, q_qds_conductance[0], loneliness, max_perm]


    
# Run the centralities experiment for each datapoint
'''for key in list(centralities.keys()):
    if analysisOnly == True:
        break


    centrality = centralities[key]

    print("Running experiment:", centrality.metadata())

    for i in range(0, 40):
        with GraphPartitioning(parametrized_config) as gp:
            gp.verbose = 0
            gp.DATA_FILENAME = dataFiles[i]
            print(gp.DATA_FILENAME)
            
            gp.load_network()
            gp.init_partitioner()
    
            gp.arrival_order = loadArrivals(centrality.getDataExperimentPath(i))
    
            m = gp.prediction_model()
            m = gp.assign_cut_off()
            m = gp.batch_arrival()
            
            print(m)
        break

analysisOnly = True
'''

# FORMAT OF SAVED scores.txt files

# average (scores)
# variance (scores)
# std (scores)
# mode(scores)
# skewness (scores)
# this is then followed by each experiment's scores over which the stats above are computed
# waste, cut_ratio, edges_cut, TCV (steps), Qds, loneliness, max_perm
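# For example, the averaged AA:random row printed in the analysis below,
#   0.00000, 0.3642223887, 90.15, 95.075, 0.4079..., 0.7452..., 0.0151...
# reads as: waste 0, cut ratio 0.364, 90.15 edges cut on average, TCV 95.075,
# Qds 0.408, loneliness 0.745, max_perm 0.015.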

for key in list(centralities.keys()):
    if analysisOnly == True:
        break
    
    centrality = centralities[key]

    print("Running experiment:", centrality.metadata())

    for i in range(0, 40):
        edgeFile = dataFiles[i]
        
        G = loadGraph(edgeFile)
        arrival_list = loadArrivals(centrality.getDataExperimentPath(i))
        GSub = G.subgraph(arrival_list)
        
        if checkInputData(G, arrival_list):
            # ok, can proceed
            assignments = generateArray(G.number_of_nodes(), -1)
            fixed = generateArray(G.number_of_nodes(), -1)
            
            if parametrized_config['PARTITIONER_ALGORITHM'] == 'FENNEL':
                assignments = fennel.generate_prediction_model(GSub, parametrized_config['num_iterations'], parametrized_config['num_partitions'], assignments, fixed)

            elif parametrized_config['PARTITIONER_ALGORITHM'] == 'SCOTCH':
                assignments = scotch.generate_prediction_model(GSub, parametrized_config['num_iterations'], parametrized_config['num_partitions'], assignments, fixed)

            elif parametrized_config['PARTITIONER_ALGORITHM'] == 'PATOH':
                assignments = patoh.generate_prediction_model(GSub, parametrized_config['num_iterations'], parametrized_config['num_partitions'], assignments, fixed)

            # score contains: x[0], x[1], edges_cut, steps, mod, loneliness, max_perm
            score = printScore(GSub, assignments, parametrized_config['num_partitions'], parametrized_config['loneliness_score_param'])
            centrality.scores.append(score)
    centrality.computeStatsScore()
    centrality.saveScores()
print("Finished experiments.")


Finished experiments.

In [35]:
# analyse the results

# find min/max for each score
metrics = ["WASTE", "CUT RATIO", "EDGES CUT", "TOTAL COMM VOLUME", "Qds", "LONELINESS", "MAXPERM"]

max_metric_centrality=[]
min_metric_centrality=[]
max_metric = []
min_metric = []
avg_metric = []
metric_sort_dataset = {}
for metric in metrics:
    max_metric_centrality.append("")
    min_metric_centrality.append("")
    max_metric.append(0.0)
    min_metric.append(10000000.0)
    avg_metric.append(0.0)

avg_results = {}    
for key in list(centralities.keys()):
    centrality = centralities[key]
    centralityCode = centrality.centralityType + ":" + centrality.orderType
    print("Experiment:", centralityCode)

    centrality.loadScores()
    centrality.computeStatsScore()
    avg_results[centralityCode] = centrality.avgScores

    centrality.printScoreline(centrality.avgScores)

    for i, metric in enumerate(centrality.avgScores):
        if(max_metric[i] < metric):
            max_metric[i] = metric
            max_metric_centrality[i] = centralityCode
        if(min_metric[i] > metric):
            min_metric[i] = metric
            min_metric_centrality[i] = centralityCode
        avg_metric[i] = avg_metric[i] + metric
        # index the score
        #if metric in metric_sort_dataset[i]:
        #    metric_sort_dataset[i][metric].append()

with open(os.path.join(parametrized_config['OUTPUT_DIRECTORY'], "centrality_scores.csv"), 'w+') as f:
    s = "centrality"
    for metric in metrics:
        s = s + "," + metric
    f.write(s + "\n")
    
    for key in list(avg_results.keys()):
        line = key
        for score in avg_results[key]:
            line = line + "," + str(score)
        f.write(line + "\n")
        
for i, avg in enumerate(avg_metric):
    avg_metric[i] = avg / len(centralities)
            
for i, metric in enumerate(metrics):
    print(metric, "metric")
    print("   average =", avg_metric[i])
    print("   min, max =", min_metric[i], max_metric[i])
    print("  ", min_metric_centrality[i], "||", max_metric_centrality[i])


Experiment: AA:random
0.00000	0.3642223887	90.15	95.075	0.40797893778021416	0.7452021271808	0.015190637500000001
Experiment: Alpha:HL
0.00000	0.3647740181	90.275	95.075	0.41115159179477806	0.744545604013975	0.0088029375
Experiment: Alpha:LH
0.00000	0.3658334816	90.525	95.375	0.4053231887936006	0.74230364451105	0.0037204749999999983
Experiment: Average distance:HL
0.00000	0.3645310507	90.2	95.05	0.4075215286242746	0.744011746651	0.0018389312500000012
Experiment: Average distance:LH
0.00000	0.3636613102	89.975	94.7	0.40908385545430875	0.74508042203465	0.017410981249999995
Experiment: Barycenter centrality:HL
0.00000	0.3640772634	90.125	94.9	0.4069757202140485	0.7438894779868999	0.013572856249999996
Experiment: Barycenter centrality:LH
0.00000	0.3654111274	90.425	94.625	0.406124519912213	0.7452016174462999	0.00829180625
Experiment: Betweenness:HL
0.00000	0.3631679285	89.875	94.7	0.40810236534105326	0.7457118069159749	0.006608412499999999
Experiment: Betweenness:LH
0.00000	0.3644296037	90.225	94.75	0.41196966832966525	0.74575883253165	0.0141436125
Experiment: BottleNeck centrality:HL
0.00000	0.3637777807	90.075	94.475	0.4074312854289065	0.7448311965858501	0.009125637499999997
Experiment: BottleNeck centrality:LH
0.00000	0.3665493721	90.75	94.725	0.40878761929261725	0.744916795950625	0.010943999999999999
Experiment: Bridging centrality:HL
0.00000	0.3630582954	89.85	94.6	0.405038752836108	0.744653098729825	0.018392743750000003
Experiment: Bridging centrality:LH
0.00000	0.3661760897	90.625	95.2	0.4132103597993465	0.7436538852024499	0.011136356249999996
Experiment: Centroid centrality:HL
0.00000	0.3631942368	89.875	94.725	0.4078742255644219	0.74505991745195	0.00382379375
Experiment: Centroid centrality:LH
0.00000	0.3646248756	90.225	94.75	0.40783214774226506	0.745769724602975	0.007270837500000002
Experiment: Closeness Freeman:HL
0.00000	0.3622272691	89.65	94.75	0.4069251558361838	0.7447687813746249	0.01101601875
Experiment: Closeness Freeman:LH
0.00000	0.3653765749	90.45	94.375	0.40444879425402547	0.7461429551382	0.008509056249999999
Experiment: Closeness VariantLatora:HL
0.00000	0.3627372153	89.775	94.625	0.4040367621764364	0.7455223685652751	0.0034830187499999997
Experiment: Closeness VariantLatora:LH
0.00000	0.3664906961	90.675	95.0	0.4001835987690533	0.7448012702693501	0.001678956249999998
Experiment: ClusterRank:HL
0.00000	0.3647183291	90.275	94.8	0.40524555025825515	0.744677566647775	0.012811
Experiment: ClusterRank:LH
0.00000	0.3670355496	90.875	94.65	0.4096574398534285	0.7444846868678751	0.005112787500000002
Experiment: Communicability betweenness centrality:HL
0.00000	0.3629078215	89.825	94.8	0.40582438402328513	0.7447219906149251	0.005961899999999998
Experiment: Communicability betweenness centrality:LH
0.00000	0.3658318603	90.55	94.8	0.40904961125216605	0.7457664749913251	0.018212306249999997
Experiment: Community centrality:HL
0.00000	0.3660644933	90.6	95.0	0.40572822598050423	0.74401823542805	0.007256137499999996
Experiment: Community centrality:LH
0.00000	0.3628615713	89.8	94.175	0.4080974516929941	0.745545226373325	0.007228931249999998
Experiment: Core decomposition:HL
0.00000	0.3670329851	90.85	95.5	0.4052856199455884	0.744483173648225	0.010877650000000003
Experiment: Core decomposition:LH
0.00000	0.3668683821	90.8	94.8	0.40667458432682724	0.7453089241844	0.01582134375
Experiment: Cross clique centrality:LH
0.00000	0.3639672022	90.075	94.75	0.4104435251933757	0.7455975857198499	0.006680674999999997
Experiment: Cross clique connectivity:HL
0.00000	0.3645385265	90.225	95.325	0.4034093425395275	0.743349920419375	0.0057636187499999995
Experiment: Current flow closeness centrality:HL
0.00000	0.3641996225	90.175	94.4	0.40451672525076515	0.7450303964074501	0.004267712499999998
Experiment: Current flow closeness centrality:LH
0.00000	0.3655081529	90.45	95.15	0.41321945306681124	0.743743345632625	0.007594706250000002
Experiment: Dangalchev closeness centrality:HL
0.00000	0.3672944817	90.875	94.5	0.4076005332703268	0.7434291324959499	0.010649112499999999
Experiment: Dangalchev closeness centrality:LH
0.00000	0.3675968132	91.025	94.625	0.4092778378666055	0.744916057973275	0.01017416875
Experiment: Decay centrality:HL
0.00000	0.3590076096	88.875	94.225	0.39965308611044803	0.74524099770665	0.011372756250000001
Experiment: Decay centrality:LH
0.00000	0.3655529964	90.475	94.975	0.40247783768193884	0.744450013061575	0.007395674999999999
Experiment: Degree centrality:HL
0.00000	0.3647216909	90.25	94.675	0.4006595107972729	0.7441225174054751	0.0116496625
Experiment: Degree centrality:LH
0.00000	0.3674526250	90.925	95.525	0.41270631168813204	0.7446733139656001	0.0126161
Experiment: Diffusion degree:HL
0.00000	0.3644917288	90.175	94.975	0.4071546437660297	0.7450511132502751	0.001985337499999995
Experiment: Diffusion degree:LH
0.00000	0.3659146239	90.6	94.65	0.40922018820185535	0.7442862642762751	0.009801600000000002
Experiment: DMNC centrality:HL
0.00000	0.3627969334	89.775	94.825	0.4071594680594764	0.7445875435493	0.0016898499999999997
Experiment: DMNC centrality:LH
0.00000	0.3667127968	90.775	94.8	0.4082493225539869	0.745812061676	0.01243820625
Experiment: Eccentricity:HL
0.00000	0.3632311553	89.9	94.425	0.4043407180582018	0.7450767129071749	0.003916043749999998
Experiment: Eccentricity:LH
0.00000	0.3645576814	90.225	94.35	0.4030604593296955	0.74432757309925	-0.007772662500000002
Experiment: Effectiveness centrality:HL
0.00000	0.3613131771	89.425	94.8	0.40641586322637024	0.744634549831725	0.01774888125
Experiment: Effectiveness centrality:LH
0.00000	0.3680917884	91.075	95.25	0.403844906534968	0.744407778986225	0.0013982937499999986
Experiment: Eigenvector:HL
0.00000	0.3640107775	90.075	94.6	0.404636567693838	0.7437899916320999	0.0036827812500000016
Experiment: Eigenvector:LH
0.00000	0.3705339234	91.75	94.875	0.4112157749378351	0.743728665228075	0.0006732562499999993
Experiment: Entropy centrality:HL
0.00000	0.3645231316	90.225	94.7	0.40464849899995314	0.7455508286539001	0.008042893749999998
Experiment: Entropy centrality:LH
0.00000	0.3662869224	90.675	94.85	0.41065132617505207	0.744494319104125	0.004462237499999996
Experiment: EPC:HL
0.00000	0.3649660887	90.325	94.85	0.40537244300716646	0.744043487179175	0.005503799999999998
Experiment: EPC:LH
0.00000	0.3681021627	91.125	94.5	0.40839313458453325	0.744640106208425	0.007887331249999999
Experiment: Flow betweenness centrality:HL
0.00000	0.3631762932	89.875	94.7	0.3990590484666755	0.7456794018325	0.005472499999999998
Experiment: Flow betweenness centrality:LH
0.00000	0.3670037421	90.85	94.95	0.4107419445425404	0.7451467153747999	0.007689062500000001
Experiment: Information centrality:HL
0.00000	0.3622525327	89.675	94.4	0.4030490112289595	0.74414691448825	0.006413618749999997
Experiment: Information centrality:LH
0.00000	0.3629195524	89.8	94.725	0.40824255903825346	0.7455164394426751	0.006005024999999997
Experiment: Kleinbergs centrality HITS:HL
0.00000	0.3634840753	89.95	94.7	0.406024610153645	0.7446517949545	0.012528862499999998
Experiment: Kleinbergs centrality HITS:LH
0.00000	0.3686085256	91.25	95.45	0.4081550913290902	0.744311100229725	0.013055737500000001
Experiment: LAC:HL
0.00000	0.3649141573	90.3	94.875	0.40084989174042357	0.744902984211875	0.010347424999999999
Experiment: LAC:LH
0.00000	0.3671967625	90.925	94.5	0.41763712321281365	0.74404203492385	0.002832006249999999
Experiment: Lapacian centrality:HL
0.00000	0.3632961332	89.9	94.575	0.4067596307165718	0.7453328548437501	0.015099731250000002
Experiment: Lapacian centrality:LH
0.00000	0.3631144463	89.875	94.8	0.40827850687283984	0.745402024488925	0.0140508125
Experiment: Leverage centrality:HL
0.00000	0.3625166910	89.725	95.075	0.4088664710330484	0.74444560633805	0.013239037499999998
Experiment: Leverage centrality:LH
0.00000	0.3637256101	90.0	94.925	0.4094942204409781	0.745264324760425	0.0068871062499999995
Experiment: Lin centrality:HL
0.00000	0.3640257993	90.1	94.45	0.40132127816367846	0.744773304787375	-0.0014220312500000027
Experiment: Lin centrality:LH
0.00000	0.3659003136	90.575	94.675	0.406208191873071	0.7448713974357501	0.00826160625
Experiment: Load centrality:HL
0.00000	0.3641178375	90.125	94.475	0.4089689737035059	0.746338934960025	0.0034134875000000017
Experiment: Load centrality:LH
0.00000	0.3660285623	90.575	94.675	0.4086545905684028	0.7447562527178251	-0.007584812500000001
Experiment: Lobby index:HL
0.00000	0.3619087277	89.575	94.725	0.40692375631886346	0.74489264278705	0.00886863125
Experiment: Lobby index:LH
0.00000	0.3658573795	90.575	94.95	0.413686806453801	0.7451795748016499	0.010596425000000001
Experiment: Local assortativity:HL
0.00000	0.3660302515	90.6	94.825	0.41146435795543956	0.743560136442475	0.006895568750000002
Experiment: Local assortativity:LH
0.00000	0.3616080106	89.475	94.75	0.4046544486037124	0.745625476338725	0.007852056249999998
Experiment: Local clustering coefficients:HL
0.00000	0.3621304554	89.65	94.8	0.40754108637741204	0.745264391769	0.012475131249999999
Experiment: Local clustering coefficients:LH
0.00000	0.3660018069	90.575	94.8	0.41149021186542073	0.7451110974218	0.01053450625
Experiment: Markov centrality:HL
0.00000	0.3653153886	90.425	94.85	0.40502354391857426	0.744169999253425	0.010258543749999998
Experiment: Markov centrality:LH
0.00000	0.3660962129	90.625	94.7	0.4105529701938691	0.7443545373370251	-0.00047551875000000006
Experiment: MCC centrality:HL
0.00000	0.3630751565	89.85	94.675	0.4035640765897174	0.7435313117307499	0.0059221375
Experiment: MCC centrality:LH
0.00000	0.3598174990	89.025	94.35	0.40268011794937575	0.7465249795838	0.011803675
Experiment: MNC centrality:HL
0.00000	0.3654984778	90.475	95.175	0.40751459163287657	0.745079188147225	0.015487518750000002
Experiment: MNC centrality:LH
0.00000	0.3636133276	90.025	94.425	0.40976985964957535	0.7458369890288	0.0187926375
Experiment: Neighborhood connectivity:HL
0.00000	0.3618962610	89.575	94.9	0.40887945007019233	0.743987952893125	0.009243231249999997
Experiment: Neighborhood connectivity:LH
0.00000	0.3652870832	90.4	95.2	0.409766678412013	0.7438377718132501	0.00985805625
Experiment: Network centrality:HL
0.00000	0.3605575012	89.225	94.675	0.39919839894546194	0.7445001077797501	-0.0037855375000000017
Experiment: Network centrality:LH
0.00000	0.3671154963	90.9	93.95	0.4131866605433627	0.745381439279325	-0.0016027125000000024
Experiment: Network fragmentation GeodesicDistanceWeighted:HL
0.00000	0.3662881429	90.675	94.525	0.4068867664114781	0.743706800055125	0.0009822499999999983
Experiment: Network fragmentation GeodesicDistanceWeighted:LH
0.00000	0.3627255821	89.775	94.925	0.40539396183169957	0.7450581399167999	0.01011550625
Experiment: Network fragmentation:HL
0.00000	0.3670125810	90.85	94.175	0.40826688455892574	0.7452541473019251	0.008974293749999997
Experiment: Network fragmentation:LH
0.00000	0.3663757369	90.675	95.025	0.4105638012552844	0.744414495382775	0.01740784375
Experiment: Path centrality:HL
0.00000	0.3664587255	90.675	94.925	0.40490343316240623	0.7442492170914	0.0036195249999999984
Experiment: Path centrality:LH
0.00000	0.3659786465	90.575	94.85	0.40920863059767615	0.74524067303175	0.010507806250000001
Experiment: Political independence index:HL
0.00000	0.3655583406	90.525	94.85	0.40939836466993773	0.7443638312009501	0.0164090875
Experiment: Political independence index:LH
0.00000	0.3643131614	90.2	95.25	0.40637739339130263	0.7448237398582	0.011263849999999999
Experiment: Radiality centrality:HL
0.00000	0.3670685798	90.875	94.975	0.4079072206208723	0.7451585848389	0.014046568749999998
Experiment: Radiality centrality:LH
0.00000	0.3671927433	90.9	95.2	0.4058981977557784	0.74546165731575	0.00346495
Experiment: Random walk betweenness:HL
0.00000	0.3629168378	89.85	94.2	0.4052305134861898	0.7448085019982	0.003599631249999999
Experiment: Random walk betweenness:LH
0.00000	0.3659761856	90.575	95.325	0.40505653288041865	0.74553413658445	0.011020174999999998
Experiment: Random walk closeness:HL
0.00000	0.3652502156	90.4	95.125	0.40583788712969593	0.7433442552807501	0.012752924999999998
Experiment: Random walk closeness:LH
0.00000	0.3679728303	91.075	94.8	0.4141288173503866	0.7447131753127001	-0.0024356187500000014
Experiment: SALSA:HL
0.00000	0.3627530186	89.8	94.25	0.4071647137849963	0.744096826173575	0.006998075000000001
Experiment: SALSA:LH
0.00000	0.3702659473	91.65	94.75	0.4120075835672732	0.744732096132825	8.749999999974057e-08
Experiment: Semi local centrality:HL
0.00000	0.3636054521	90.025	94.425	0.4050772508091269	0.74461761910655	0.02027639375
Experiment: Semi local centrality:LH
0.00000	0.3638689874	90.075	94.8	0.4111395442576983	0.746228357875725	0.01591230625
Experiment: Shortest path betweenness:HL
0.00000	0.3634099480	89.95	94.325	0.408153339349243	0.74508403261655	0.012279156250000001
Experiment: Shortest path betweenness:LH
0.00000	0.3636258989	90.0	94.775	0.4083512290076115	0.744775248226625	0.0018482750000000017
Experiment: Shortest path closeness:HL
0.00000	0.3621622707	89.625	94.925	0.4085792087038646	0.7445485772140501	-0.0033176875000000003
Experiment: Shortest path closeness:LH
0.00000	0.3667686729	90.75	95.1	0.4033911490576615	0.7441642823393501	0.003531843749999998
Experiment: Shortest path degree:HL
0.00000	0.3621497482	89.625	94.9	0.4030995357592307	0.7440691532092251	0.01931814375
Experiment: Shortest path degree:LH
0.00000	0.3671948073	90.9	94.825	0.4133097487711659	0.74464976647555	0.008904224999999998
Experiment: Strength weighted vertex degree:HL
0.00000	0.3624594856	89.75	94.75	0.4060035567319361	0.744943773833325	0.0006749062499999999
Experiment: Strength weighted vertex degree:LH
0.00000	0.3644839002	90.225	94.9	0.40959176000684705	0.74554421367095	0.01107195
Experiment: Stress centrality:HL
0.00000	0.3646810500	90.25	95.225	0.40399807725026093	0.74392353832755	0.003663693749999998
Experiment: Stress centrality:LH
0.00000	0.3636650594	90.0	94.775	0.4133957536671541	0.745203098929525	0.014747431250000002
Experiment: Subgraph:HL
0.00000	0.3653359529	90.4	95.1	0.40287760415769275	0.744436258128825	-0.0003905812500000023
Experiment: Subgraph:LH
0.00000	0.3632535536	89.9	94.75	0.40039592773759525	0.7442525688605001	0.015896437500000003
Experiment: Topological coefficient:HL
0.00000	0.3662593921	90.65	94.65	0.4070810474067483	0.744006041745725	0.01073934375
Experiment: Topological coefficient:LH
0.00000	0.3644630571	90.25	94.8	0.4077147725982678	0.7447327528211749	0.002824893749999998
WASTE metric
   average = 0.0
   min, max = 0.0 0.0
   AA:random || 
CUT RATIO metric
   average = 0.364770424037
   min, max = 0.359007609588 0.370533923429
   Decay centrality:HL || Eigenvector:LH
EDGES CUT metric
   average = 90.2843478261
   min, max = 88.875 91.75
   Decay centrality:HL || Eigenvector:LH
TOTAL COMM VOLUME metric
   average = 94.7867391304
   min, max = 93.95 95.525
   Network centrality:LH || Degree centrality:LH
Qds metric
   average = 0.407204250349
   min, max = 0.399059048467 0.417637123213
   Flow betweenness centrality:HL || LAC:LH
LONELINESS metric
   average = 0.744751087577
   min, max = 0.742303644511 0.746524979584
   Alpha:LH || MCC centrality:LH
MAXPERM metric
   average = 0.0081181423913
   min, max = -0.0077726625 0.02027639375
   Eccentricity:LH || Semi local centrality:HL

In [21]:
# Extract variables for R analysis
y = ''
g = ''

tmpY = ''
tmpG = ''


gkey = {}

modeScore = {} # list of centralities for each modal edges cut value

# This sets the CONTROL for the R statistical tests (the control is the first
# centrality dataset that has to go in).

# Not sure - this would probably be Leverage_Centrality_HL, which technically
# has the best modal score.
whichIDToKeepAsZero = 0 #leverage # 90 for Political_independence_index_LH # 0 for random
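# Example of the remapping below: with whichIDToKeepAsZero = 2, the centralities
# at indices 0 and 1 become groups 1 and 2, index 2 becomes control group 0,
# and indices 3 and above keep their own index.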

for i_key, key in enumerate(list(centralities.keys())):
    if(i_key < whichIDToKeepAsZero):
        gkey[i_key + 1] = key
    elif(i_key == whichIDToKeepAsZero):
        gkey[0] = key
    else:
        gkey[i_key] = key
        
    centrality = centralities[key]

    # extract Edges Cut mode value
    #UNCOMMENT THESE TO ENABLE MODE SCORE
    #modescore = centrality.modeScores[2].split(':')
    #mecut = float(modescore[0]) # value of mode of edges cut
    #mcount = int(modescore[1]) # number of experiments with this modal value of edges cut
    
        
    # store each centrality based on their modal value of edges cut
    #overallmodescore = mecut / mcount
    
    # UNCOMMENT THIS TO ENABLE MODE SCORE
    #overallmodescore = mecut

    ## COMMENT THIS OUT IF WE DON'T WANT AVERAGE!!
    centrality.loadScores()
    centrality.computeStatsScore()
    overallmodescore = centrality.avgScores[2]
    
    if overallmodescore in modeScore:
        modeScore[overallmodescore].append(key)
    else:
        modeScore[overallmodescore] = [key]

    
    
    for i, score in enumerate(centrality.scores):
        edges_cut = score[2]
        
        if(i_key == whichIDToKeepAsZero):
            if(len(tmpY)):
                tmpY += ','
            tmpY += str(edges_cut)
            if(len(tmpG)):
                tmpG += ','
            tmpG += str(0)
            
        else:        
            if(len(y)):
                y += ','
            y += str(edges_cut)
            if(len(g)):
                g += ','
            g += str(i_key)
            #g += '"' + str(i_key) + '"'
            if(i == 40):
                break

y = "Y <- c(" + tmpY + ',' + y + ")"
g = "g <- as.factor(c(" + tmpG + ',' + g + "))"
print(y)
print("")
print(g)


Y <- c(87.0,89.0,80.0,77.0,94.0,92.0,78.0,90.0,82.0,66.0,86.0,82.0,85.0,96.0,87.0,80.0,86.0,75.0,83.0,88.0,94.0,86.0,75.0,94.0,92.0,89.0,84.0,68.0,102.0,80.0,77.0,99.0,86.0,79.0,95.0,77.0,98.0,77.0,77.0,79.0, ...)  [output truncated]
6.0,83.0,76.0,100.0,93.0,68.0,86.0,86.0,86.0,85.0,90.0,84.0,95.0,79.0,87.0,81.0,87.0,84.0,81.0,97.0,85.0,100.0,100.0,86.0,110.0,81.0,99.0,95.0,83.0,83.0,89.0,83.0,93.0,93.0,80.0,82.0,82.0,87.0,76.0,88.0,82.0,93.0,92.0,82.0,87.0,87.0,79.0,84.0,89.0,75.0,92.0,64.0,85.0,87.0,73.0,91.0,88.0,76.0,92.0,77.0,84.0,86.0,85.0,96.0,89.0,99.0,76.0,74.0,91.0,74.0,92.0,83.0,89.0,94.0,81.0,84.0,87.0,87.0,97.0,83.0,80.0,83.0,102.0,103.0,93.0,90.0,103.0,61.0,89.0,90.0,87.0,90.0,64.0,86.0,90.0,81.0,86.0,96.0,94.0,99.0,76.0,96.0,90.0,92.0,86.0,100.0,92.0,94.0,81.0,93.0,80.0,97.0,83.0,80.0,83.0,102.0,103.0,93.0,90.0,86.0,88.0,82.0,82.0,100.0,96.0,93.0,86.0,89.0,79.0,86.0,86.0,76.0,92.0,71.0,81.0,97.0,83.0,83.0,79.0,81.0,97.0,77.0,109.0,84.0,95.0,88.0,69.0,93.0,79.0,81.0,101.0,84.0,81.0,88.0,84.0,93.0,80.0,86.0,83.0,80.0,75.0,82.0,89.0,105.0,100.0,86.0,89.0,88.0,65.0,82.0,92.0,92.0,92.0,65.0,83.0,90.0,78.0,87.0,88.0,81.0,96.0,80.0,89.0,90.0,87.0,89.0,77.0,91.0,86.0,73.0,95.0,85.0,92.0,93.0,85.0,98.0,76.0,78.0,84.0,79.0,90.0,82.0,84.0,106.0,102.0,86.0,91.0,84.0,71.0,76.0,92.0,77.0,86.0,62.0,88.0,89.0,86.0,80.0,89.0,89.0,100.0,77.0,100.0,89.0,98.0,88.0,92.0,102.0,79.0,76.0,101.0,78.0,91.0,77.0,75.0,97.0,90.0,78.0,93.0,91.0,69.0,91.0,68.0,98.0,105.0,81.0,89.0,78.0,70.0,83.0,92.0,83.0,97.0,70.0,87.0,93.0,93.0,90.0,75.0,75.0,84.0,74.0,104.0,84.0,95.0,82.0,94.0,98.0,88.0,97.0,98.0,76.0,93.0,92.0,89.0,105.0,90.0,73.0,89.0,83.0,79.0,79.0,77.0,97.0,87.0,76.0,84.0,93.0,69.0,85.0,87.0,80.0,92.0,67.0,81.0,91.0,83.0,87.0,93.0,80.0,85.0,83.0,93.0,98.0,87.0,97.0,81.0,89.0,78.0,86.0,102.0,77.0,95.0,74.0,86.0,91.0,91.0,78.0,84.0,75.0,77.0,89.0,85.0,105.0,94.0,82.0,100.0,91.0,82.0,82.0,86.0,83.0,92.0,67.0,92.0,82.0,86.0,86.0,97.0,95.0,87.0,72.0,95.0,81.0,93.0,99.0,94.0,93.0,91.0,79.0,99.0,76.0,96.0,76.0,79.0,89.0,85.0,85.0,86.0,81.0,82.0,89.0,72.0,93.0,97.0,84.0,84.0,86.0,63.0,82.0,94.0,81.0,87.0,64.0,78.0,85.0,77.0,83.0,89.0,77.0,82.0,76.0,95.0,88.0,81.0,92.0,92.0,90.0,80.0,88.0,85.0,75.0,80.0,88.0,89.0,93.0,73.0,77.0,80.0,87.0,73.0,99.0,84.0,106.0,94.0,91.0,96.0,89.0,66.0,75.0,90.0,90.0,92.0,65.0,83.0,106.0,80.0,91.0,96.0,99.0,101.0,67.0,89.0,97.0,93.0,85.0,85.0,98.0,89.0,88.0,99.0,87.0,93.0,93.0,83.0,101.0,80.0,83.0,102.0,85.0,76.0,80.0,71.0,88.0,89.0,88.0,84.0,91.0,64.0,85.0,86.0,80.0,90.0,75.0,83.0,82.0,73.0,89.0,84.0,89.0,83.0,68.0,91.0,91.0,81.0,81.0,83.0,97.0,81.0,78.0,96.0,72.0,91.0,75.0,82.0,89.0,77.0,75.0,79.0,88.0,98.0,87.0,79.0,102.0,92.0,91.0,96.0,93.0,70.0,78.0,96.0,89.0,98.0,77.0,86.0,89.0,81.0,89.0,92.0,92.0,89.0,75.0,96.0,88.0,104.0,108.0,90.0,104.0,93.0,84.0,97.0,82.0,84.0,85.0,85.0,97.0,79.0,80.0,86.0,80.0,81.0,95.0,95.0,110.0,96.0,75.0,90.0,89.0,73.0,87.0,91.0,97.0,93.0,80.0,84.0,97.0,79.0,96.0,83.0,90.0,97.0,85.0,87.0,99.0,92.0,99.0,85.0,102.0,91.0,96.0,98.0,78.0,90.0,93.0,89.0,96.0,85.0,81.0,84.0,83.0,73.0,93.0,95.0,101.0,89.0,99.0,92.0,80.0,69.0,88.0,85.0,83.0,104.0,69.0,84.0,91.0,91.0,89.0,78.0,84.0,81.0,72.0,99.0,100.0,103.0,90.0,83.0,91.0,95.0,71.0,92.0,76.0,91.0,85.0,85.0,95.0,90.0,76.0,82.0,71.0,81.0,80.0,75.0,90.0,99.0,76.0,85.0,88.0,68.0,79.0,94.0,75.0,86.0,64.0,86.0,95.0,84.0,88.0,83.0,79.0,82.0,79.0,84.0,89.0,93.0,90.0,80.0,91.0,78.0,92.0,97.0,72.0,78.0,87.0,80.0,96.0,76.0,79.0,74.0,83.0,90.0,88.0,71.0,120.0,101.0,100.0,104.0,98.0,67.0,82.0,89.0,89.0,87.0,99.0,89.0,92.0,89.0,95.0,75.0,101.0,90.0,76.0,100.0,95.0,98.0,77.0,75.0,107.0,80.0,107.0,96.0,76.0,91.0,91.0,90.0,96.0,96.0,84.0,102.0,86.0,80.0,86.0,71.0,92.0,99.0,82.0,87.0,81.0,74.0,79.0,90.0,78.0,91.0,70.0,88.0,82.0,75.0,91.0,94.0,75.0,86.0,
68.0,86.0,85.0,92.0,83.0,81.0,98.0,87.0,84.0,98.0,72.0,91.0,84.0,76.0,101.0,87.0,80.0,83.0,90.0,86.0,93.0,76.0,96.0,93.0,85.0,91.0,95.0,76.0,82.0,98.0,91.0,93.0,80.0,78.0,96.0,78.0,93.0,83.0,90.0,87.0,66.0,94.0,86.0,88.0,107.0,81.0,102.0,83.0,78.0,95.0,74.0,85.0,88.0,91.0,93.0,84.0,71.0,79.0,90.0,88.0,85.0,79.0,93.0,88.0,86.0,87.0,84.0,73.0,72.0,91.0,80.0,88.0,64.0,81.0,85.0,80.0,89.0,78.0,76.0,83.0,69.0,96.0,84.0,90.0,88.0,73.0,95.0,79.0,98.0,99.0,93.0,94.0,84.0,80.0,106.0,89.0,86.0,81.0,87.0,87.0,82.0,85.0,97.0,95.0,88.0,92.0,79.0,79.0,78.0,87.0,88.0,87.0,78.0,83.0,89.0,87.0,95.0,86.0,101.0,98.0,74.0,96.0,91.0,101.0,82.0,76.0,100.0,78.0,88.0,91.0,67.0,86.0,98.0,88.0,102.0,92.0,78.0,81.0,83.0,76.0,80.0,68.0,90.0,97.0,85.0,88.0,90.0,69.0,79.0,87.0,79.0,89.0,65.0,79.0,91.0,72.0,89.0,81.0,78.0,82.0,84.0,88.0,91.0,88.0,79.0,72.0,96.0,76.0,81.0,81.0,70.0,75.0,89.0,90.0,99.0,85.0,83.0,90.0,81.0,81.0,100.0,84.0,95.0,99.0,91.0,88.0,81.0,93.0,95.0,88.0,92.0,96.0,72.0,80.0,82.0,94.0,81.0,91.0,87.0,85.0,86.0,90.0,83.0,101.0,97.0,91.0,102.0,79.0,75.0,102.0,74.0,83.0,99.0,87.0,103.0,91.0,70.0,93.0,84.0,82.0,91.0,77.0,103.0,98.0,76.0,86.0,86.0,70.0,87.0,88.0,82.0,85.0,74.0,88.0,97.0,93.0,81.0,78.0,97.0,87.0,80.0,95.0,82.0,84.0,102.0,82.0,95.0,91.0,85.0,98.0,78.0,91.0,80.0,85.0,96.0,89.0,75.0,77.0,82.0,71.0,86.0,73.0,96.0,90.0,80.0,82.0,79.0,68.0,81.0,95.0,77.0,84.0,82.0,77.0,93.0,69.0,100.0,74.0,76.0,85.0,72.0,87.0,99.0,89.0,80.0,73.0,98.0,81.0,79.0,94.0,73.0,90.0,88.0,79.0,88.0,77.0,84.0,83.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,85.0,82.0,77.0,79.0,101.0,92.0,77.0,81.0,90.0,64.0,83.0,95.0,81.0,94.0,64.0,81.0,94.0,80.0,92.0,86.0,86.0,86.0,75.0,92.0,85.0,87.0,87.0,80.0,92.0,82.0,90.0,89.0,68.0,84.0,81.0,87.0,94.0,86.0,80.0,79.0,78.0,74.0,99.0,70.0,104.0,94.0,75.0,89.0,91.0,86.0,88.0,96.0,79.0,95.0,70.0,92.0,102.0,89.0,88.0,93.0,88.0,85.0,84.0,99.0,90.0,100.0,96.0,76.0,96.0,90.0,79.0,93.0,87.0,87.0,79.0,91.0,90.0,82.0,77.0,88.0,83.0,79.0,79.0,77.0,97.0,87.0,76.0,84.0,93.0,69.0,85.0,87.0,80.0,92.0,67.0,81.0,91.0,83.0,87.0,93.0,80.0,85.0,83.0,93.0,98.0,87.0,97.0,81.0,89.0,78.0,86.0,102.0,77.0,95.0,74.0,86.0,91.0,91.0,78.0,84.0,75.0,77.0,89.0,85.0,105.0,94.0,82.0,100.0,91.0,82.0,82.0,86.0,83.0,92.0,67.0,92.0,82.0,86.0,86.0,97.0,95.0,87.0,72.0,95.0,81.0,93.0,99.0,94.0,93.0,91.0,79.0,99.0,76.0,96.0,76.0,79.0,89.0,85.0,85.0,86.0,78.0,74.0,88.0,71.0,99.0,99.0,75.0,87.0,83.0,65.0,77.0,93.0,87.0,87.0,72.0,82.0,91.0,89.0,86.0,93.0,85.0,83.0,79.0,97.0,87.0,89.0,89.0,76.0,92.0,87.0,71.0,101.0,80.0,79.0,89.0,80.0,99.0,93.0,81.0,82.0,87.0,74.0,93.0,89.0,90.0,98.0,81.0,95.0,79.0,65.0,89.0,92.0,77.0,96.0,65.0,88.0,92.0,89.0,92.0,84.0,97.0,86.0,74.0,97.0,83.0,91.0,87.0,75.0,93.0,88.0,91.0,97.0,93.0,83.0,77.0,82.0,88.0,78.0,81.0,96.0,82.0,74.0,81.0,78.0,93.0,99.0,85.0,86.0,101.0,69.0,86.0,92.0,77.0,85.0,70.0,76.0,85.0,76.0,86.0,89.0,75.0,82.0,75.0,91.0,102.0,86.0,92.0,76.0,93.0,86.0,87.0,96.0,74.0,85.0,85.0,83.0,90.0,79.0,81.0,82.0,77.0,82.0,90.0,74.0,106.0,94.0,90.0,89.0,83.0,73.0,92.0,87.0,73.0,97.0,90.0,91.0,89.0,80.0,92.0,95.0,89.0,90.0,81.0,93.0,112.0,89.0,86.0,98.0,97.0,78.0,110.0,98.0,89.0,90.0,78.0,89.0,102.0,85.0,76.0,86.0,84.0,77.0,79.0,74.0,93.0,93.0,
81.0,83.0,96.0,67.0,77.0,82.0,78.0,91.0,65.0,86.0,89.0,74.0,83.0,85.0,77.0,89.0,78.0,83.0,86.0,86.0,77.0,75.0,88.0,89.0,82.0,92.0,85.0,82.0,90.0,86.0,92.0,72.0,78.0,81.0,76.0,91.0,78.0,79.0,105.0,97.0,93.0,96.0,93.0,74.0,83.0,87.0,90.0,91.0,82.0,76.0,84.0,86.0,99.0,82.0,85.0,82.0,80.0,92.0,91.0,109.0,79.0,75.0,98.0,79.0,105.0,95.0,87.0,88.0,86.0,96.0,97.0,99.0,77.0,80.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,85.0,77.0,83.0,74.0,101.0,89.0,81.0,82.0,92.0,75.0,83.0,85.0,74.0,86.0,65.0,87.0,96.0,79.0,87.0,89.0,75.0,84.0,75.0,92.0,95.0,90.0,91.0,90.0,90.0,79.0,80.0,98.0,71.0,81.0,93.0,84.0,93.0,83.0,86.0,82.0,75.0,95.0,86.0,93.0,92.0,94.0,89.0,92.0,92.0,68.0,82.0,96.0,86.0,89.0,69.0,79.0,94.0,94.0,87.0,84.0,85.0,81.0,85.0,92.0,88.0,104.0,85.0,91.0,102.0,80.0,74.0,96.0,70.0,89.0,86.0,78.0,93.0,91.0,79.0,80.0,85.0,77.0,83.0,74.0,101.0,89.0,81.0,82.0,92.0,75.0,83.0,85.0,74.0,86.0,65.0,87.0,96.0,79.0,87.0,89.0,75.0,84.0,75.0,92.0,95.0,90.0,91.0,90.0,90.0,79.0,80.0,98.0,71.0,81.0,93.0,84.0,93.0,83.0,86.0,82.0,75.0,95.0,86.0,93.0,92.0,94.0,89.0,92.0,92.0,68.0,82.0,96.0,86.0,89.0,69.0,79.0,94.0,94.0,87.0,84.0,85.0,81.0,85.0,92.0,88.0,104.0,85.0,91.0,102.0,80.0,74.0,96.0,70.0,89.0,86.0,78.0,93.0,91.0,79.0,80.0,86.0,74.0,82.0,73.0,94.0,85.0,82.0,82.0,83.0,72.0,81.0,88.0,81.0,85.0,71.0,87.0,89.0,82.0,82.0,93.0,71.0,84.0,77.0,83.0,86.0,85.0,78.0,82.0,87.0,86.0,71.0,92.0,83.0,83.0,81.0,81.0,91.0,93.0,82.0,85.0,75.0,78.0,91.0,81.0,98.0,92.0,80.0,85.0,82.0,70.0,75.0,85.0,92.0,96.0,69.0,73.0,88.0,82.0,86.0,83.0,85.0,82.0,74.0,96.0,104.0,100.0,85.0,75.0,101.0,99.0,75.0,97.0,82.0,91.0,84.0,83.0,90.0,82.0,78.0,90.0,86.0,79.0,95.0,88.0,102.0,96.0,87.0,96.0,96.0,66.0,80.0,92.0,82.0,84.0,70.0,80.0,92.0,92.0,89.0,87.0,74.0,81.0,75.0,82.0,86.0,85.0,89.0,77.0,89.0,80.0,85.0,93.0,77.0,79.0,79.0,82.0,101.0,75.0,75.0,82.0,80.0,83.0,96.0,81.0,104.0,98.0,90.0,94.0,87.0,72.0,93.0,89.0,87.0,97.0,87.0,90.0,102.0,84.0,103.0,86.0,88.0,97.0,68.0,103.0,85.0,88.0,94.0,85.0,97.0,97.0,86.0,103.0,82.0,91.0,78.0,82.0,103.0,78.0,83.0,83.0,97.0,71.0,82.0,84.0,110.0,94.0,91.0,93.0,93.0,68.0,77.0,96.0,84.0,91.0,73.0,78.0,88.0,83.0,102.0,88.0,84.0,91.0,75.0,92.0,103.0,90.0,106.0,75.0,100.0,92.0,78.0,98.0,89.0,95.0,96.0,83.0,107.0,96.0,78.0,87.0,77.0,72.0,84.0,68.0,90.0,94.0,82.0,89.0,89.0,75.0,85.0,85.0,80.0,85.0,64.0,87.0,95.0,79.0,94.0,84.0,93.0,83.0,82.0,85.0,85.0,96.0,92.0,73.0,104.0,88.0,82.0,95.0,87.0,92.0,86.0,86.0,98.0,84.0,80.0,84.0)

g <- as.factor(c(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,3
0,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,59,59,59,59,59,59,59,59,59,59,59,5
9,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,8
8,88,88,88,88,88,88,88,88,88,88,88,88,88,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,11
3,113,113,113,113,113,113,113,113,113,113,113,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114))

In [22]:
# Print the centralities ranked by their edges-cut score
# (sorted ascending, so the best-performing orderings come first).
for key in sorted(modeScore.keys()):
    print(key, ', '.join(modeScore[key]))


80.9 BottleNeck_centrality_HL
82.125 Alpha_LH
82.55 MNC_centrality_HL
82.575 Eigenvector_HL, Kleinbergs_centrality_HITS_HL
82.625 Betweenness_HL, Shortest_path_betweenness_HL
82.825 Stress_centrality_HL
82.85 Communicability_betweenness_centrality_HL, Path_centrality_HL, Political_independence_index_LH
83.05 Effectiveness_centrality_HL
83.075 Network_centrality_HL
83.25 Leverage_centrality_HL
83.275 Closeness_VariantLatora_HL
83.3 MCC_centrality_HL
83.625 Lapacian_centrality_HL
83.725 Average_distance_LH
83.775 Load_centrality_HL
83.85 Flow_betweenness_centrality_HL
83.975 Dangalchev_closeness_centrality_HL, Decay_centrality_HL
84.025 Barycenter_centrality_HL, Closeness_Freeman_HL, Lin_centrality_HL, Radiality_centrality_HL, Shortest_path_closeness_HL
84.1 Centroid_centrality_HL
84.175 Community_centrality_HL
84.2 Random_walk_betweenness_HL
84.25 Cross_clique_connectivity_HL, Semi_local_centrality_HL
84.325 Network_fragmentation_GeodesicDistanceWeighted_HL
84.45 Bridging_centrality_LH
84.55 Degree_centrality_HL, Shortest_path_degree_HL, Strength_weighted_vertex_degree_HL
84.625 Subgraph_HL
84.65 LAC_HL
84.7210884354 AA_random
84.75 Lobby_index_HL
84.85 Current_flow_closeness_centrality_HL, Information_centrality_HL
84.875 SALSA_HL
85.1 Network_fragmentation_HL
85.125 Markov_centrality_HL, Random_walk_closeness_HL
85.325 Diffusion_degree_HL, Topological_coefficient_LH
85.35 Stress_centrality_LH
85.55 Bridging_centrality_HL
85.6 Leverage_centrality_LH
85.625 EPC_HL
85.675 ClusterRank_HL
85.7 Entropy_centrality_LH
85.825 Local_assortativity_LH
85.875 Eccentricity_LH
86.05 Alpha_HL
86.175 BottleNeck_centrality_LH, DMNC_centrality_LH
86.2 Local_assortativity_HL
86.275 Flow_betweenness_centrality_LH
86.3 DMNC_centrality_HL, SALSA_LH
86.375 Political_independence_index_HL
86.5 Core_decomposition_LH
86.625 Degree_centrality_LH, Shortest_path_degree_LH, Strength_weighted_vertex_degree_LH
86.75 Local_clustering_coefficients_HL
86.875 Network_fragmentation_GeodesicDistanceWeighted_LH
86.925 Neighborhood_connectivity_LH
87.075 Local_clustering_coefficients_LH, Markov_centrality_LH, Random_walk_closeness_LH
87.175 Effectiveness_centrality_LH
87.425 Network_fragmentation_LH
87.5 Eccentricity_HL
87.675 Entropy_centrality_HL
87.725 Random_walk_betweenness_LH
87.975 Lapacian_centrality_LH
88.0 Load_centrality_LH
88.05 Betweenness_LH, Shortest_path_betweenness_LH
88.075 Communicability_betweenness_centrality_LH
88.15 Diffusion_degree_LH
88.225 Dangalchev_closeness_centrality_LH, Decay_centrality_LH
88.45 Centroid_centrality_LH
88.5 Community_centrality_LH, Current_flow_closeness_centrality_LH, Information_centrality_LH
88.55 Path_centrality_LH
88.7 Closeness_VariantLatora_LH
88.725 Barycenter_centrality_LH, Closeness_Freeman_LH, Lin_centrality_LH, Radiality_centrality_LH, Shortest_path_closeness_LH
88.9 EPC_LH
88.9452054795 Lobby_index_LH
88.95 Topological_coefficient_HL
89.0 Semi_local_centrality_LH
89.1 Average_distance_HL, Core_decomposition_HL
89.125 MCC_centrality_LH
89.225 MNC_centrality_LH
89.35 Subgraph_LH
89.7 Neighborhood_connectivity_HL
90.025 ClusterRank_LH, LAC_LH
90.475 Cross_clique_centrality_LH
90.9 Network_centrality_LH
91.85 Kleinbergs_centrality_HITS_LH
92.05 Eigenvector_LH
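
# A minimal sketch, assuming the modeScore dict built in In [26] is in scope:
# lower average edges cut ranks better, so the smallest key is the winner.
best = min(modeScore)
print('best score:', best, '->', ', '.join(modeScore[best]))
# best score: 80.9 -> BottleNeck_centrality_HL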

In [25]:
# Emit the centrality names as an R character vector, ordered by group id
# (group 0 is the control), and report which group id holds
# BottleNeck_centrality_HL, the centrality with the best modal score.
names = []
for key in sorted(gkey.keys()):
    names.append('"' + gkey[key] + '"')
    if gkey[key] == 'BottleNeck_centrality_HL':
        print('target best mode centrality:', key)

k = 'k <- c(' + ','.join(names) + ')'
print(k)


target best mode centrality: 10
k <- c("Leverage_centrality_HL","AA_random","Alpha_HL","Alpha_LH","Average_distance_HL","Average_distance_LH","Barycenter_centrality_HL","Barycenter_centrality_LH","Betweenness_HL","Betweenness_LH","BottleNeck_centrality_HL","BottleNeck_centrality_LH","Bridging_centrality_HL","Bridging_centrality_LH","Centroid_centrality_HL","Centroid_centrality_LH","Closeness_Freeman_HL","Closeness_Freeman_LH","Closeness_VariantLatora_HL","Closeness_VariantLatora_LH","ClusterRank_HL","ClusterRank_LH","Communicability_betweenness_centrality_HL","Communicability_betweenness_centrality_LH","Community_centrality_HL","Community_centrality_LH","Core_decomposition_HL","Core_decomposition_LH","Cross_clique_centrality_LH","Cross_clique_connectivity_HL","Current_flow_closeness_centrality_HL","Current_flow_closeness_centrality_LH","Dangalchev_closeness_centrality_HL","Dangalchev_closeness_centrality_LH","Decay_centrality_HL","Decay_centrality_LH","Degree_centrality_HL","Degree_centrality_LH","Diffusion_degree_HL","Diffusion_degree_LH","DMNC_centrality_HL","DMNC_centrality_LH","Eccentricity_HL","Eccentricity_LH","Effectiveness_centrality_HL","Effectiveness_centrality_LH","Eigenvector_HL","Eigenvector_LH","Entropy_centrality_HL","Entropy_centrality_LH","EPC_HL","EPC_LH","Flow_betweenness_centrality_HL","Flow_betweenness_centrality_LH","Information_centrality_HL","Information_centrality_LH","Kleinbergs_centrality_HITS_HL","Kleinbergs_centrality_HITS_LH","LAC_HL","LAC_LH","Lapacian_centrality_HL","Lapacian_centrality_LH","Leverage_centrality_LH","Lin_centrality_HL","Lin_centrality_LH","Load_centrality_HL","Load_centrality_LH","Lobby_index_HL","Lobby_index_LH","Local_assortativity_HL","Local_assortativity_LH","Local_clustering_coefficients_HL","Local_clustering_coefficients_LH","Markov_centrality_HL","Markov_centrality_LH","MCC_centrality_HL","MCC_centrality_LH","MNC_centrality_HL","MNC_centrality_LH","Neighborhood_connectivity_HL","Neighborhood_connectivity_LH","Network_centrality_HL","Network_centrality_LH","Network_fragmentation_GeodesicDistanceWeighted_HL","Network_fragmentation_GeodesicDistanceWeighted_LH","Network_fragmentation_HL","Network_fragmentation_LH","Path_centrality_HL","Path_centrality_LH","Political_independence_index_HL","Political_independence_index_LH","Radiality_centrality_HL","Radiality_centrality_LH","Random_walk_betweenness_HL","Random_walk_betweenness_LH","Random_walk_closeness_HL","Random_walk_closeness_LH","SALSA_HL","SALSA_LH","Semi_local_centrality_HL","Semi_local_centrality_LH","Shortest_path_betweenness_HL","Shortest_path_betweenness_LH","Shortest_path_closeness_HL","Shortest_path_closeness_LH","Shortest_path_degree_HL","Shortest_path_degree_LH","Strength_weighted_vertex_degree_HL","Strength_weighted_vertex_degree_LH","Stress_centrality_HL","Stress_centrality_LH","Subgraph_HL","Subgraph_LH","Topological_coefficient_HL","Topological_coefficient_LH")
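
# A rough Python-side counterpart of the group comparison these R vectors
# feed: a sketch using scipy.stats.kruskal (assumed available), not the R
# pipeline itself; the sample values are hypothetical stand-ins for
# centrality.scores.
from scipy import stats

groups = {
    'Leverage_centrality_HL (control)': [82.0, 72.0, 83.0, 79.0, 89.0, 94.0],
    'BottleNeck_centrality_HL':         [87.0, 89.0, 80.0, 77.0, 94.0, 92.0],
    'Eigenvector_HL':                   [94.0, 86.0, 75.0, 94.0, 92.0, 89.0],
}
h, p = stats.kruskal(*groups.values())
print('Kruskal-Wallis H = %.3f, p = %.4f' % (h, p))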

In [26]:
# Extract variables for R analysis
y = ''
g = ''

tmpY = ''  # edges-cut values for the control group (group 0)
tmpG = ''  # group labels for the control group


gkey = {}  # maps group id -> centrality name

modeScore = {}  # maps each edges-cut score to the centralities sharing it

# This sets the CONTROL for the R statistical tests; the control is the
# first centrality whose data goes into the vectors (group 0).

# Not sure; this is probably Leverage_centrality_HL, which technically
# has the best modal score.
whichIDToKeepAsZero = 10
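# (A sketch: the control index could be looked up instead of hard-coded,
# assuming 'Leverage_centrality_HL' is the intended control key.)
#whichIDToKeepAsZero = list(centralities.keys()).index('Leverage_centrality_HL')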

for i_key, key in enumerate(list(centralities.keys())):
    # Renumber the groups so the control centrality becomes group 0
    # while every other centrality keeps a stable non-zero id.
    if i_key < whichIDToKeepAsZero:
        gkey[i_key + 1] = key
    elif i_key == whichIDToKeepAsZero:
        gkey[0] = key
    else:
        gkey[i_key] = key

    centrality = centralities[key]

    # Extract the edges-cut mode value.
    # UNCOMMENT THESE TO ENABLE MODE SCORE:
    #modescore = centrality.modeScores[2].split(':')
    #mecut = float(modescore[0])  # modal value of edges cut
    #mcount = int(modescore[1])   # number of experiments with this modal value
    #overallmodescore = mecut / mcount

    # UNCOMMENT THIS TO ENABLE MODE SCORE:
    #overallmodescore = mecut

    ## COMMENT THIS OUT IF WE DON'T WANT AVERAGE!!
    #centrality.loadScores()
    #centrality.computeStatsScore()
    overallmodescore = centrality.avgScores[2]

    # Store each centrality under its edges-cut score.
    if overallmodescore in modeScore:
        modeScore[overallmodescore].append(key)
    else:
        modeScore[overallmodescore] = [key]

    for i, score in enumerate(centrality.scores):
        edges_cut = score[2]

        if i_key == whichIDToKeepAsZero:
            # Control group: keep every score.
            if len(tmpY):
                tmpY += ','
            tmpY += str(edges_cut)
            if len(tmpG):
                tmpG += ','
            tmpG += str(0)
        else:
            # Non-control groups: keep only the first 41 scores each.
            if len(y):
                y += ','
            y += str(edges_cut)
            if len(g):
                g += ','
            g += str(i_key)
            #g += '"' + str(i_key) + '"'
            if i == 40:
                break

# The control's data is prepended and labelled 0, so it sorts first
# as the reference level of the R factor.
y = "Y_best <- c(" + tmpY + ',' + y + ")"
g = "g_best <- as.factor(c(" + tmpG + ',' + g + "))"
print(y)
print("")
print(g)


Y_best <- c(82.0,72.0,83.0,79.0,89.0,94.0,91.0,85.0,96.0,73.0,86.0,96.0,82.0,94.0,72.0,76.0,91.0,87.0,93.0,97.0,87.0,90.0,88.0,93.0,81.0,91.0,84.0,79.0,96.0,87.0,86.0,90.0,75.0,82.0,98.0,85.0,92.0,91.0,75.0,79.0,82.0,72.0,83.0,79.0,89.0,94.0,91.0,85.0,96.0,73.0,86.0,96.0,82.0,94.0,72.0,76.0,91.0,87.0,93.0,97.0,87.0,90.0,88.0,93.0,81.0,91.0,84.0,79.0,96.0,87.0,86.0,90.0,75.0,82.0,98.0,85.0,92.0,91.0,75.0,79.0,87.0,89.0,80.0,77.0,94.0,92.0,78.0,90.0,82.0,66.0,86.0,82.0,85.0,96.0,87.0,80.0,86.0,75.0,83.0,88.0,94.0,86.0,75.0,94.0,92.0,89.0,84.0,68.0,102.0,80.0,77.0,99.0,86.0,79.0,95.0,77.0,98.0,77.0,77.0,79.0,87.0,74.0,83.0,79.0,82.0,103.0,107.0,80.0,91.0,85.0,74.0,80.0,85.0,83.0,90.0,68.0,92.0,88.0,85.0,91.0,79.0,87.0,98.0,86.0,86.0,89.0,99.0,83.0,81.0,96.0,81.0,73.0,88.0,87.0,82.0,83.0,88.0,94.0,76.0,86.0,100.0,74.0,86.0,71.0,86.0,75.0,91.0,91.0,76.0,87.0,90.0,73.0,84.0,83.0,76.0,89.0,72.0,80.0,83.0,74.0,87.0,80.0,75.0,96.0,77.0,86.0,82.0,85.0,85.0,77.0,99.0,88.0,78.0,90.0,71.0,82.0,81.0,76.0,87.0,77.0,78.0,81.0,86.0,80.0,78.0,96.0,87.0,99.0,99.0,92.0,97.0,79.0,65.0,75.0,99.0,78.0,101.0,84.0,91.0,89.0,85.0,93.0,79.0,97.0,88.0,77.0,92.0,96.0,102.0,87.0,76.0,99.0,81.0,103.0,104.0,85.0,90.0,88.0,82.0,99.0,81.0,92.0,99.0,80.0,87.0,85.0,78.0,76.0,91.0,87.0,74.0,86.0,94.0,64.0,86.0,89.0,82.0,86.0,71.0,81.0,95.0,87.0,85.0,81.0,76.0,84.0,83.0,85.0,90.0,84.0,92.0,86.0,100.0,81.0,86.0,91.0,79.0,79.0,81.0,82.0,89.0,78.0,78.0,80.0,87.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,80.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,77.0,84.0,77.0,79.0,74.0,93.0,93.0,81.0,83.0,96.0,67.0,77.0,82.0,78.0,91.0,65.0,86.0,89.0,74.0,83.0,85.0,77.0,89.0,78.0,83.0,86.0,86.0,77.0,75.0,88.0,89.0,82.0,92.0,85.0,82.0,90.0,86.0,92.0,72.0,78.0,81.0,84.0,76.0,91.0,78.0,79.0,105.0,97.0,93.0,96.0,93.0,74.0,83.0,87.0,90.0,91.0,82.0,76.0,84.0,86.0,99.0,82.0,85.0,82.0,80.0,92.0,91.0,109.0,79.0,75.0,98.0,79.0,105.0,95.0,87.0,88.0,86.0,96.0,97.0,99.0,77.0,80.0,76.0,79.0,75.0,76.0,75.0,97.0,94.0,75.0,81.0,81.0,67.0,76.0,84.0,76.0,92.0,67.0,71.0,96.0,71.0,87.0,83.0,73.0,84.0,82.0,93.0,87.0,87.0,82.0,70.0,92.0,81.0,69.0,90.0,70.0,85.0,77.0,72.0,97.0,90.0,74.0,78.0,79.0,79.0,79.0,89.0,73.0,112.0,96.0,76.0,87.0,96.0,75.0,78.0,93.0,84.0,91.0,73.0,87.0,95.0,79.0,89.0,77.0,87.0,87.0,77.0,84.0,94.0,96.0,82.0,72.0,102.0,79.0,74.0,95.0,82.0,86.0,86.0,87.0,94.0,91.0,79.0,80.0,79.0,76.0,77.0,86.0,78.0,91.0,102.0,88.0,85.0,90.0,64.0,77.0,84.0,81.0,88.0,62.0,83.0,88.0,75.0,95.0,87.0,81.0,98.0,82.0,87.0,90.0,95.0,84.0,80.0,101.0,76.0,81.0,99.0,74.0,85.0,81.0,84.0,96.0,82.0,81.0,84.0,76.0,86.0,74.0,87.0,78.0,97.0,97.0,84.0,84.0,79.0,68.0,85.0,87.0,80.0,87.0,69.0,84.0,81.0,81.0,92.0,95.0,72.0,83.0,76.0,87.0,98.0,85.0,83.0,81.0,101.0,87.0,84.0,87.0,72.0,88.0,84.0,81.0,87.0,82.0,82.0,89.0,86.0,85.0,74.0,79.0,70.0,111.0,99.0,71.0,103.0,97.0,100.0,77.0,93.0,79.0,97.0,65.0,90.0,97.0,76.0,91.0,93.0,78.0,90.0,84.0,93.0,86.0,96.0,91.0,98.0,107.0,78.0,76.0,108.0,72.0,96.0,100.0,91.0,101.0,77.0,79.0,90.0,85.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,80.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90
.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,77.0,92.0,78.0,87.0,74.0,92.0,89.0,81.0,83.0,95.0,63.0,86.0,92.0,77.0,86.0,74.0,79.0,92.0,86.0,86.0,84.0,74.0,88.0,80.0,84.0,90.0,86.0,80.0,84.0,88.0,79.0,83.0,96.0,80.0,82.0,76.0,81.0,87.0,76.0,75.0,86.0,92.0,91.0,94.0,89.0,80.0,95.0,95.0,85.0,87.0,95.0,77.0,81.0,90.0,94.0,89.0,70.0,84.0,97.0,75.0,88.0,86.0,86.0,91.0,79.0,106.0,81.0,97.0,111.0,84.0,106.0,88.0,94.0,96.0,73.0,89.0,87.0,83.0,91.0,98.0,84.0,82.0,91.0,80.0,89.0,90.0,71.0,98.0,94.0,82.0,91.0,90.0,72.0,82.0,87.0,82.0,89.0,70.0,88.0,91.0,83.0,91.0,87.0,93.0,93.0,76.0,94.0,88.0,85.0,82.0,76.0,94.0,85.0,79.0,98.0,74.0,92.0,84.0,79.0,93.0,85.0,78.0,92.0,80.0,87.0,93.0,83.0,87.0,108.0,97.0,104.0,82.0,88.0,93.0,95.0,94.0,88.0,88.0,76.0,90.0,92.0,75.0,99.0,93.0,88.0,90.0,75.0,97.0,85.0,99.0,100.0,72.0,97.0,100.0,77.0,89.0,76.0,78.0,101.0,90.0,97.0,99.0,81.0,98.0,87.0,82.0,76.0,99.0,70.0,101.0,91.0,82.0,91.0,83.0,67.0,84.0,94.0,78.0,89.0,68.0,85.0,94.0,81.0,86.0,82.0,78.0,83.0,71.0,89.0,82.0,84.0,80.0,74.0,86.0,76.0,77.0,98.0,78.0,82.0,80.0,79.0,94.0,84.0,77.0,79.0,82.0,81.0,79.0,99.0,78.0,93.0,94.0,87.0,96.0,92.0,94.0,92.0,94.0,79.0,90.0,65.0,90.0,97.0,72.0,102.0,89.0,96.0,92.0,75.0,89.0,93.0,90.0,96.0,104.0,89.0,86.0,73.0,96.0,79.0,93.0,77.0,80.0,92.0,80.0,82.0,98.0,81.0,72.0,75.0,85.0,70.0,93.0,95.0,82.0,84.0,82.0,88.0,84.0,94.0,81.0,89.0,71.0,75.0,91.0,82.0,94.0,88.0,80.0,83.0,68.0,91.0,90.0,96.0,86.0,78.0,97.0,81.0,80.0,92.0,81.0,87.0,80.0,79.0,94.0,84.0,76.0,89.0,72.0,80.0,72.0,98.0,79.0,101.0,101.0,93.0,85.0,88.0,77.0,88.0,92.0,79.0,94.0,71.0,90.0,97.0,82.0,92.0,91.0,105.0,89.0,88.0,98.0,100.0,94.0,96.0,89.0,94.0,80.0,97.0,103.0,72.0,76.0,79.0,79.0,98.0,91.0,82.0,80.0,80.0,88.0,85.0,99.0,88.0,108.0,88.0,85.0,95.0,83.0,69.0,87.0,94.0,79.0,94.0,90.0,95.0,84.0,83.0,97.0,96.0,91.0,83.0,83.0,96.0,90.0,90.0,91.0,83.0,93.0,100.0,105.0,95.0,78.0,86.0,84.0,88.0,102.0,82.0,72.0,85.0,88.0,80.0,74.0,79.0,77.0,105.0,87.0,91.0,89.0,88.0,75.0,83.0,86.0,92.0,87.0,67.0,85.0,90.0,87.0,82.0,84.0,90.0,84.0,80.0,93.0,111.0,95.0,87.0,77.0,106.0,82.0,89.0,103.0,73.0,88.0,91.0,87.0,92.0,77.0,83.0,84.0,80.0,87.0,81.0,95.0,70.0,111.0,98.0,86.0,95.0,86.0,86.0,81.0,90.0,88.0,105.0,68.0,93.0,94.0,92.0,96.0,87.0,99.0,88.0,87.0,96.0,95.0,99.0,80.0,85.0,97.0,87.0,76.0,98.0,84.0,95.0,93.0,99.0,106.0,84.0,87.0,95.0,87.0,81.0,74.0,98.0,79.0,90.0,97.0,88.0,85.0,88.0,77.0,89.0,89.0,81.0,92.0,63.0,84.0,83.0,82.0,88.0,86.0,75.0,83.0,79.0,85.0,89.0,91.0,97.0,78.0,89.0,78.0,89.0,86.0,81.0,78.0,85.0,84.0,92.0,78.0,78.0,81.0,81.0,83.0,79.0,75.0,77.0,97.0,88.0,76.0,83.0,93.0,73.0,79.0,89.0,79.0,95.0,67.0,84.0,92.0,75.0,78.0,94.0,79.0,86.0,78.0,93.0,95.0,87.0,99.0,82.0,92.0,78.0,79.0,102.0,77.0,95.0,76.0,91.0,91.0,91.0,83.0,84.0,83.0,89.0,94.0,91.0,83.0,111.0,100.0,82.0,97.0,91.0,95.0,87.0,90.0,81.0,99.0,75.0,81.0,83.0,82.0,89.0,79.0,95.0,97.0,81.0,88.0,87.0,85.0,85.0,83.0,93.0,90.0,74.0,93.0,78.0,96.0,88.0,85.0,93.0,96.0,88.0,86.0,89.0,87.0,79.0,97.0,78.0,92.0,90.0,75.0,81.0,89.0,64.0,86.0,88.0,83.0,88.0,70.0,89.0,89.0,68.0,92.0,93.0,75.0,80.0,80.0,82.0,91.0,96.0,89.0,79.0,88.0,76.0,82.0,91.0,81.0,81.0,85.0,83.0,83.0,95.0,75.0,89.0,87.0,79.0,78.0,78.0,83.0,109.0,98.0,83.0,89.0,98.0,66.0,79.0,91.0,93.0,93.0,64.0,94.0,94.0,91.0,89.0,81.0,93.0,86.0,82.0,99.0,95.0,94.0,85.0,88.0,90.0,99.0,82.0,104.0,75.0,91.0,92.0,84.0,91.0,96.0,81.0,92.0,79.0,87.0,79.0,97.0,78.0,92.0,90.0,75.0,81.0,89.0,64.0,86.0,88.0,83.0,88.0,70
.0,89.0,89.0,68.0,92.0,93.0,75.0,80.0,80.0,82.0,91.0,96.0,89.0,79.0,88.0,76.0,82.0,91.0,81.0,81.0,85.0,83.0,83.0,95.0,75.0,89.0,87.0,79.0,78.0,78.0,83.0,109.0,98.0,83.0,89.0,98.0,66.0,79.0,91.0,93.0,93.0,64.0,94.0,94.0,91.0,89.0,81.0,93.0,86.0,82.0,99.0,95.0,94.0,85.0,88.0,90.0,99.0,82.0,104.0,75.0,91.0,92.0,84.0,91.0,96.0,81.0,92.0,79.0,85.0,77.0,83.0,74.0,101.0,89.0,81.0,82.0,92.0,75.0,83.0,85.0,74.0,86.0,65.0,87.0,96.0,79.0,87.0,89.0,75.0,84.0,75.0,92.0,95.0,90.0,91.0,90.0,90.0,79.0,80.0,98.0,71.0,81.0,93.0,84.0,93.0,83.0,86.0,82.0,85.0,75.0,95.0,86.0,93.0,92.0,94.0,89.0,92.0,92.0,68.0,82.0,96.0,86.0,89.0,69.0,79.0,94.0,94.0,87.0,84.0,85.0,81.0,85.0,92.0,88.0,104.0,85.0,91.0,102.0,80.0,74.0,96.0,70.0,89.0,86.0,78.0,93.0,91.0,79.0,80.0,75.0,86.0,78.0,91.0,73.0,85.0,94.0,81.0,90.0,96.0,63.0,80.0,93.0,85.0,85.0,65.0,87.0,92.0,85.0,92.0,92.0,79.0,82.0,80.0,86.0,98.0,87.0,94.0,80.0,99.0,90.0,83.0,88.0,82.0,86.0,79.0,77.0,94.0,87.0,86.0,83.0,86.0,71.0,89.0,83.0,73.0,89.0,97.0,102.0,85.0,92.0,73.0,94.0,88.0,93.0,101.0,76.0,96.0,89.0,92.0,98.0,89.0,96.0,95.0,80.0,84.0,88.0,101.0,97.0,80.0,95.0,76.0,85.0,91.0,71.0,95.0,93.0,86.0,95.0,76.0,87.0,85.0,71.0,76.0,77.0,80.0,77.0,102.0,98.0,82.0,90.0,85.0,69.0,77.0,88.0,96.0,88.0,67.0,83.0,91.0,76.0,88.0,91.0,92.0,89.0,79.0,94.0,86.0,91.0,100.0,72.0,95.0,78.0,97.0,96.0,78.0,92.0,85.0,87.0,91.0,90.0,88.0,91.0,76.0,89.0,77.0,76.0,78.0,97.0,86.0,87.0,95.0,87.0,67.0,86.0,86.0,89.0,88.0,71.0,87.0,95.0,84.0,92.0,78.0,73.0,87.0,82.0,86.0,90.0,83.0,82.0,92.0,92.0,92.0,92.0,100.0,76.0,87.0,106.0,80.0,98.0,82.0,82.0,90.0,89.0,95.0,77.0,82.0,86.0,111.0,101.0,81.0,94.0,94.0,71.0,95.0,86.0,86.0,91.0,66.0,76.0,90.0,82.0,81.0,79.0,87.0,98.0,84.0,86.0,90.0,94.0,102.0,73.0,91.0,91.0,78.0,88.0,88.0,93.0,106.0,85.0,92.0,77.0,80.0,93.0,95.0,90.0,74.0,94.0,84.0,96.0,99.0,83.0,88.0,88.0,63.0,88.0,93.0,81.0,100.0,74.0,83.0,89.0,79.0,93.0,89.0,78.0,83.0,69.0,94.0,96.0,93.0,94.0,74.0,89.0,99.0,76.0,87.0,85.0,81.0,86.0,83.0,97.0,71.0,89.0,83.0,90.0,80.0,72.0,89.0,77.0,97.0,93.0,79.0,84.0,82.0,73.0,77.0,89.0,80.0,87.0,63.0,78.0,90.0,81.0,83.0,87.0,85.0,85.0,76.0,91.0,82.0,87.0,83.0,78.0,87.0,80.0,86.0,99.0,83.0,80.0,83.0,77.0,91.0,86.0,84.0,78.0,80.0,83.0,81.0,91.0,82.0,110.0,107.0,90.0,90.0,81.0,71.0,92.0,88.0,87.0,81.0,72.0,84.0,92.0,94.0,84.0,80.0,91.0,85.0,80.0,86.0,88.0,95.0,94.0,79.0,106.0,76.0,75.0,100.0,73.0,91.0,93.0,88.0,91.0,83.0,75.0,98.0,83.0,84.0,89.0,94.0,72.0,91.0,93.0,89.0,83.0,96.0,69.0,82.0,86.0,84.0,87.0,66.0,71.0,85.0,85.0,94.0,87.0,75.0,82.0,75.0,82.0,91.0,84.0,77.0,71.0,89.0,77.0,88.0,88.0,71.0,85.0,77.0,81.0,87.0,76.0,76.0,84.0,84.0,88.0,90.0,102.0,95.0,106.0,98.0,86.0,92.0,101.0,71.0,86.0,95.0,88.0,98.0,65.0,86.0,88.0,97.0,98.0,98.0,86.0,87.0,81.0,89.0,101.0,103.0,90.0,76.0,99.0,96.0,93.0,96.0,82.0,94.0,105.0,88.0,109.0,102.0,90.0,87.0,88.0,74.0,81.0,88.0,81.0,97.0,93.0,92.0,92.0,87.0,79.0,90.0,100.0,78.0,97.0,81.0,78.0,95.0,84.0,84.0,85.0,84.0,94.0,78.0,99.0,103.0,89.0,97.0,90.0,98.0,87.0,87.0,96.0,75.0,81.0,84.0,87.0,91.0,81.0,76.0,94.0,74.0,88.0,80.0,81.0,80.0,98.0,98.0,79.0,91.0,75.0,80.0,81.0,92.0,89.0,95.0,67.0,86.0,82.0,78.0,90.0,87.0,82.0,84.0,78.0,91.0,85.0,93.0,85.0,76.0,92.0,80.0,76.0,91.0,75.0,91.0,92.0,83.0,95.0,95.0,83.0,104.0,88.0,88.0,86.0,76.0,87.0,95.0,94.0,85.0,82.0,82.0,63.0,88.0,91.0,74.0,94.0,70.0,78.0,90.0,78.0,89.0,94.0,86.0,88.0,80.0,94.0,83.0,90.0,88.0,75.0,98.0,90.0,84.0,95.0,77.0,83.0,81.0,90.0,91.0,92.0,80.0,96.0,88.0,79.0,81.0,91.0,80.0,117.0,97.0,91.0,87.0,99.0,68.0,96.0,87.0,85.0,94.0,70.0,93.0,94.0,85.0,97.0,99.0,85.0,89.0
,79.0,111.0,95.0,83.0,80.0,83.0,93.0,91.0,95.0,95.0,72.0,89.0,92.0,82.0,99.0,87.0,81.0,85.0,79.0,77.0,76.0,89.0,72.0,87.0,97.0,77.0,87.0,88.0,76.0,84.0,88.0,82.0,92.0,69.0,80.0,85.0,77.0,90.0,92.0,92.0,98.0,80.0,81.0,80.0,84.0,86.0,73.0,91.0,89.0,82.0,89.0,75.0,90.0,84.0,86.0,91.0,77.0,78.0,83.0,77.0,86.0,82.0,83.0,75.0,100.0,95.0,75.0,95.0,80.0,79.0,94.0,95.0,82.0,93.0,71.0,84.0,90.0,81.0,98.0,86.0,73.0,85.0,71.0,94.0,90.0,86.0,82.0,88.0,96.0,81.0,85.0,103.0,92.0,100.0,85.0,88.0,91.0,80.0,72.0,85.0,86.0,83.0,79.0,75.0,77.0,97.0,88.0,76.0,83.0,93.0,73.0,79.0,89.0,79.0,95.0,67.0,84.0,92.0,75.0,78.0,94.0,79.0,86.0,78.0,93.0,95.0,87.0,99.0,82.0,92.0,78.0,79.0,102.0,77.0,95.0,76.0,91.0,91.0,91.0,83.0,84.0,83.0,89.0,94.0,91.0,83.0,111.0,100.0,82.0,97.0,91.0,95.0,87.0,90.0,81.0,99.0,75.0,81.0,83.0,82.0,89.0,79.0,95.0,97.0,81.0,88.0,87.0,85.0,85.0,83.0,93.0,90.0,74.0,93.0,78.0,96.0,88.0,85.0,93.0,96.0,88.0,86.0,89.0,84.0,89.0,94.0,72.0,91.0,93.0,89.0,83.0,96.0,69.0,82.0,86.0,84.0,87.0,66.0,71.0,85.0,85.0,94.0,87.0,75.0,82.0,75.0,82.0,91.0,84.0,77.0,71.0,89.0,77.0,88.0,88.0,71.0,85.0,77.0,81.0,87.0,76.0,76.0,84.0,84.0,88.0,90.0,102.0,95.0,106.0,98.0,86.0,92.0,101.0,71.0,86.0,95.0,88.0,98.0,65.0,86.0,88.0,97.0,98.0,98.0,86.0,87.0,81.0,89.0,101.0,103.0,90.0,76.0,99.0,96.0,93.0,96.0,82.0,94.0,105.0,88.0,109.0,94.0,90.0,87.0,88.0,76.0,74.0,92.0,73.0,100.0,93.0,82.0,82.0,86.0,79.0,86.0,89.0,86.0,91.0,66.0,85.0,86.0,76.0,84.0,84.0,79.0,88.0,78.0,92.0,92.0,94.0,94.0,86.0,90.0,85.0,81.0,85.0,75.0,79.0,92.0,77.0,99.0,89.0,78.0,83.0,76.0,79.0,97.0,81.0,81.0,100.0,91.0,86.0,90.0,87.0,88.0,90.0,102.0,82.0,92.0,84.0,88.0,84.0,93.0,90.0,90.0,89.0,97.0,85.0,93.0,101.0,91.0,106.0,91.0,93.0,93.0,98.0,97.0,71.0,85.0,93.0,83.0,98.0,90.0,86.0,86.0,79.0,85.0,80.0,72.0,74.0,89.0,105.0,81.0,87.0,94.0,78.0,90.0,89.0,77.0,86.0,68.0,80.0,94.0,80.0,86.0,90.0,74.0,85.0,80.0,80.0,96.0,86.0,79.0,69.0,90.0,79.0,80.0,94.0,80.0,87.0,76.0,82.0,95.0,86.0,79.0,83.0,85.0,81.0,82.0,88.0,71.0,106.0,92.0,88.0,90.0,95.0,70.0,79.0,89.0,73.0,88.0,67.0,93.0,105.0,90.0,92.0,93.0,73.0,85.0,87.0,98.0,95.0,92.0,87.0,87.0,97.0,88.0,88.0,105.0,84.0,101.0,81.0,83.0,92.0,94.0,76.0,94.0,81.0,82.0,79.0,77.0,72.0,97.0,93.0,76.0,85.0,83.0,65.0,80.0,89.0,74.0,89.0,63.0,78.0,90.0,78.0,86.0,87.0,76.0,94.0,77.0,88.0,86.0,84.0,84.0,77.0,87.0,80.0,85.0,94.0,78.0,90.0,86.0,86.0,92.0,94.0,88.0,81.0,82.0,92.0,74.0,97.0,80.0,100.0,89.0,84.0,87.0,83.0,74.0,86.0,86.0,79.0,91.0,67.0,95.0,84.0,71.0,93.0,85.0,83.0,88.0,72.0,98.0,83.0,93.0,101.0,77.0,94.0,87.0,86.0,87.0,70.0,93.0,87.0,80.0,93.0,88.0,83.0,84.0,92.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,80.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,77.0,89.0,73.0,82.0,69.0,96.0,87.0,86.0,82.0,94.0,68.0,76.0,85.0,82.0,97.0,65.0,91.0,90.0,85.0,85.0,89.0,75.0,85.0,81.0,87.0,85.0,86.0,78.0,69.0,84.0,86.0,89.0,99.0,86.0,82.0,80.0,87.0,93.0,88.0,82.0,78.0,89.0,82.0,87.0,95.0,90.0,96.0,83.0,76.0,100.0,93.0,68.0,86.0,86.0,86.0,85.0,90.0,84.0,95.0,79.0,87.0,81.0,87.0,84.0,81.0,97.0,85.0,100.0,100.0,86.0,110.0,81.0,99.0,95.0,83.0,83.0,89.0,83.0,93.0,93.0,80.0,82.0,82.0,87.0,76.0,88.0,82.0,93.0,92.0,82.0,87.0,87.0,79.0,84.0,89.0,75.0,92.0,64.0,85.0,87.0,73.0,91.0,88.0,76.0,92.0,77.0,84.0,86.0,85.0,96.0,89.0,99.0
,76.0,74.0,91.0,74.0,92.0,83.0,89.0,94.0,81.0,84.0,87.0,87.0,97.0,83.0,80.0,83.0,102.0,103.0,93.0,90.0,103.0,61.0,89.0,90.0,87.0,90.0,64.0,86.0,90.0,81.0,86.0,96.0,94.0,99.0,76.0,96.0,90.0,92.0,86.0,100.0,92.0,94.0,81.0,93.0,80.0,97.0,83.0,80.0,83.0,102.0,103.0,93.0,90.0,86.0,88.0,82.0,82.0,100.0,96.0,93.0,86.0,89.0,79.0,86.0,86.0,76.0,92.0,71.0,81.0,97.0,83.0,83.0,79.0,81.0,97.0,77.0,109.0,84.0,95.0,88.0,69.0,93.0,79.0,81.0,101.0,84.0,81.0,88.0,84.0,93.0,80.0,86.0,83.0,80.0,75.0,82.0,89.0,105.0,100.0,86.0,89.0,88.0,65.0,82.0,92.0,92.0,92.0,65.0,83.0,90.0,78.0,87.0,88.0,81.0,96.0,80.0,89.0,90.0,87.0,89.0,77.0,91.0,86.0,73.0,95.0,85.0,92.0,93.0,85.0,98.0,76.0,78.0,84.0,79.0,90.0,82.0,84.0,106.0,102.0,86.0,91.0,84.0,71.0,76.0,92.0,77.0,86.0,62.0,88.0,89.0,86.0,80.0,89.0,89.0,100.0,77.0,100.0,89.0,98.0,88.0,92.0,102.0,79.0,76.0,101.0,78.0,91.0,77.0,75.0,97.0,90.0,78.0,93.0,91.0,69.0,91.0,68.0,98.0,105.0,81.0,89.0,78.0,70.0,83.0,92.0,83.0,97.0,70.0,87.0,93.0,93.0,90.0,75.0,75.0,84.0,74.0,104.0,84.0,95.0,82.0,94.0,98.0,88.0,97.0,98.0,76.0,93.0,92.0,89.0,105.0,90.0,73.0,89.0,83.0,79.0,79.0,77.0,97.0,87.0,76.0,84.0,93.0,69.0,85.0,87.0,80.0,92.0,67.0,81.0,91.0,83.0,87.0,93.0,80.0,85.0,83.0,93.0,98.0,87.0,97.0,81.0,89.0,78.0,86.0,102.0,77.0,95.0,74.0,86.0,91.0,91.0,78.0,84.0,75.0,77.0,89.0,85.0,105.0,94.0,82.0,100.0,91.0,82.0,82.0,86.0,83.0,92.0,67.0,92.0,82.0,86.0,86.0,97.0,95.0,87.0,72.0,95.0,81.0,93.0,99.0,94.0,93.0,91.0,79.0,99.0,76.0,96.0,76.0,79.0,89.0,85.0,85.0,86.0,81.0,82.0,89.0,72.0,93.0,97.0,84.0,84.0,86.0,63.0,82.0,94.0,81.0,87.0,64.0,78.0,85.0,77.0,83.0,89.0,77.0,82.0,76.0,95.0,88.0,81.0,92.0,92.0,90.0,80.0,88.0,85.0,75.0,80.0,88.0,89.0,93.0,73.0,77.0,80.0,87.0,73.0,99.0,84.0,106.0,94.0,91.0,96.0,89.0,66.0,75.0,90.0,90.0,92.0,65.0,83.0,106.0,80.0,91.0,96.0,99.0,101.0,67.0,89.0,97.0,93.0,85.0,85.0,98.0,89.0,88.0,99.0,87.0,93.0,93.0,83.0,101.0,80.0,83.0,102.0,85.0,76.0,80.0,71.0,88.0,89.0,88.0,84.0,91.0,64.0,85.0,86.0,80.0,90.0,75.0,83.0,82.0,73.0,89.0,84.0,89.0,83.0,68.0,91.0,91.0,81.0,81.0,83.0,97.0,81.0,78.0,96.0,72.0,91.0,75.0,82.0,89.0,77.0,75.0,79.0,88.0,98.0,87.0,79.0,102.0,92.0,91.0,96.0,93.0,70.0,78.0,96.0,89.0,98.0,77.0,86.0,89.0,81.0,89.0,92.0,92.0,89.0,75.0,96.0,88.0,104.0,108.0,90.0,104.0,93.0,84.0,97.0,82.0,84.0,85.0,85.0,97.0,79.0,80.0,86.0,80.0,81.0,95.0,95.0,110.0,96.0,75.0,90.0,89.0,73.0,87.0,91.0,97.0,93.0,80.0,84.0,97.0,79.0,96.0,83.0,90.0,97.0,85.0,87.0,99.0,92.0,99.0,85.0,102.0,91.0,96.0,98.0,78.0,90.0,93.0,89.0,96.0,85.0,81.0,84.0,83.0,73.0,93.0,95.0,101.0,89.0,99.0,92.0,80.0,69.0,88.0,85.0,83.0,104.0,69.0,84.0,91.0,91.0,89.0,78.0,84.0,81.0,72.0,99.0,100.0,103.0,90.0,83.0,91.0,95.0,71.0,92.0,76.0,91.0,85.0,85.0,95.0,90.0,76.0,82.0,71.0,81.0,80.0,75.0,90.0,99.0,76.0,85.0,88.0,68.0,79.0,94.0,75.0,86.0,64.0,86.0,95.0,84.0,88.0,83.0,79.0,82.0,79.0,84.0,89.0,93.0,90.0,80.0,91.0,78.0,92.0,97.0,72.0,78.0,87.0,80.0,96.0,76.0,79.0,74.0,83.0,90.0,88.0,71.0,120.0,101.0,100.0,104.0,98.0,67.0,82.0,89.0,89.0,87.0,99.0,89.0,92.0,89.0,95.0,75.0,101.0,90.0,76.0,100.0,95.0,98.0,77.0,75.0,107.0,80.0,107.0,96.0,76.0,91.0,91.0,90.0,96.0,96.0,84.0,102.0,86.0,80.0,86.0,71.0,92.0,99.0,82.0,87.0,81.0,74.0,79.0,90.0,78.0,91.0,70.0,88.0,82.0,75.0,91.0,94.0,75.0,86.0,68.0,86.0,85.0,92.0,83.0,81.0,98.0,87.0,84.0,98.0,72.0,91.0,84.0,76.0,101.0,87.0,80.0,83.0,90.0,86.0,93.0,76.0,96.0,93.0,85.0,91.0,95.0,76.0,82.0,98.0,91.0,93.0,80.0,78.0,96.0,78.0,93.0,83.0,90.0,87.0,66.0,94.0,86.0,88.0,107.0,81.0,102.0,83.0,78.0,95.0,74.0,85.0,88.0,91.0,93.0,84.0,71.0,79.0,90.0,88.0,85.0,79.0,93.0,88.0,86.0,87.0
,84.0,73.0,72.0,91.0,80.0,88.0,64.0,81.0,85.0,80.0,89.0,78.0,76.0,83.0,69.0,96.0,84.0,90.0,88.0,73.0,95.0,79.0,98.0,99.0,93.0,94.0,84.0,80.0,106.0,89.0,86.0,81.0,87.0,87.0,82.0,85.0,97.0,95.0,88.0,92.0,79.0,79.0,78.0,87.0,88.0,87.0,78.0,83.0,89.0,87.0,95.0,86.0,101.0,98.0,74.0,96.0,91.0,101.0,82.0,76.0,100.0,78.0,88.0,91.0,67.0,86.0,98.0,88.0,102.0,92.0,78.0,81.0,83.0,76.0,80.0,68.0,90.0,97.0,85.0,88.0,90.0,69.0,79.0,87.0,79.0,89.0,65.0,79.0,91.0,72.0,89.0,81.0,78.0,82.0,84.0,88.0,91.0,88.0,79.0,72.0,96.0,76.0,81.0,81.0,70.0,75.0,89.0,90.0,99.0,85.0,83.0,90.0,81.0,81.0,100.0,84.0,95.0,99.0,91.0,88.0,81.0,93.0,95.0,88.0,92.0,96.0,72.0,80.0,82.0,94.0,81.0,91.0,87.0,85.0,86.0,90.0,83.0,101.0,97.0,91.0,102.0,79.0,75.0,102.0,74.0,83.0,99.0,87.0,103.0,91.0,70.0,93.0,84.0,82.0,91.0,77.0,103.0,98.0,76.0,86.0,86.0,70.0,87.0,88.0,82.0,85.0,74.0,88.0,97.0,93.0,81.0,78.0,97.0,87.0,80.0,95.0,82.0,84.0,102.0,82.0,95.0,91.0,85.0,98.0,78.0,91.0,80.0,85.0,96.0,89.0,75.0,77.0,82.0,71.0,86.0,73.0,96.0,90.0,80.0,82.0,79.0,68.0,81.0,95.0,77.0,84.0,82.0,77.0,93.0,69.0,100.0,74.0,76.0,85.0,72.0,87.0,99.0,89.0,80.0,73.0,98.0,81.0,79.0,94.0,73.0,90.0,88.0,79.0,88.0,77.0,84.0,83.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,85.0,82.0,77.0,79.0,101.0,92.0,77.0,81.0,90.0,64.0,83.0,95.0,81.0,94.0,64.0,81.0,94.0,80.0,92.0,86.0,86.0,86.0,75.0,92.0,85.0,87.0,87.0,80.0,92.0,82.0,90.0,89.0,68.0,84.0,81.0,87.0,94.0,86.0,80.0,79.0,78.0,74.0,99.0,70.0,104.0,94.0,75.0,89.0,91.0,86.0,88.0,96.0,79.0,95.0,70.0,92.0,102.0,89.0,88.0,93.0,88.0,85.0,84.0,99.0,90.0,100.0,96.0,76.0,96.0,90.0,79.0,93.0,87.0,87.0,79.0,91.0,90.0,82.0,77.0,88.0,83.0,79.0,79.0,77.0,97.0,87.0,76.0,84.0,93.0,69.0,85.0,87.0,80.0,92.0,67.0,81.0,91.0,83.0,87.0,93.0,80.0,85.0,83.0,93.0,98.0,87.0,97.0,81.0,89.0,78.0,86.0,102.0,77.0,95.0,74.0,86.0,91.0,91.0,78.0,84.0,75.0,77.0,89.0,85.0,105.0,94.0,82.0,100.0,91.0,82.0,82.0,86.0,83.0,92.0,67.0,92.0,82.0,86.0,86.0,97.0,95.0,87.0,72.0,95.0,81.0,93.0,99.0,94.0,93.0,91.0,79.0,99.0,76.0,96.0,76.0,79.0,89.0,85.0,85.0,86.0,78.0,74.0,88.0,71.0,99.0,99.0,75.0,87.0,83.0,65.0,77.0,93.0,87.0,87.0,72.0,82.0,91.0,89.0,86.0,93.0,85.0,83.0,79.0,97.0,87.0,89.0,89.0,76.0,92.0,87.0,71.0,101.0,80.0,79.0,89.0,80.0,99.0,93.0,81.0,82.0,87.0,74.0,93.0,89.0,90.0,98.0,81.0,95.0,79.0,65.0,89.0,92.0,77.0,96.0,65.0,88.0,92.0,89.0,92.0,84.0,97.0,86.0,74.0,97.0,83.0,91.0,87.0,75.0,93.0,88.0,91.0,97.0,93.0,83.0,77.0,82.0,88.0,78.0,81.0,96.0,82.0,74.0,81.0,78.0,93.0,99.0,85.0,86.0,101.0,69.0,86.0,92.0,77.0,85.0,70.0,76.0,85.0,76.0,86.0,89.0,75.0,82.0,75.0,91.0,102.0,86.0,92.0,76.0,93.0,86.0,87.0,96.0,74.0,85.0,85.0,83.0,90.0,79.0,81.0,82.0,77.0,82.0,90.0,74.0,106.0,94.0,90.0,89.0,83.0,73.0,92.0,87.0,73.0,97.0,90.0,91.0,89.0,80.0,92.0,95.0,89.0,90.0,81.0,93.0,112.0,89.0,86.0,98.0,97.0,78.0,110.0,98.0,89.0,90.0,78.0,89.0,102.0,85.0,76.0,86.0,84.0,77.0,79.0,74.0,93.0,93.0,81.0,83.0,96.0,67.0,77.0,82.0,78.0,91.0,65.0,86.0,89.0,74.0,83.0,85.0,77.0,89.0,78.0,83.0,86.0,86.0,77.0,75.0,88.0,89.0,82.0,92.0,85.0,82.0,90.0,86.0,92.0,72.0,78.0,81.0,76.0,91.0,78.0,79.0,105.0,97.0,93.0,96.0,93.0,74.0,83.0,87.0,90.0,91.0,82.0,76.0,84.0,86.0,99.0,82.0,85.0,82.0,80.0,92.0,91.0,109.0,79.0,75.0,98.0,79.0,105.0,95.0
,87.0,88.0,86.0,96.0,97.0,99.0,77.0,80.0,80.0,87.0,83.0,74.0,97.0,87.0,76.0,89.0,94.0,69.0,86.0,89.0,80.0,88.0,71.0,79.0,98.0,73.0,87.0,85.0,75.0,84.0,79.0,88.0,90.0,84.0,78.0,78.0,105.0,82.0,85.0,91.0,83.0,76.0,82.0,78.0,89.0,91.0,77.0,94.0,77.0,81.0,90.0,92.0,109.0,98.0,77.0,90.0,92.0,86.0,78.0,80.0,84.0,87.0,75.0,86.0,93.0,88.0,98.0,84.0,90.0,86.0,81.0,96.0,87.0,99.0,94.0,77.0,106.0,104.0,88.0,97.0,91.0,81.0,85.0,85.0,106.0,81.0,86.0,84.0,85.0,77.0,83.0,74.0,101.0,89.0,81.0,82.0,92.0,75.0,83.0,85.0,74.0,86.0,65.0,87.0,96.0,79.0,87.0,89.0,75.0,84.0,75.0,92.0,95.0,90.0,91.0,90.0,90.0,79.0,80.0,98.0,71.0,81.0,93.0,84.0,93.0,83.0,86.0,82.0,75.0,95.0,86.0,93.0,92.0,94.0,89.0,92.0,92.0,68.0,82.0,96.0,86.0,89.0,69.0,79.0,94.0,94.0,87.0,84.0,85.0,81.0,85.0,92.0,88.0,104.0,85.0,91.0,102.0,80.0,74.0,96.0,70.0,89.0,86.0,78.0,93.0,91.0,79.0,80.0,85.0,77.0,83.0,74.0,101.0,89.0,81.0,82.0,92.0,75.0,83.0,85.0,74.0,86.0,65.0,87.0,96.0,79.0,87.0,89.0,75.0,84.0,75.0,92.0,95.0,90.0,91.0,90.0,90.0,79.0,80.0,98.0,71.0,81.0,93.0,84.0,93.0,83.0,86.0,82.0,75.0,95.0,86.0,93.0,92.0,94.0,89.0,92.0,92.0,68.0,82.0,96.0,86.0,89.0,69.0,79.0,94.0,94.0,87.0,84.0,85.0,81.0,85.0,92.0,88.0,104.0,85.0,91.0,102.0,80.0,74.0,96.0,70.0,89.0,86.0,78.0,93.0,91.0,79.0,80.0,86.0,74.0,82.0,73.0,94.0,85.0,82.0,82.0,83.0,72.0,81.0,88.0,81.0,85.0,71.0,87.0,89.0,82.0,82.0,93.0,71.0,84.0,77.0,83.0,86.0,85.0,78.0,82.0,87.0,86.0,71.0,92.0,83.0,83.0,81.0,81.0,91.0,93.0,82.0,85.0,75.0,78.0,91.0,81.0,98.0,92.0,80.0,85.0,82.0,70.0,75.0,85.0,92.0,96.0,69.0,73.0,88.0,82.0,86.0,83.0,85.0,82.0,74.0,96.0,104.0,100.0,85.0,75.0,101.0,99.0,75.0,97.0,82.0,91.0,84.0,83.0,90.0,82.0,78.0,90.0,86.0,79.0,95.0,88.0,102.0,96.0,87.0,96.0,96.0,66.0,80.0,92.0,82.0,84.0,70.0,80.0,92.0,92.0,89.0,87.0,74.0,81.0,75.0,82.0,86.0,85.0,89.0,77.0,89.0,80.0,85.0,93.0,77.0,79.0,79.0,82.0,101.0,75.0,75.0,82.0,80.0,83.0,96.0,81.0,104.0,98.0,90.0,94.0,87.0,72.0,93.0,89.0,87.0,97.0,87.0,90.0,102.0,84.0,103.0,86.0,88.0,97.0,68.0,103.0,85.0,88.0,94.0,85.0,97.0,97.0,86.0,103.0,82.0,91.0,78.0,82.0,103.0,78.0,83.0,83.0,97.0,71.0,82.0,84.0,110.0,94.0,91.0,93.0,93.0,68.0,77.0,96.0,84.0,91.0,73.0,78.0,88.0,83.0,102.0,88.0,84.0,91.0,75.0,92.0,103.0,90.0,106.0,75.0,100.0,92.0,78.0,98.0,89.0,95.0,96.0,83.0,107.0,96.0,78.0,87.0,77.0,72.0,84.0,68.0,90.0,94.0,82.0,89.0,89.0,75.0,85.0,85.0,80.0,85.0,64.0,87.0,95.0,79.0,94.0,84.0,93.0,83.0,82.0,85.0,85.0,96.0,92.0,73.0,104.0,88.0,82.0,95.0,87.0,92.0,86.0,86.0,98.0,84.0,80.0,84.0)

g_best <- as.factor(c(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,30,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,
31,31,31,31,31,31,31,31,31,31,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,39,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,41,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,
60,60,60,60,60,60,60,60,60,60,60,60,60,60,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,68,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,69,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,90,90,90,
90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,90,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,114,
114,114,114,114,114,114,114,114,114))

In [27]:
# Build an R character vector (k_best) of the centrality names, ordered by
# gkey's keys, and report the key of the target centrality along the way.
parts = []
for key in sorted(gkey.keys()):
    parts.append('"' + gkey[key] + '"')
    if gkey[key] == 'Leverage_centrality_HL':
        print('target best mode centrality:', key)

k = 'k_best <- c(' + ','.join(parts) + ')'
print(k)


target best mode centrality: 61
k_best <- c("BottleNeck_centrality_LH","AA_random","Alpha_HL","Alpha_LH","Average_distance_HL","Average_distance_LH","Barycenter_centrality_HL","Barycenter_centrality_LH","Betweenness_HL","Betweenness_LH","BottleNeck_centrality_HL","Bridging_centrality_HL","Bridging_centrality_LH","Centroid_centrality_HL","Centroid_centrality_LH","Closeness_Freeman_HL","Closeness_Freeman_LH","Closeness_VariantLatora_HL","Closeness_VariantLatora_LH","ClusterRank_HL","ClusterRank_LH","Communicability_betweenness_centrality_HL","Communicability_betweenness_centrality_LH","Community_centrality_HL","Community_centrality_LH","Core_decomposition_HL","Core_decomposition_LH","Cross_clique_centrality_LH","Cross_clique_connectivity_HL","Current_flow_closeness_centrality_HL","Current_flow_closeness_centrality_LH","Dangalchev_closeness_centrality_HL","Dangalchev_closeness_centrality_LH","Decay_centrality_HL","Decay_centrality_LH","Degree_centrality_HL","Degree_centrality_LH","Diffusion_degree_HL","Diffusion_degree_LH","DMNC_centrality_HL","DMNC_centrality_LH","Eccentricity_HL","Eccentricity_LH","Effectiveness_centrality_HL","Effectiveness_centrality_LH","Eigenvector_HL","Eigenvector_LH","Entropy_centrality_HL","Entropy_centrality_LH","EPC_HL","EPC_LH","Flow_betweenness_centrality_HL","Flow_betweenness_centrality_LH","Information_centrality_HL","Information_centrality_LH","Kleinbergs_centrality_HITS_HL","Kleinbergs_centrality_HITS_LH","LAC_HL","LAC_LH","Lapacian_centrality_HL","Lapacian_centrality_LH","Leverage_centrality_HL","Leverage_centrality_LH","Lin_centrality_HL","Lin_centrality_LH","Load_centrality_HL","Load_centrality_LH","Lobby_index_HL","Lobby_index_LH","Local_assortativity_HL","Local_assortativity_LH","Local_clustering_coefficients_HL","Local_clustering_coefficients_LH","Markov_centrality_HL","Markov_centrality_LH","MCC_centrality_HL","MCC_centrality_LH","MNC_centrality_HL","MNC_centrality_LH","Neighborhood_connectivity_HL","Neighborhood_connectivity_LH","Network_centrality_HL","Network_centrality_LH","Network_fragmentation_GeodesicDistanceWeighted_HL","Network_fragmentation_GeodesicDistanceWeighted_LH","Network_fragmentation_HL","Network_fragmentation_LH","Path_centrality_HL","Path_centrality_LH","Political_independence_index_HL","Political_independence_index_LH","Radiality_centrality_HL","Radiality_centrality_LH","Random_walk_betweenness_HL","Random_walk_betweenness_LH","Random_walk_closeness_HL","Random_walk_closeness_LH","SALSA_HL","SALSA_LH","Semi_local_centrality_HL","Semi_local_centrality_LH","Shortest_path_betweenness_HL","Shortest_path_betweenness_LH","Shortest_path_closeness_HL","Shortest_path_closeness_LH","Shortest_path_degree_HL","Shortest_path_degree_LH","Strength_weighted_vertex_degree_HL","Strength_weighted_vertex_degree_LH","Stress_centrality_HL","Stress_centrality_LH","Subgraph_HL","Subgraph_LH","Topological_coefficient_HL","Topological_coefficient_LH")
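
The printed assignments are literal R syntax (the numeric vector and g_best above, k_best here), apparently intended to be pasted into an R session. As a convenience they can also be collected into a script file; a minimal sketch, with a hypothetical filename:

In [ ]:
# Append the generated R assignment to a script that can be sourced in R
# together with the vectors printed above (filename is illustrative).
with open('centrality_vectors.R', 'a') as f:
    f.write(k + '\n')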

In [28]:
# Kendall Tau code

# Pairwise Kendall tau between edges cut, TCV, modularity (Qds) and loneliness

metricList = []
metricList.append([])  # edges cut
metricList.append([])  # total comm volume (TCV)
metricList.append([])  # Qds (modularity)
metricList.append([])  # loneliness


for i_key, key in enumerate(list(centralities.keys())):
    centrality = centralities[key]

    # edges cut
    modescore = centrality.modeScores[2].split(':')
    metric = float(modescore[0])  # value of mode of edges cut
    metricList[0].append(metric)

    # TCV
    modescore = centrality.modeScores[3].split(':')
    metric = float(modescore[0])  # value of mode of total comm volume
    metricList[1].append(metric)

    # Qds
    modescore = centrality.modeScores[4].split(':')
    metric = float(modescore[0])  # value of mode of Qds
    metricList[2].append(metric)

    # loneliness
    modescore = centrality.modeScores[5].split(':')
    metric = float(modescore[0])  # value of mode of loneliness
    metricList[3].append(metric)

taus = []
pvals = []

from scipy.stats import kendalltau

for i in range(0, 4):
    taus.append([])
    pvals.append([])
    for e in range(0, 4):
        tau, pvalue = kendalltau(metricList[i], metricList[e])
        #print(i, e, tau, pvalue)
        taus[i].append(tau)
        pvals[i].append(pvalue)


In [30]:
for row in taus:
    print("\t".join(str(item) for item in row))

for row in pvals:
    print("\t".join(str(item) for item in row))


1.0	0.179643894705	-0.116625988565	0.137879900366
0.179643894705	1.0	-0.0582464256174	0.0155607029261
-0.116625988565	-0.0582464256174	1.0	-0.000613779411357
0.137879900366	0.0155607029261	-0.000613779411357	1.0
1.53871593044e-56	0.00442255427753	0.0646226393117	0.0289171943463
0.00442255427753	1.53871593044e-56	0.356072286027	0.805257442494
0.0646226393117	0.356072286027	1.53871593044e-56	0.992240759871
0.0289171943463	0.805257442494	0.992240759871	1.53871593044e-56
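
For readability, the same matrices can be labelled with the metric names. A minimal sketch, assuming the ordering used above (edges cut, TCV, Qds, loneliness):

In [ ]:
# Label the Kendall tau and p-value matrices with the metric names.
labels = ["EDGES CUT", "TOTAL COMM VOLUME", "Qds", "LONELINESS"]
tau_df = pd.DataFrame(taus, index=labels, columns=labels)
pval_df = pd.DataFrame(pvals, index=labels, columns=labels)
print(tau_df.round(3))
print(pval_df)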

In [ ]:
# Rank centralities on the important metrics via histogram binning.
import numpy as np
import matplotlib.pyplot as plt

def findBin(value, edges):
    """Return the 1-based bin index of value given histogram bin edges.

    Values at or beyond the last edge (and, as a simplification, below
    the first edge) fall into the overflow bin len(edges)."""
    previousEdge = 0.0
    for i, edge in enumerate(edges):
        if i != 0:
            if previousEdge <= value < edge:
                return i
        previousEdge = edge
    return len(edges)

def binScore(bin_id, num_bins, low_is_max=True):
    """Score a bin on a 0-25 scale.

    Bins in the preferred half of the histogram (low bins when low_is_max,
    high bins otherwise) score proportionally higher; the rest score 0."""
    half = int(num_bins * 0.5)
    if low_is_max:
        if bin_id > half:
            return 0
        diff = half + 1 - bin_id
    else:
        if bin_id <= half:
            return 0
        diff = bin_id - half
    return diff * 25 / half
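
# Worked example (illustrative values, not from the data): with edges
# [0, 1, 2, 3, 4] there are 4 bins, so half = 2. findBin(2.5, edges)
# returns 3; with low_is_max=True that bin scores binScore(3, 4) = 0,
# while bin 1 scores (2 + 1 - 1) * 25 / 2 = 25.0 and bin 2 scores 12.5.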
            
        
stats_metrics = [2, 3, 4, 5]  # "EDGES CUT", "TOTAL COMM VOLUME", "MODULARITY", "LONELINESS"
max_to_low_metrics = [4]  # modularity: higher is better, so score the upper bins

means = {}
stds = {}
skews = {}

means_hist = {}
means_binedges = {}
stds_hist = {}
stds_binedges = {}
skews_hist = {}
skews_binedges = {}



for stat in stats_metrics:
    means[stat] = []
    stds[stat] = []
    skews[stat] = []
    means_hist[stat] = {}
    means_binedges[stat] = {}
    stds_hist[stat] = {}
    stds_binedges[stat] = {}
    skews_hist[stat] = {}
    skews_binedges[stat] = {}


for key in list(centralities.keys()):
    centrality = centralities[key]
    centralityCode = centrality.centralityType + ":" + centrality.orderType
    
    centrality.loadScores()
    centrality.computeStatsScore()
    
    for smetric in stats_metrics:
        means[smetric].append(centrality.avgScores[smetric])
        stds[smetric].append(centrality.stdScores[smetric])
        skews[smetric].append(centrality.skewnessScores[smetric])


        
for stat in stats_metrics:
    means[stat] = np.array(means[stat])
    stds[stat] = np.array(stds[stat])
    skews[stat] = np.array(skews[stat])
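
    # Note: bins='auto' below picks the bin count per metric (the max of
    # the Sturges and Freedman-Diaconis estimators), so each metric gets
    # its own binning resolution for the scoring that follows.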
    
    means_hist[stat], means_binedges[stat] = np.histogram(means[stat], bins='auto')
    stds_hist[stat], stds_binedges[stat] = np.histogram(stds[stat], bins='auto')
    skews_hist[stat], skews_binedges[stat] = np.histogram(skews[stat], bins='auto')

rank = {}

for key in list(centralities.keys()):
    # compute scores for this statistic for each centrality
    centrality = centralities[key]

    centrality.totalScore = 0.0

    for smetric in stats_metrics:
        mu = centrality.avgScores[smetric]
        std = centrality.stdScores[smetric]
        skew = centrality.skewnessScores[smetric]

        mean_bins = len(means_hist[smetric])
        std_bins = len(stds_hist[smetric])
        skew_bins = len(skews_hist[smetric])

        low_is_max = True
        if smetric in max_to_low_metrics:
            low_is_max = False

        # mean should either be max or min
        mu_score = binScore(findBin(mu, means_binedges[smetric]), mean_bins, low_is_max)
        # std score should always be minimized
        std_score = binScore(findBin(std, stds_binedges[smetric]), std_bins)
        skew_score = binScore(findBin(skew, skews_binedges[smetric]), skew_bins, low_is_max)
        
        # override: skewness is excluded from the total score for now
        skew_score = 0.0
        
        centrality.totalScore += mu_score + std_score + skew_score
    print(centrality.centralityType + ":" + centrality.orderType, centrality.totalScore)
    if centrality.totalScore in rank:
        rank[centrality.totalScore].append(centrality.centralityType + ":" + centrality.orderType)
    else:
        rank[centrality.totalScore] = [centrality.centralityType + ":" + centrality.orderType]

count = 0
for key in sorted(rank, reverse=True):
    for item in rank[key]:
        count += 1
        print(count, key, item)
        if count == 20:
            print("===================")
        
# Plotting of the metric histograms is currently disabled by the
# immediate break below.
for stat in stats_metrics:
    break
    fig = plt.figure()
    
    ax1 = fig.add_subplot(1,3,1)
    ax1.hist(means[stat])

    ax2 = fig.add_subplot(1,3,2)
    ax2.hist(stds[stat])

    ax3 = fig.add_subplot(1,3,3)
    ax3.hist(skews[stat])


    plt.show()
#plt.show()

In [ ]:
import scipy.stats as sstats  # norm.cdf is used below

randomCentrality = centralities["AA_random"]

mu_stats = {}
std_stats = {}

# population mean/std of the per-centrality average scores, per metric
for stat in stats_metrics:
    mu_stats[stat] = np.mean(means[stat])
    std_stats[stat] = np.std(means[stat])
    
# z - scores
for key in list(centralities.keys()):
    # compute scores for this statistic for each centrality
    centrality = centralities[key]
    if key == "AA_random":
        continue
    
    # compute z-scores against the population of centrality means

    print("Z-SCORE: ", centrality.centralityType + ":" + centrality.orderType)
    
    for stat in stats_metrics:
        x = centrality.avgScores[stat]
        
        zscore = (x - mu_stats[stat]) / std_stats[stat]
        tabs = "\t\t"
        if stat == 3:
            tabs = "\t"
        print("   ", cols[stat], tabs, "{0:.5f}".format(zscore), "\t", "{0:.5f}".format(sstats.norm.cdf(zscore)))

In [ ]: