In [1]:
# 1_network_df
import networkx as nx
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
import os
from glob import glob
plt.style.use('ggplot')
pd.set_option('display.width', 5000)
pd.set_option('display.max_columns', 60)
gml_files = glob('../output/network/article_neg1.gml')
In [2]:
def calculate_graph_inf(graph):
    # filename is a global set in the loops below before this is called
    graph.name = filename
    info = nx.info(graph)
    print info
    #plt.figure(figsize=(10,10))
    #nx.draw_spring(graph, arrows=True, with_labels=True)

def highest_centrality(cent_dict):
    """Return a (node, value) tuple for the node with the
    largest value in a centrality dictionary."""
    # create ordered list of (value, node) tuples
    cent_items = [(b, a) for (a, b) in cent_dict.iteritems()]
    # sort in descending order
    cent_items.sort()
    cent_items.reverse()
    return tuple(reversed(cent_items[0]))
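In [ ]:
# Quick sanity check of highest_centrality on a small toy graph
# (the toy edges below are illustrative only, not from the article data)
toy = nx.DiGraph()
toy.add_edges_from([('a', 'b'), ('a', 'c'), ('b', 'c'), ('c', 'd')])
print highest_centrality(nx.degree_centrality(toy))   # -> ('c', 1.0)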
In [4]:
# dgraph = directed, graph = undirected
for graph_num, gml_graph in enumerate(gml_files):
    dgraph = nx.read_gml(gml_graph)
    graph = dgraph.to_undirected()  # convert to undirected graph
    #U = graph.to_undirected(reciprocal=True)
    #e = U.edges()
    #ugraph.add_edges_from(e)
    (filepath, filename) = os.path.split(gml_graph)
    print('-' * 10)
    print(gml_graph)
    calculate_graph_inf(graph)
    calculate_graph_inf(dgraph)
In [ ]:
network_data_columns = ['name',
'sentiment',
'# nodes',
'# edges',
'avg deg',
'avg in-deg',
'avg out-deg',
'density',
'deg assort coef',
'avg deg cent',
'avg bet cent',
'avg clo cent',
'high deg cent',
'high bet cent',
'high clo cent',
'avg node conn',
'# conn comp',
'gc size',
'# strong comp',
'# weak comp'
]
network_data = pd.DataFrame(columns = network_data_columns)
In [ ]:
# graph = directed, ugraph = undirected
for graph_num, gml_graph in enumerate(gml_files):
    graph = nx.read_gml(gml_graph)
    ugraph = graph.to_undirected()  # to undirected graph
    #U = graph.to_undirected(reciprocal=True)
    #e = U.edges()
    #ugraph.add_edges_from(e)
    (filepath, filename) = os.path.split(gml_graph)
    print('-' * 10)
    print(gml_graph)
    calculate_graph_inf(graph)
    calculate_graph_inf(ugraph)

    # calculate variables
    #sent = filepath.split('/')[-1]
    sent = "negative"
    nodes = nx.number_of_nodes(graph)
    edges = nx.number_of_edges(graph)
    density = float("{0:.4f}".format(nx.density(graph)))
    avg_deg_cen = np.array(nx.degree_centrality(graph).values()).mean()
    avg_bet_cen = np.array(nx.betweenness_centrality(graph).values()).mean()
    avg_clo_cen = np.array(nx.closeness_centrality(graph).values()).mean()
    in_deg = sum(graph.in_degree().values()) / float(nx.number_of_nodes(graph))
    out_deg = sum(graph.out_degree().values()) / float(nx.number_of_nodes(graph))
    avg_deg = float("{0:.4f}".format(in_deg + out_deg))
    strong_comp = nx.number_strongly_connected_components(graph)
    weak_comp = nx.number_weakly_connected_components(graph)
    avg_node_con = float("{0:.4f}".format(nx.average_node_connectivity(graph)))
    deg_assort_coeff = float("{0:.4f}".format(nx.degree_assortativity_coefficient(graph)))
    conn_comp = nx.number_connected_components(ugraph)
    deg_cen = nx.degree_centrality(graph)
    bet_cen = nx.betweenness_centrality(graph)
    clo_cen = nx.closeness_centrality(graph)
    highest_deg_cen = highest_centrality(deg_cen)
    highest_bet_cen = highest_centrality(bet_cen)
    highest_clo_cen = highest_centrality(clo_cen)
    # size of the greatest (largest) connected component
    gc_size = len(max(nx.connected_component_subgraphs(ugraph), key=len))

    # save variables into a row dictionary
    graph_values = {'name': filename,
                    'sentiment': sent,
                    '# nodes': nodes,
                    '# edges': edges,
                    'avg deg': avg_deg,
                    'avg in-deg': "%.4f" % in_deg,
                    'avg out-deg': "%.4f" % out_deg,
                    'density': density,
                    'deg assort coef': deg_assort_coeff,
                    'avg deg cent': "%.4f" % avg_deg_cen,
                    'avg bet cent': "%.4f" % avg_bet_cen,
                    'avg clo cent': "%.4f" % avg_clo_cen,
                    'high deg cent': highest_deg_cen,
                    'high bet cent': highest_bet_cen,
                    'high clo cent': highest_clo_cen,
                    'avg node conn': avg_node_con,
                    '# conn comp': conn_comp,
                    'gc size': gc_size,
                    '# strong comp': strong_comp,
                    '# weak comp': weak_comp
                    }
    network_data = network_data.append(graph_values, ignore_index=True)
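In [ ]:
# Note: DataFrame.append() is deprecated in recent pandas and removed in 2.0.
# A minimal equivalent sketch using pd.concat, assuming the graph_values dict
# and network_data frame built in the cell above:
#network_data = pd.concat([network_data, pd.DataFrame([graph_values])], ignore_index=True)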
In [ ]:
network_data
network_data.to_csv('../output/df/network_df_negative.csv', encoding = 'utf-8')
In [ ]:
# 2_node_df
data_columns = ['name',
'sentiment'
]
data = pd.DataFrame(columns = data_columns)
combined_df = pd.DataFrame()
In [ ]:
# graph = directed, ugraph = undirected
for graph_num, gml_graph in enumerate(gml_files):
    # re-read the graph so this cell is self-contained
    graph = nx.read_gml(gml_graph)
    ugraph = graph.to_undirected()
    (filepath, filename) = os.path.split(gml_graph)
    #sent = filepath.split('/')[-1]
    sent = "negative"
    print('-' * 10)
    print(gml_graph)
    calculate_graph_inf(graph)
    calculate_graph_inf(ugraph)

    # calculate variables and save into list
    graph_values = {'name': filename,
                    'sentiment': sent
                    }
    data = data.append(graph_values, ignore_index=True)

    # degree
    degree = nx.degree(graph)
    deg_df = pd.DataFrame.from_dict(degree, orient='index')
    deg_df.columns = ['degree']
    # degree centrality
    deg_cent = nx.degree_centrality(graph)
    dc_df = pd.DataFrame.from_dict(deg_cent, orient='index')
    dc_df.columns = ['deg cent']
    # betweenness centrality
    bet_cent = nx.betweenness_centrality(graph)
    bc_df = pd.DataFrame.from_dict(bet_cent, orient='index')
    bc_df.columns = ['bet cent']
    # closeness centrality
    clo_cent = nx.closeness_centrality(graph)
    cc_df = pd.DataFrame.from_dict(clo_cent, orient='index')
    cc_df.columns = ['clo cent']

    # concat node frames into node_df
    frames = [deg_df, dc_df, bc_df, cc_df]
    node_df = pd.concat(frames, axis=1)
    node_df.index.name = 'node'
    node_df = node_df.reset_index()

    values = pd.DataFrame(graph_values, columns=('name', 'sentiment'), index=[0])
    # df merges graph_values with node_df for a single graph and fills NaNs
    df = pd.concat([values, node_df], axis=1)
    df = df.fillna(method='ffill')
    combined_df = combined_df.append(df)
In [ ]:
combined_df
combined_df.to_csv('../output/df/nodes_df_negative.csv')
In [ ]:
# 7_negative_graph_calculation
def drawIt(graph, what='graph'):
    nsize = graph.number_of_nodes()
    print "Drawing %s of size %s:" % (what, nsize)
    if nsize > 20:
        plt.figure(figsize=(10, 10))
        if nsize > 40:
            nx.draw_spring(graph, with_labels=True, node_size=70, font_size=12)
        else:
            nx.draw_spring(graph, with_labels=True)
    else:
        nx.draw_spring(graph, with_labels=True)
    plt.show()

def describeGraph(graph):
    components = sorted(nx.connected_components(graph), key=len, reverse=True)
    cc = [len(c) for c in components]
    subgraphs = list(nx.connected_component_subgraphs(graph))
    params = (graph.number_of_nodes(), graph.number_of_edges(), len(cc))
    print "Graph has %s nodes, %s edges, %s connected components\n" % params
    drawIt(graph)
    for sub in components:
        drawIt(graph.subgraph(sub), what='component')
In [ ]:
# list of connected components by size
connected_components = [len(c) for c in sorted(nx.connected_components(ugraph), key=len, reverse=True)]
# generate connected components as subgraphs
subgraphs = list(nx.connected_component_subgraphs(ugraph))
# greatest component
Gc = max(nx.connected_component_subgraphs(ugraph), key=len)
# returns all minimum k cutsets of an undirected graph
# i.e., the set(s) of nodes of cardinality equal to the node connectivity of G
# thus if removed, would break G into two or more connected components
cutsets = list(nx.all_node_cuts(Gc))
print "Connected components =", connected_components
print "Greatest component size =", len(Gc)
print "# of cutsets =", len(cutsets)
# returns a set of nodes or edges of minimum cardinality that disconnects G
min_ncut = nx.minimum_node_cut(Gc)
min_ecut = nx.minimum_edge_cut(Gc)
print "Min node cut =", min_ncut
print "Min edge cut =", min_ecut
In [ ]:
# min cuts with source and target
print nx.minimum_node_cut(Gc, s='vaccines', t='autism')
print nx.minimum_edge_cut(Gc, s='vaccines', t='autism')
In [ ]:
# read edge labels in the min cut for Gc
# change source and target as needed
a = nx.minimum_edge_cut(Gc, s='vaccines', t='autism')
#a = nx.minimum_edge_cut(Gc)
labels = nx.get_edge_attributes(Gc, 'edge')
edgelabels = {}
for e in labels.keys():
    e1 = e[0:2]  # keep only (u, v); drops the key if this is a multigraph edge
    edgelabels[e1] = labels[e]
for e in a:
    if edgelabels.has_key(e):
        print e, edgelabels[e]
    else:
        # the edge may be stored with endpoints in the opposite order
        rev_e = e[::-1]
        print rev_e, edgelabels[rev_e]
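In [ ]:
# Alternative lookup sketch: the reverse-edge fallback above can also be written
# with dict.get(), which avoids has_key() (gone in Python 3); assumes a and
# edgelabels from the cell above
#for e in a:
#    print e, edgelabels.get(e, edgelabels.get(e[::-1]))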
In [ ]:
# this takes forever
# average connectivity k of a graph G is the average of local node connectivity over all pairs of nodes of G
#nx.average_node_connectivity(Gc)
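In [ ]:
# Approximation sketch: instead of the exact average over all node pairs, sample
# a fixed number of random pairs and average their local node connectivity
# (the sample size of 100 is an arbitrary choice, not from the original analysis)
#import random
#from networkx.algorithms.connectivity import local_node_connectivity
#pairs = [random.sample(Gc.nodes(), 2) for _ in range(100)]
#print np.mean([local_node_connectivity(Gc, u, v) for u, v in pairs])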
In [ ]:
# 8_single_calc_neg
#describeGraph(ugraph)
In [ ]:
# degree histogram: returns a list of frequencies of degrees
print "Degree histogram:", nx.degree_histogram(ugraph)
In [ ]:
# degree rank plot (undirected)
degree_sequence = sorted(nx.degree(ugraph).values(),reverse=True) # degree sequence
print "Degree sequence", degree_sequence
dmax = max(degree_sequence)
plt.loglog(degree_sequence,'b-', marker = 'o')
plt.title("Degree rank plot")
plt.ylabel("degree")
plt.xlabel("rank")
# draw graph in inset
plt.axes([0.45,0.45,0.45,0.45])
Gcc = sorted(nx.connected_component_subgraphs(ugraph), key = len, reverse = True)[0]
pos = nx.spring_layout(Gcc)
plt.axis('off')
nx.draw_networkx_nodes(Gcc, pos, node_size = 20)
nx.draw_networkx_edges(Gcc, pos, alpha = 0.4)
plt.show()
In [ ]:
# degree centrality
dc = nx.degree_centrality(graph)
dc_df = pd.DataFrame.from_dict(dc, orient = 'index')
dc_df.columns = ['degree cent']
dc_df = dc_df.sort_values(by = ['degree cent'])
#dc_df
# betweenness centrality
bc = nx.betweenness_centrality(graph)
bc_df = pd.DataFrame.from_dict(bc, orient = 'index')
bc_df.columns = ['betweenness cent']
bc_df = bc_df.sort_values(by = ['betweenness cent'])
#bc_df
# closeness centrality
cc = nx.closeness_centrality(graph)
cc_df = pd.DataFrame.from_dict(cc, orient = 'index')
cc_df.columns = ['closeness cent']
cc_df = cc_df.sort_values(by = ['closeness cent'])
#cc_df
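In [ ]:
# Since each centrality frame above is sorted ascending, tail() gives a quick
# look at the most central nodes (nlargest would work equally well)
print dc_df.tail(10)
print bc_df.tail(10)
print cc_df.tail(10)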
In [ ]:
# SUMMARY
#network_data
#combined_df
#dc_df, bc_df, cc_df
print "Degree histogram:", nx.degree_histogram(graph)
print "Connected components =", connected_components
print "Greatest component size =", len(Gc)
#describeGraph(ugraph)
print "# of cutsets =", len(cutsets)
print "Min node cut =", min_ncut
print "Min edge cut =", min_ecut
In [ ]: