In [4]:
%run compare_nmi.py
density: 0
******************************
update_rule
Step: 55
Best Cost: 0.00477062597656
Time: 2.286550998687744
Loss: [[ 2436.70269724]]
NMI: 0.413386168644
******************************
abs_adam
Step: 226
Best Cost: 0.00477118554687
Time: 2.481387138366699
Loss: [[ 2445.58704744]]
NMI: 0.406849718649
density: 0
******************************
update_rule
Step: 104
Best Cost: 0.00479587304688
Time: 2.1448729038238525
Loss: [[ 2591.05967952]]
NMI: 0.283783215211
******************************
abs_adam
---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
/Users/nuku02/python/sscomdetection/compare_nmi.py in <module>()
104 start = time.time()
105 W, H, best_cost, cost_list, H_list = model.fit_and_transform(edge_list, const,
--> 106 threshold=threshold, steps=max_iters)
107 elapsed = time.time() - start
108 loss = calculate_loss(edge_list, W, H, mlambda, const)
/Users/nuku02/python/sscomdetection/sscd.py in fit_and_transform(self, edge_list, const_pairs, weights, const_weights, steps, log_dir, threshold)
55 for s in range(steps):
56 cost, sm, _ = self.sess.run([self.cost, self.summary, self.opt])
---> 57 sup_term = self.sess.run(self.sup_term)
58 self.writer.add_summary(sm, s)
59 mean_cost = cost / (n_nodes * n_nodes)
/Users/nuku02/.pyenv/versions/3.5.2/lib/python3.5/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
764 try:
765 result = self._run(None, fetches, feed_dict, options_ptr,
--> 766 run_metadata_ptr)
767 if run_metadata:
768 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
/Users/nuku02/.pyenv/versions/3.5.2/lib/python3.5/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
962 if final_fetches or final_targets:
963 results = self._do_run(handle, final_targets, final_fetches,
--> 964 feed_dict_string, options, run_metadata)
965 else:
966 results = []
/Users/nuku02/.pyenv/versions/3.5.2/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1012 if handle is None:
1013 return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1014 target_list, options, run_metadata)
1015 else:
1016 return self._do_call(_prun_fn, self._session, handle, feed_dict,
/Users/nuku02/.pyenv/versions/3.5.2/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1019 def _do_call(self, fn, *args):
1020 try:
-> 1021 return fn(*args)
1022 except errors.OpError as e:
1023 message = compat.as_text(e.message)
/Users/nuku02/.pyenv/versions/3.5.2/lib/python3.5/site-packages/tensorflow/python/client/session.py in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
1001 return tf_session.TF_Run(session, options,
1002 feed_dict, fetch_list, target_list,
-> 1003 status, run_metadata)
1004
1005 def _prun_fn(session, handle, feed_dict, fetch_list):
KeyboardInterrupt:
In [6]:
elist = np.array(edge_list)
In [8]:
elist.max()
Out[8]:
499
In [9]:
degrees = []
for i in range(500):
    degrees.append((elist == i).sum())
In [10]:
degrees
Out[10]:
[4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
4,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
8,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
10,
12,
12,
12,
12,
12,
12,
12,
12,
12,
12,
12,
12,
12,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
14,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
16,
18,
18,
18,
18,
18,
18,
18,
18,
18,
18,
20,
20,
20,
20,
20,
20,
20,
20,
22,
22,
22,
22,
22,
22,
22,
22,
24,
24,
24,
24,
24,
24,
26,
26,
26,
26,
28,
28,
28,
28,
28,
28,
30,
30,
30,
32,
32,
32,
32,
34,
34,
34,
34,
36,
36,
36,
36,
38,
38,
40,
40,
42,
44,
46,
46,
48,
58,
58,
64,
66,
68,
68,
78,
80,
82,
86,
90,
94]
In [12]:
const = pd.read_pickle("data/const/diff_degree_LRF_500_5_50_100_100_0.3_0.3.pkl")
In [15]:
for i, j in const:
    print(degrees[i], degrees[j])
4 80
4 66
4 46
4 36
4 36
4 28
4 24
4 24
4 18
4 16
4 16
4 16
4 16
4 16
4 14
4 78
4 64
4 58
4 40
4 38
4 32
4 28
4 28
4 24
4 20
4 20
4 18
4 18
4 18
4 16
4 48
4 42
4 38
4 30
4 28
4 26
4 26
4 24
4 22
4 22
4 20
4 20
4 20
4 18
4 18
4 90
4 86
4 82
4 58
4 46
4 36
4 34
4 32
4 32
4 26
4 26
4 24
4 22
4 22
4 22
4 94
4 68
4 68
4 44
4 40
4 36
4 34
4 34
4 34
4 32
4 30
4 30
4 28
4 28
4 24
In [ ]:
Content source: nukui-s/sscomdetection
Similar notebooks: