In [5]:
from scipy.stats import binom
import sdm as sdmlib
import matplotlib.pyplot as plt
In [99]:
_phi_fn_cache = {}  # memoize the Monte Carlo estimates; the scans are expensive

def phi_fn(n, H, r, d, steps=500):
    """Monte Carlo estimate of the expected number of hard locations activated
    (i.e. within radius r) by both of two random addresses that are d bits apart.
    Relies on the global `address_space` defined below; H only enters the cache key."""
    key = (n, H, r, d, steps)
    if key in _phi_fn_cache:
        return _phi_fn_cache[key]
    v = []
    for _ in range(steps):
        bs1 = sdmlib.Bitstring.init_random(n)
        bs2 = bs1.copy()
        bs2.flip_random_bits(d)          # bs2 is bs1 with d random bits flipped
        selected1 = address_space.scan_opencl2(bs1, r)
        selected2 = address_space.scan_opencl2(bs2, r)
        x = len(set(selected1) & set(selected2))   # hard locations activated by both
        v.append(x)
    mu = 1.0 * sum(v) / len(v)
    _phi_fn_cache[key] = mu
    return mu
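phi_fn estimates the overlap of two activation circles by simulation. The same quantity can be computed exactly; the sketch below is my own cross-check, not part of the original run, and assumes (as AddressSpace.init_random suggests) that the H hard locations are independent uniform random bitstrings, so the expected overlap is H times the probability that one random point lies within radius r of both addresses.
In [ ]:
from math import comb

def shared_fraction(n, r, d):
    # Probability that a uniform random address z lies within Hamming distance r
    # of both of two points x, y with dist(x, y) = d. Split the coordinates into
    # the d positions where x and y differ and the n - d positions where they agree:
    # if z flips b of the differing bits and a of the agreeing bits, then
    # dist(z, x) = a + b and dist(z, y) = a + (d - b), and both must be <= r.
    agree_cum = [comb(n - d, 0)]
    for a in range(1, n - d + 1):
        agree_cum.append(agree_cum[-1] + comb(n - d, a))   # prefix sums of C(n-d, a)
    total = 0
    for b in range(d + 1):
        a_max = min(r - b, r - (d - b), n - d)
        if a_max >= 0:
            total += comb(d, b) * agree_cum[a_max]
    return total / 2 ** n

# phi_fn(n, H, r, d) should then be close to H * shared_fraction(n, r, d).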
In [100]:
n = 1000           # bitstring dimension
sample = 1000000   # number of hard locations in the address space
r = 451            # activation radius (Hamming distance)
address_space = sdmlib.AddressSpace.init_random(n, sample)
address_space.opencl_init()
Out[100]:
In [101]:
training_set = 200       # number of noisy copies of the target bitstring written to memory
training_noise = 0.15    # fraction of bits flipped in each copy before writing
training_value = training_set * (1 - 2 * training_noise)
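The (1 - 2*training_noise) factor is, presumably, the expected net +1/-1 vote each noisy write contributes toward the clean bit: a bit is stored correctly with probability 0.85 and flipped with probability 0.15. A quick check of that arithmetic (my own aside, not in the original run):
In [ ]:
# Expected vote of one noisy copy toward the clean value of a given bit,
# assuming independent flips with probability training_noise.
per_write = (1 - training_noise) - training_noise
print(per_write, training_set * per_write)   # roughly 0.7 and 140, i.e. training_value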
In [103]:
activated_hls = phi_fn(n, sample, r, 0, steps=200)   # hard locations activated by a single address (d = 0)
In [104]:
activated_hls = int(activated_hls + 0.5)   # round to the nearest integer
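As a rough sanity check on the simulated value (again my own aside): with a uniform random address space, the number of hard locations within radius r of a fixed point is Binomial(sample, p) with p = P(Binomial(n, 1/2) <= r), so the expectation is easy to compare against.
In [ ]:
# binom.cdf(r, n, 0.5) is the chance that a single random hard location
# falls within Hamming distance r of a fixed address.
print(sample * binom.cdf(r, n, 0.5), activated_hls)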
In [110]:
x_values = range(40, 51)   # noise levels of the read cue, in percent of flipped bits
y_values = []
for x in x_values:
    dist = int(x * n / 100.0)                        # Hamming distance of the noisy cue
    shared = phi_fn(n, sample, r, dist, steps=250)   # locations activated by both cue and target
    shared = int(shared + 0.5)
    print(x, training_value, shared, training_value * shared, activated_hls - shared)
    # CDF of the Binomial(activated_hls - shared, 1/2) count from the non-shared
    # activated locations, evaluated at the accumulated signal training_value * shared
    y = binom.cdf(training_value * shared, activated_hls - shared, 0.5)
    y_values.append(y)
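The same sweep can be reproduced without the OpenCL scans by plugging the analytic shared_fraction sketch from above into the same formula; this is only a cross-check under the uniform-random-address-space assumption.
In [ ]:
# Analytic variant of the sweep above, using shared_fraction instead of phi_fn.
activated_a = int(round(sample * shared_fraction(n, r, 0)))
y_analytic = []
for x in x_values:
    dist = int(x * n / 100.0)
    shared_a = int(round(sample * shared_fraction(n, r, dist)))
    y_analytic.append(binom.cdf(training_value * shared_a, activated_a - shared_a, 0.5))
# y_analytic should track y_values from the Monte Carlo sweep.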
In [111]:
plt.figure(figsize=(6, 6), dpi=300)
plt.plot(x_values, y_values)
plt.grid()
In [21]:
y_values
Out[21]: