In [2]:
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import itertools as it
import matplotlib.gridspec as gridspec
import scipy as sp
import scipy.optimize  # ensures sp.optimize.curve_fit is available in later cells
from scipy.spatial.distance import euclidean
from datetime import datetime
%matplotlib inline
In [3]:
experiment_start = datetime(2015, 12, 23, 14, 48, 0)
experiment_end = datetime(2015, 12, 23, 16, 16, 59)
In [4]:
data_columns = ['tracker_id', 'dB', 'year', 'month', 'day', 'hour', 'minute', 'second']
def as_datetime(x):
    return datetime(x.year, x.month, x.day, x.hour, x.minute, x.second)
def read_data(handle):
    df = pd.read_csv(handle, header=None)
    df.columns = data_columns
    df['date'] = df.apply(as_datetime, axis=1)
    df = df[(df['date'] >= experiment_start) & (df['date'] <= experiment_end)]
    return df
def get_id(head, tail):
    """
    head: the first two characters (first octet) of the tracker ID.
    tail: the last two characters (last octet) of the tracker ID.
    Both must be strings.

    Relies on the global `tracker_ids` set, which is built in the next cell.
    """
    for t in tracker_ids:
        header = t.split(':')[0]
        tailer = t.split(':')[-1]
        if header == head and tailer == tail:
            return t
bt1 = read_data('bt1.csv')
bt2 = read_data('bt2.csv')
bt3 = read_data('bt3.csv')
bt4 = read_data('bt4.csv')
bt5 = read_data('bt5.csv')
bt6 = read_data('bt6.csv')
print(len(bt1), len(bt2), len(bt3), len(bt4), len(bt5), len(bt6))
In [5]:
tracker_ids = set().union(bt1.tracker_id, bt2.tracker_id, bt3.tracker_id,
                          bt4.tracker_id, bt5.tracker_id, bt6.tracker_id)
tracker_ids, len(tracker_ids)
Out[5]:
In [6]:
coords = dict()
coords['BT1'] = (49, 0) # x, y
coords['BT2'] = (-49, 0)
coords['BT3'] = (0, 49)
coords['BT4'] = (0, -1)  # BT4, BT5, and BT6 are co-located at (0, -1)
coords['BT5'] = (0, -1)
coords['BT6'] = (0, -1)
coords[get_id('F4', '06')] = (0, 0)
coords[get_id('F4', '37')] = (6, 0)
coords[get_id('68', 'DB')] = (12, 0)
coords[get_id('F4', '8C')] = (24, 0)
coords[get_id('F4', '22')] = (48, 0)
coords[get_id('F4', '1B')] = (0, 12)
coords[get_id('F4', 'EE')] = (0, 24)
coords[get_id('68', '03')] = (0, 36)
coords[get_id('68', 'FD')] = (0, 48)
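As a quick sanity check of the layout above, the base-station-to-tracker distances used in the next cells can be computed directly with euclidean. A minimal sketch, assuming only the coords dict and get_id helper defined above (the loop is illustrative and not part of the original analysis):

# Distances (in feet) from BT1, at (49, 0), to the stationary trackers along the x-axis.
for head, tail in [('F4', '06'), ('F4', '37'), ('68', 'DB'), ('F4', '8C'), ('F4', '22')]:
    t = get_id(head, tail)
    print(t, euclidean(coords['BT1'], coords[t]))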
In [7]:
bt1_ids = set(bt1.tracker_id)
bt1_ids
Out[7]:
In [8]:
fig = plt.figure(figsize=(9,6))
gs = gridspec.GridSpec(2,3)
ax1 = fig.add_subplot(gs[0,0])
ax2 = fig.add_subplot(gs[0,1])
ax3 = fig.add_subplot(gs[0,2])
ax4 = fig.add_subplot(gs[1,0])
ax5 = fig.add_subplot(gs[1,1])
ax6 = fig.add_subplot(gs[1,2])
axes = fig.get_axes()
bt1['distance'] = bt1.apply(lambda x: euclidean(coords['BT1'], coords[x.tracker_id]) if x.tracker_id in coords.keys() else None, axis=1)
bt2['distance'] = bt2.apply(lambda x: euclidean(coords['BT2'], coords[x.tracker_id]) if x.tracker_id in coords.keys() else None, axis=1)
bt3['distance'] = bt3.apply(lambda x: euclidean(coords['BT3'], coords[x.tracker_id]) if x.tracker_id in coords.keys() else None, axis=1)
bt4['distance'] = bt4.apply(lambda x: euclidean(coords['BT4'], coords[x.tracker_id]) if x.tracker_id in coords.keys() else None, axis=1)
bt5['distance'] = bt5.apply(lambda x: euclidean(coords['BT5'], coords[x.tracker_id]) if x.tracker_id in coords.keys() else None, axis=1)
bt6['distance'] = bt6.apply(lambda x: euclidean(coords['BT6'], coords[x.tracker_id]) if x.tracker_id in coords.keys() else None, axis=1)
bt1.dropna().plot(x='distance', y='dB', kind='scatter', title='BT1', ax=ax1)
bt2.dropna().plot(x='distance', y='dB', kind='scatter', title='BT2', ax=ax2)
bt3.dropna().plot(x='distance', y='dB', kind='scatter', title='BT3', ax=ax3)
bt4.dropna().plot(x='distance', y='dB', kind='scatter', title='BT4', ax=ax4)
bt5.dropna().plot(x='distance', y='dB', kind='scatter', title='BT5', ax=ax5)
bt6.dropna().plot(x='distance', y='dB', kind='scatter', title='BT6', ax=ax6)
for ax in axes:
    ax.set_xlim(-10, 100)
    ax.set_ylim(-100, -50)
plt.tight_layout()
In [9]:
def plot_rolling_average_signal_vs_time(df, name, window=30):
    """
    df: the dataframe containing the data.
    name: the name of the base station; must be a key in `coords`.
    """
    tracker_ids = sorted([x for x in set(df.tracker_id) if x in coords.keys()],
                         key=lambda x: euclidean(coords[name], coords[x]))
    figheight = 3
    figwidth = 3 * len(tracker_ids)
    fig = plt.figure(figsize=(figwidth, figheight))
    gs = gridspec.GridSpec(1, len(tracker_ids))
    for i, t in enumerate(tracker_ids):
        ax = fig.add_subplot(gs[0, i])
        distance = euclidean(coords[name], coords[t])
        df[df.tracker_id == t].set_index('date')['dB']\
            .rolling(window=window, center=True).mean()\
            .plot(marker='o', ax=ax, title='{1}\n({0} ft)'.format(distance, t))
    for ax in fig.get_axes():
        ax.set_xlim(experiment_start, experiment_end)
        ax.set_ylim(-100, -60)
bts = [(bt1, 'BT1'),
(bt2, 'BT2'),
(bt3, 'BT3'),
(bt4, 'BT4'),
(bt5, 'BT5'),
(bt6, 'BT6')]
for (df, name) in bts:
    plot_rolling_average_signal_vs_time(df, name)
    plt.savefig('plots/{0}rolling_avg_signal_vs_time.pdf'.format(name), bbox_inches='tight')
# plot_rolling_average_signal_vs_time(bt1, 'BT1')
# plot_rolling_average_signal_vs_time(bt2, 'BT2')
# plot_rolling_average_signal_vs_time(bt3, 'BT3')
# plot_rolling_average_signal_vs_time(bt4, 'BT4')
# plot_rolling_average_signal_vs_time(bt5, 'BT5')
# plot_rolling_average_signal_vs_time(bt6, 'BT6')
In [10]:
id1 = get_id('F4', 'D2')
# mobile1 = pd.DataFrame()
# mobile1['BT1'] = bt1[bt1.tracker_id == id1].set_index('date')['dB']
# mobile1['BT2'] = bt2[bt2.tracker_id == id1].set_index('date')['dB']
# mobile1
def plot_rolling_mean_mobile_trackers(tracker_id):
    fig = plt.figure(figsize=(9, 12))
    gs = gridspec.GridSpec(6, 1)
    for i, df in enumerate([bt1, bt2, bt3, bt4, bt5, bt6]):
        ax = fig.add_subplot(gs[i, 0])
        # Plot the raw data.
        df[df.tracker_id == tracker_id].set_index('date')['dB'].plot(ax=ax, marker='o', alpha=0.3)
        # Plot the centred rolling mean on top.
        df[df.tracker_id == tracker_id].set_index('date')['dB']\
            .rolling(window=10, center=True).mean().plot(ax=ax)
        ax.set_title('BT{0}'.format(i + 1))
    for ax in fig.get_axes():
        ax.set_ylim(-100, -60)
        ax.set_xlim(experiment_start, experiment_end)
    plt.tight_layout()
plot_rolling_mean_mobile_trackers(id1)
In [11]:
id2 = get_id('F4', 'DD')
plot_rolling_mean_mobile_trackers(id2)
In [12]:
bt4[bt4.tracker_id == id1].set_index('date')['dB'].resample('3T').mean().plot(label='BT4')
bt3[bt3.tracker_id == id1].set_index('date')['dB'].resample('3T').mean().plot(label='BT3')
bt2[bt2.tracker_id == id1].set_index('date')['dB'].resample('3T').mean().plot(label='BT2')
bt1[bt1.tracker_id == id1].set_index('date')['dB'].resample('3T').mean().plot(label='BT1')
plt.legend()
Out[12]:
In [13]:
bt6[bt6.tracker_id == id2].set_index('date')['dB'].resample('3T').mean().plot(label='BT6')
bt5[bt5.tracker_id == id2].set_index('date')['dB'].resample('3T').mean().plot(label='BT5')
bt4[bt4.tracker_id == id2].set_index('date')['dB'].resample('3T').mean().plot(label='BT4')
#bt3[bt3.tracker_id == id2].set_index('date')['dB'].resample('3T').mean().plot(label='BT3')
#bt2[bt2.tracker_id == id2].set_index('date')['dB'].resample('3T').mean().plot(label='BT2')
#bt1[bt1.tracker_id == id2].set_index('date')['dB'].resample('3T').mean().plot(label='BT1')
plt.legend()
Out[13]:
In [14]:
combined = pd.concat([bt1, bt2, bt3, bt4, bt5, bt6])
# combined['dB'] = -combined['dB']
combined['power'] = combined['dB'].apply(lambda x: 10 ** (x/10))
# set(combined['distance'].dropna().values)
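A quick check of the dBm-to-power conversion applied above (assuming the dB column is measured in dBm, so the result is in milliwatts); this check is illustrative only:

# -70 dBm should correspond to 1e-7 mW under power = 10 ** (dBm / 10).
print(10 ** (-70 / 10))  # 1e-07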
In [15]:
# distance = combined.groupby('distance')['dB'].describe().unstack().index
# dB = combined.groupby('distance')['dB'].describe().unstack()['mean'].values
# print(distance)
# print(dB)
distance = combined.sort_values('distance').dropna()['distance']
distance_log = combined.sort_values('distance').dropna()['distance'].apply(np.log10)
dB = combined.sort_values('distance').dropna()['dB']
power = combined.sort_values('distance').dropna()['power']
In [16]:
def paper_func(d, n, A):
    """
    Reference: http://www.rn.inf.tu-dresden.de/dargie/papers/icwcuca.pdf
    """
    return -(10 * n) * np.log10(d) - A
func = paper_func
opt_parms, parm_cov = sp.optimize.curve_fit(func, xdata=distance, ydata=dB)
opt_parms# , parm_cov
Out[16]:
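For reference, paper_func above is the log-distance path-loss form from the cited paper, and the inverse used later in the notebook follows directly by solving for d (a restatement for clarity, not new analysis):

$$\mathrm{RSSI}(d) = -10\,n\,\log_{10}(d) - A
\qquad\Longleftrightarrow\qquad
d(\mathrm{RSSI}) = 10^{-(\mathrm{RSSI} + A)/(10\,n)},$$

so opt_parms is the fitted pair $(n, A)$.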
In [17]:
combined.dropna().plot(x='distance', y='dB', kind='scatter')
plt.plot(distance, func(distance, *opt_parms))
Out[17]:
In [18]:
# combined.groupby('distance')['power'].describe().unstack()
In [47]:
# import pymc3 as pm
# from theano import tensor
# with pm.Model() as model:
#     n = pm.Normal('n', mu=1, sd=1)
#     A = pm.Normal('A', mu=70, sd=1)
#     sigma = pm.HalfCauchy('sigma', beta=10, testval=1.)
#     likelihood = pm.Normal('dB',
#                            mu=paper_func(combined.dropna()['dB'], n, A),
#                            sd=sigma,
#                            observed=combined.dropna()['distance'])
In [46]:
# with model:
#     start = {'A': 70, 'n': 1}
#     step = pm.NUTS()
#     trace = pm.sample(10000, step, start=start)
In [48]:
# pm.traceplot(trace)
In [49]:
# trace[2000]
In [50]:
def inverse_func(rssi, n, A):
    """
    Inverse of paper_func: distance as a function of RSSI.
    """
    exponent = -(A + rssi) / (10 * n)
    return np.power(10, exponent)
def constraints(d):
    if d > 96:
        return 96
    elif d < 1:
        return 1
    else:
        return d
def constrained_inverse_func(rssi, n, A):
    """
    Constrain distances to what we've measured.
    Constraints are hard-coded: from 1 ft to 96 ft.
    """
    return pd.Series(inverse_func(rssi, n, A)).apply(constraints).values
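Equivalently, the same 1–96 ft clamp can be written with np.clip; a vectorized sketch under the same hard-coded bounds (constrained_inverse_func_clip is an addition, not part of the original analysis):

def constrained_inverse_func_clip(rssi, n, A):
    # Clamp the inverted distances to the calibrated 1-96 ft range.
    return np.clip(inverse_func(rssi, n, A), 1, 96)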
In [51]:
inv_parms, parm_cov = sp.optimize.curve_fit(inverse_func, xdata=dB, ydata=distance)
inv_parms, parm_cov
Out[51]:
In [52]:
combined.plot(x='dB', y='distance', kind='scatter')
dBs = np.arange(min(dB), max(dB))
plt.plot(dBs, inverse_func(dBs, *inv_parms))
Out[52]:
In [53]:
fig = plt.figure()
ax = fig.add_subplot(111)
combined.groupby('distance')['dB'].mean().plot(marker='^', yerr=combined.groupby('distance')['dB'].std(), ax=ax,
label='mean', color='blue')
combined.plot(x='distance', y='dB', kind='scatter', ax=ax, label='data', color='red', alpha=0.1)
plt.ylabel('negative signal (dB)')
ds = np.arange(min(distance), max(distance))
ax.plot(ds, func(ds, *opt_parms), label='fit', color='green')
plt.legend()
plt.tight_layout()
In [54]:
percentiles = pd.DataFrame()
# for i in range(1,11):
# percentiles[str(i*10)] = combined.groupby('distance')['dB'].quantile(i*10/100)
for i in range(1, 20):
percentiles[str(i*5)] = combined.groupby('distance')['dB'].quantile(i*5/100)
percentiles['2.5'] = combined.groupby('distance')['dB'].quantile(0.025)
percentiles['97.5'] = combined.groupby('distance')['dB'].quantile(0.975)
percentiles[['2.5', '50', '97.5']].plot(marker='o')
Out[54]:
In [55]:
parms_low, _ = sp.optimize.curve_fit(func, xdata=percentiles.index, ydata=percentiles['25'])
parms_med, _ = sp.optimize.curve_fit(func, xdata=percentiles.index, ydata=percentiles['50'])
parms_high, _ = sp.optimize.curve_fit(func, xdata=percentiles.index, ydata=percentiles['75'])
In [56]:
ds = np.arange(min(distance), max(distance))
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(ds, func(ds, *parms_low), color='black', ls='--')
ax.plot(ds, func(ds, *parms_med), color='black', ls='--')
ax.plot(ds, func(ds, *parms_high), color='black', ls='--')
ax.fill_between(ds, func(ds, *parms_low), func(ds, *parms_high), color='yellow', alpha=0.3)
percentiles[['2.5', '10', '50', '90', '97.5']].plot(marker='o', ax=ax, alpha=0.3)
Out[56]:
In [30]:
# ds = np.arange(min(distance), max(distance))
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(dBs, inverse_func(dBs, *parms_low), color='blue', label='low')
ax.plot(dBs, inverse_func(dBs, *parms_med), color='green', label='medium')
ax.plot(dBs, inverse_func(dBs, *parms_high), color='red', label='high')
ax.fill_between(dBs, inverse_func(dBs, *parms_low), inverse_func(dBs, *parms_high), color='yellow', alpha=0.3)
# percentiles.plot(marker='o', ax=ax)
percentiles.reset_index().plot(x='2.5', y='distance', kind='scatter', ax=ax, color='blue')
percentiles.reset_index().plot(x='50', y='distance', kind='scatter', ax=ax, color='green')
percentiles.reset_index().plot(x='97.5', y='distance', kind='scatter', ax=ax, color='red')
ax.set_xlabel('dBm')
ax.set_ylabel('distance')
ax.legend()
Out[30]:
In [31]:
inverse_func(-70, *parms_low), inverse_func(-70, *parms_high)
Out[31]:
In [32]:
inverse_func(-95, *parms_low), inverse_func(-95, *parms_high)
Out[32]:
In [33]:
from shapely.geometry import Point
In [34]:
# Get data
def beacon_data(tracker_id, base_station_data):
bcn_idx = base_station_data[base_station_data.tracker_id == tracker_id].index
bcn_bt = base_station_data.loc[bcn_idx]
bcn_bt['min_dist'] = bcn_bt['dB'].apply(lambda x:inverse_func(x, *parms_low))
bcn_bt['max_dist'] = bcn_bt['dB'].apply(lambda x:inverse_func(x, *parms_high))
bcn_bt = bcn_bt.set_index('date').resample('5T')
return bcn_bt
bcn1_bt1 = beacon_data(id1, bt1)
bcn1_bt2 = beacon_data(id1, bt2)
bcn1_bt3 = beacon_data(id1, bt3)
bcn1_bt4 = beacon_data(id1, bt4)
bcn1_bt5 = beacon_data(id1, bt5)
bcn1_bt6 = beacon_data(id1, bt6)
In [35]:
bcn1_bt1['dB'].plot()
Out[35]:
In [36]:
def min_and_max_distances(beacon_names, tracker_id, base_station_data):
    assert isinstance(beacon_names, list)
    # assert isinstance(tracker_ids, list)
    assert isinstance(base_station_data, list)
    bcn_min = pd.DataFrame()
    bcn_max = pd.DataFrame()
    for i, name in enumerate(beacon_names):
        # Get min and max data from the base station being iterated over.
        new_min = beacon_data(tracker_id, base_station_data[i])['min_dist']
        new_max = beacon_data(tracker_id, base_station_data[i])['max_dist']
        # Check to see which index is longer.
        if len(bcn_min.index) > len(new_min.index):
            # Re-index based on bcn_min.
            new_min = new_min.reindex(bcn_min.index)
            new_max = new_max.reindex(bcn_max.index)
        elif len(new_min.index) > len(bcn_min.index):
            bcn_min = bcn_min.reindex(new_min.index)
            bcn_max = bcn_max.reindex(new_max.index)
        bcn_min[name] = new_min
        bcn_max[name] = new_max
    # Constrain the distances to between 1 and 96 ft.
    # Hard-coded on the assumption that calibration did not go below or above those limits.
    bcn_min = bcn_min.applymap(lambda x: 1 if x < 1 else 96 if x > 96 else x)
    bcn_max = bcn_max.applymap(lambda x: 1 if x < 1 else 96 if x > 96 else x)
    return bcn_min, bcn_max
beacon_names = ['BT{0}'.format(i) for i in range(1,7)]
base_station_data = [bt1, bt2, bt3, bt4, bt5, bt6]
bcn1_min, bcn1_max = min_and_max_distances(beacon_names, id1, base_station_data)
bcn2_min, bcn2_max = min_and_max_distances(beacon_names, id2, base_station_data)
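For comparison, the index alignment done by hand above can also be delegated to pandas via pd.concat with its default outer join; a sketch assuming the same beacon_data helper (min_and_max_distances_concat is an alternative formulation, not what was used here):

def min_and_max_distances_concat(beacon_names, tracker_id, base_station_data):
    # pd.concat aligns the per-base-station series on the union of their timestamps.
    frames = [beacon_data(tracker_id, df) for df in base_station_data]
    bcn_min = pd.concat([f['min_dist'] for f in frames], axis=1, keys=beacon_names)
    bcn_max = pd.concat([f['max_dist'] for f in frames], axis=1, keys=beacon_names)
    # Same hard-coded 1-96 ft constraint as above.
    return bcn_min.clip(1, 96), bcn_max.clip(1, 96)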
In [37]:
id1, id2
Out[37]:
In [38]:
# Try plotting one.
doi = datetime(2015,12,23,14,45,0) # date of interest
bcn1_min.loc[doi]
# bcn1_max.loc[doi]
Out[38]:
In [39]:
from descartes.patch import PolygonPatch
def area_around_base(base_station_name, beacon_min, beacon_max, time):
    bt_min = Point(coords[base_station_name]).buffer(beacon_min.loc[time].fillna(0)[base_station_name])
    bt_max = Point(coords[base_station_name]).buffer(beacon_max.loc[time].fillna(0)[base_station_name])
    # bt_area = bt_max.symmetric_difference(bt_min)
    bt_area = bt_max
    return bt_area
bt1_area = area_around_base('BT1', bcn1_min, bcn1_max, doi)
bt2_area = area_around_base('BT2', bcn1_min, bcn1_max, doi)
bt3_area = area_around_base('BT3', bcn1_min, bcn1_max, doi)
bt4_area = area_around_base('BT4', bcn1_min, bcn1_max, doi)
bt5_area = area_around_base('BT5', bcn1_min, bcn1_max, doi)
bt6_area = area_around_base('BT6', bcn1_min, bcn1_max, doi)
# bcn1_area = bt1_area.intersection(bt2_area).intersection(bt3_area).intersection(bt4_area).intersection(bt5_area).intersection(bt6_area)
# bcn1_area
# bcn1_area = bt2_area.intersection(bt3_area).intersection(bt4_area).intersection(bt6_area)
fig = plt.figure()
ax = fig.add_subplot(111)
p2 = PolygonPatch(bt2_area, color='blue', alpha=0.3, label='BT2')
p4 = PolygonPatch(bt4_area, color='red', alpha=0.3, label='BT4')
p6 = PolygonPatch(bt6_area, color='green', alpha=0.3, label='BT6')
ax.add_patch(p2)
ax.add_patch(p4)
ax.add_patch(p6)
ax.autoscale()
ax.set_title('{0}, {1}:{2}'.format(id1, doi.hour, doi.minute))
ax.legend()
Out[39]:
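One way to turn the overlapping discs into a single position estimate is to intersect them and take the centroid; a rough sketch using the areas computed above (this estimator is an addition, not part of the original analysis):

from functools import reduce

# Intersect the three plotted discs and report the centroid as a crude position fix.
overlap = reduce(lambda a, b: a.intersection(b), [bt2_area, bt4_area, bt6_area])
if not overlap.is_empty:
    print('estimated (x, y):', overlap.centroid.x, overlap.centroid.y)
else:
    print('no common overlap at this time point')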
In [40]:
bcn2_max.loc[doi]
Out[40]:
In [41]:
bt1_area = area_around_base('BT1', bcn2_min, bcn2_max, doi)
bt2_area = area_around_base('BT2', bcn2_min, bcn2_max, doi)
bt3_area = area_around_base('BT3', bcn2_min, bcn2_max, doi)
bt4_area = area_around_base('BT4', bcn2_min, bcn2_max, doi)
bt5_area = area_around_base('BT5', bcn2_min, bcn2_max, doi)
bt6_area = area_around_base('BT6', bcn2_min, bcn2_max, doi)
bcn2_area = (bt2_area).intersection(bt3_area).intersection(bt4_area).intersection(bt5_area).intersection(bt6_area)
fig = plt.figure()
ax = fig.add_subplot(111)
p2 = PolygonPatch(bt2_area, color='blue', alpha=0.3, label='BT2')
p3 = PolygonPatch(bt3_area, color='yellow', alpha=0.3, label='BT3')
p4 = PolygonPatch(bt4_area, color='red', alpha=0.3, label='BT4')
p5 = PolygonPatch(bt5_area, color='orange', alpha=0.3, label='BT5')
p6 = PolygonPatch(bt6_area, color='green', alpha=0.3, label='BT6')
ax.add_patch(p2)
ax.add_patch(p3)
ax.add_patch(p4)
ax.add_patch(p5)
ax.add_patch(p6)
ax.autoscale()
ax.set_title('{0}, {1}:{2}'.format(id2, doi.hour, doi.minute))
ax.axvline(0)
ax.axhline(0)
ax.legend()
Out[41]:
In [42]:
bcn2_p = PolygonPatch(bcn2_area)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.add_patch(bcn2_p)
ax.set_xlim(-150, 100)
ax.set_ylim(-100, 150)
ax.axvline(0)
ax.axhline(0)
Out[42]:
In [43]:
circles = [area_around_base('BT{0}'.format(i), bcn2_min, bcn2_max, doi) for i in range(1, 7)]
fig = plt.figure()
ax = fig.add_subplot(111)
# Intersect all six base-station circles to get the feasible region for this beacon.
final_c = circles[0]
for c in circles[1:]:
    final_c = final_c.intersection(c)
if not final_c.is_empty:
    ax.add_patch(PolygonPatch(final_c))
ax.relim()
ax.autoscale()