In [1]:
#%matplotlib inline
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pims
import trackpy as tp
import os
import time
%pylab inline  # pulls numpy and matplotlib names (cm, size, sqrt, axes, ...) into the namespace; later cells rely on this
In [2]:
import h5py
from __future__ import division  # make / perform true (floating-point) division, as in Python 3
myhome = '/home/viva'
datafolder = 'group/viva/Data'
data_date = '2015-09-02'
subfolder = 'Uberscope2--active_motion_in_droplets'
movienumber = '17'
filename = os.path.join(myhome, datafolder, data_date, subfolder, movienumber, 'image0000.h5')
# can't always use the bright background: dividing the whole stack at once can raise a MemoryError (handled below)
bright_bg_filename = os.path.join(myhome, datafolder, data_date, subfolder, '18/image0000.h5')
filename
#filename = ('/home/viva/group/viva/Data/2015-09-02/17/image0000.h5')
Out[2]:
In [3]:
import datetime
today = datetime.date.today().isoformat()
myanalysisfolder = 'group/viva/Analysis'
thismovieanalysisfolder = os.path.join(myhome,
myanalysisfolder,
today,
'data_taken_' + data_date + ',_movie' + movienumber)
thismovieanalysisfolder
Out[3]:
In [4]:
# Calibration (microns per pixel):
#   40x1.0 Uberscope: 926.0540 pixels = 250 um
#   40x1.5 Uberscope: 828.0604 pixels = 150 um
scaling = 150/828.0604  # microns per pixel for the 40x1.5 Uberscope (~0.1811 um/px)
In [5]:
f = h5py.File(filename, 'r') # set file to read
raw_xyt_frames = f['images'] # extract the image frames from the file
bright_bg_f = h5py.File(bright_bg_filename, 'r') # set file to read
bright_bg_frames = bright_bg_f['images'] # extract the image frames from the file
In [6]:
import holopy as hp
plt.imshow(raw_xyt_frames[:,:,100], cmap=cm.gray) # display a frame from the xyt hyperstack movie
Out[6]:
In [7]:
data = f.values()[0][...]  # load the first (only) dataset in the file into memory as a numpy array
number_of_frames = 4000
rawframes = []
# Convert the xyt hyperstack into a list of frames, the format trackpy expects.
# There's probably a faster way to do this (see the sketch in the next cell).
time1 = time.time()
for i in range(number_of_frames):
    framei = hp.core.Image(data[:, :, i], spacing=scaling)
    rawframes.append(framei)
elapsed = time.time() - time1
bg_flag = False  # set to True once the movie has been background-divided
print('Converted xyt hyperstack into trackpy-friendly format in ' + str(elapsed/60.0) + ' minutes.')
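In [ ]:
# A possibly faster way to do the conversion above (a sketch, not part of the
# original run; it produces the same rawframes list): build the list in one
# comprehension instead of appending inside a loop.
rawframes = [hp.core.Image(data[:, :, i], spacing=scaling)
             for i in range(number_of_frames)]
# If the holopy metadata isn't needed, trackpy also accepts plain 2D numpy arrays;
# putting the time axis first gives one array per frame:
#rawframes = list(np.transpose(data[:, :, :number_of_frames], (2, 0, 1)))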
In [8]:
# convert xyt hyperstack data into one median image
bg = np.median(bright_bg_frames,axis=2)
In [9]:
plt.imshow(bg, cmap=cm.gray)
plt.title('Brightfield background')
Out[9]:
In [10]:
del bright_bg_frames # clear some memory
del raw_xyt_frames
In [ ]:
if bg_flag:
    print 'Already backgrounded!'
else:
    #rawframes = frames
    #del frames
    try:
        time1 = time.time()
        # Background-divide every frame at once. numpy converts the whole list into
        # one 3D array here, which is why this step can run out of memory.
        frames = rawframes / bg  # background divide (slow)
        elapsed = time.time() - time1
        bg_flag = True
        print('Backgrounded movie in ' + str(elapsed/60.0) + ' minutes.')
        del rawframes  # clear memory
    except MemoryError:
        elapsed = time.time() - time1
        frames = rawframes
        print('Unable to background-divide images after ' + str(elapsed/60.0) + ' minutes; not enough memory.')
hp.show(frames[100])
if bg_flag:
    plt.title('Backgrounded frame from movie')
else:
    plt.title('Frame from movie (not backgrounded)')
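In [ ]:
# Memory-friendlier alternative to the whole-stack division above (a sketch, not
# part of the original run): divide one frame at a time in place, so numpy never
# has to allocate a second full-size stack. This only runs if the cell above hit
# the MemoryError branch (bg_flag is still False and rawframes still exists).
if not bg_flag:
    for i in range(len(rawframes)):
        rawframes[i] = rawframes[i] / bg
    frames = rawframes
    bg_flag = True
    print('Backgrounded movie frame by frame.')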
In [ ]:
# Find features in a single frame to tune the parameters.
# featuresize (the diameter passed to tp.locate) must be an odd integer.
# invert=True because the features are darker than the bright-field background.
# See the trackpy documentation for advice on choosing these settings.
i = 100
featuresize = 13
f1 = tp.locate(frames[i], diameter=featuresize, invert=True, minmass=1000)
tp.annotate(f1, frames[i])
tp.subpx_bias(f1)
#f1
In [ ]:
# The subpixel bias isn't a good indicator when most of the objects I'm picking up aren't particles.
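In [ ]:
# A sketch (not part of the original run): restrict the subpixel-bias check to
# brighter features so spurious detections don't dominate the histogram. The
# 'mass' column is part of the DataFrame returned by tp.locate; the 2000 cutoff
# is an arbitrary illustration, not a tuned value.
f1_bright = f1[f1['mass'] > 2000]
tp.subpx_bias(f1_bright)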
In [13]:
# Now that we have picked an appropriate featuresize and settings, it's time to go through ALL the frames,
# finding the coordinates of the features in each frame.
f_coords = tp.batch(frames[:], featuresize, invert=True, minmass=1000) # Slow!
#f_coords = pd.read_pickle('C:\\Users\\Viva\\Desktop\\local_copy_of_data\\2015-09-02\\10\\analysis\\f_coords.pkl')
# Documentation: http://soft-matter.github.io/trackpy/generated/trackpy.batch.html
# invert : Set to True if features are darker than background.
# This is an implementation of the Crocker-Grier centroid-finding algorithm.
# Crocker, J.C., Grier, D.G. http://dx.doi.org/10.1006/jcis.1996.0217
In [14]:
# tell me how many frames are in the movie
nframes = f_coords['frame'].max() - f_coords['frame'].min() + 1
nframes
Out[14]:
In [15]:
# We have just built a table of coordinates called f_coords where we have seen particles.
# Now we want to link these together from one frame to the next
# so we can identify the trajectory for each particle.
# Documentation: http://soft-matter.github.io/trackpy/generated/trackpy.link_df.html
t = tp.link_df(features=f_coords, search_range=10, memory=3)
#t = pd.read_pickle('C:\\Users\\Viva\\Desktop\\local_copy_of_data\\2015-09-02\\10\\analysis\\t.pkl')
# search_range gives the maximum distance features can move between frames,
# measured in the same units as the coordinates from tp.batch (pixels here).
# memory gives the maximum number of frames during which a feature can vanish,
# then reappear nearby, and still be considered the same particle.
# This will run faster if the numba package is available.
#trajectory = tp.plot_traj(t, superimpose = frames[500], label=False)
# plots trajectory in pixels
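In [ ]:
# Quick check (a sketch, not part of the original run) for whether numba is
# installed, since the comment above notes that linking runs faster with it.
try:
    import numba
    print('numba is available, so trackpy can use its accelerated linker.')
except ImportError:
    print('numba not found; trackpy falls back to pure-Python linking.')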
In [16]:
#trajectory_plot = tp.plot_traj(t, superimpose = frames[2000], label=False)
In [17]:
t1 = tp.filter_stubs(t, 100)  # discard trajectories that last fewer than 100 frames
# Compare the number of particles in the unfiltered and filtered data.
print 'Before:', t['particle'].nunique()
print 'After:', t1['particle'].nunique()
In [18]:
trajectory_plot = tp.plot_traj(t1, superimpose = frames[2000], label=False)
In [19]:
tracks = t1['particle'].astype(int).unique()  # IDs of the trajectories that survived filtering
print size(tracks)  # how many trajectories that is (size comes from %pylab)
In [20]:
if size(tracks) < 2000:
    print tracks
In [21]:
## Filter out trajectories that don't move very far. Careful! You might need to consider what threshold to use!
def distance(x1, x2, y1, y2):
    return sqrt((x1 - x2)**2 + (y1 - y2)**2)

list_of_long_trajectories = []
list_of_short_trajectories = []
threshold = 4  # microns
for i in range(size(tracks)):
    # "Displacement" here is the diagonal of each trajectory's bounding box, converted to microns.
    max_x = max(t1[t1['particle'] == tracks[i]].x)
    min_x = min(t1[t1['particle'] == tracks[i]].x)
    max_y = max(t1[t1['particle'] == tracks[i]].y)
    min_y = min(t1[t1['particle'] == tracks[i]].y)
    displacement_um = scaling * distance(min_x, max_x, min_y, max_y)
    if displacement_um < threshold:
        list_of_short_trajectories.append(tracks[i])
    else:
        list_of_long_trajectories.append(tracks[i])
# remove each of the undesired trajectories
t_extended = t1.copy()
for i in list_of_short_trajectories:
    t_extended = t_extended[t_extended.particle != i]
# now t_extended only contains trajectories of particles that travel further than the threshold.
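In [ ]:
# Equivalent, vectorized version of the displacement filter above (a sketch, not
# part of the original run): compute each trajectory's bounding-box diagonal with
# one groupby instead of looping over particles. t_extended_alt should match
# t_extended.
spans = t1.groupby('particle').agg({'x': lambda s: s.max() - s.min(),
                                    'y': lambda s: s.max() - s.min()})
bbox_diag_um = scaling * np.sqrt(spans['x']**2 + spans['y']**2)
keep = bbox_diag_um[bbox_diag_um >= threshold].index
t_extended_alt = t1[t1['particle'].isin(keep)]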
In [22]:
trajectory_plot = tp.plot_traj(t_extended, superimpose = frames[2000], label=False)
In [23]:
try:
    axes().set_aspect('equal', 'datalim')  # axes() comes from %pylab
except Exception:
    pass
trajectory_plot = tp.plot_traj(t_extended, mpp=scaling)  # mpp = microns per pixel, so the axes are in microns
In [24]:
error  # deliberately raises a NameError to stop "Run All" here; don't run the following cells unless you intend to save results
In [ ]:
if not os.path.exists(thismovieanalysisfolder):
    os.makedirs(thismovieanalysisfolder)
    print 'Created ' + thismovieanalysisfolder
else:
    print 'Already exists: ' + thismovieanalysisfolder
In [ ]:
f_coords_filename = os.path.join(thismovieanalysisfolder, 'f_coords.pkl')
f_coords.to_pickle(f_coords_filename)
f_coords.head()
In [ ]:
t_filename = os.path.join(thismovieanalysisfolder, 't.pkl')
t.to_pickle(t_filename)
t.head()
In [ ]:
t1_filename = os.path.join(thismovieanalysisfolder, 't1.pkl')
t1.to_pickle(t1_filename)
t1.head()
In [ ]:
t_extended_filename = os.path.join(thismovieanalysisfolder, 't_extended.pkl')
t_extended.to_pickle(t_extended_filename)
t_extended.head()
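In [ ]:
# How a later session could pick the analysis back up (a sketch; assumes the same
# folder layout as above, mirroring the commented-out read_pickle lines earlier):
#f_coords = pd.read_pickle(os.path.join(thismovieanalysisfolder, 'f_coords.pkl'))
#t_extended = pd.read_pickle(os.path.join(thismovieanalysisfolder, 't_extended.pkl'))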
In [ ]:
error  # deliberately raises a NameError to stop "Run All" here; don't run the following cell unless you intend to free these variables
In [ ]:
# clear up some memory, now that the raw data has been saved to hard disk
del f_coords, t
In [ ]: