Import the investigator package of the PyOTIC software


In [ ]:
# Import os to easily join names of filepaths
import os

# Add the path of the PyOTIC Software to the system path
# Adjust this path to where the PyOTIC Software package is located
import sys
sys.path.append('../../')

# Load the investigator package
import pyoti
pyoti.info()

# Create an experiment
experiment = pyoti.create_experiment()

Create experiment file (or open previously saved one)


In [ ]:
# Choose the path where the experiment should be created (or opened from)
#
# datadir: The path to where the experiment (and the data) are located
# datafile: The name of the file that contains the data. Here it is only used to generate dbfile.
#           The data is loaded further down upon creation of a Record.
# dbfile: The name of the database file the experiment is saved to (or loaded from).
datadir = '../data/'
datafile = 'B01.bin'

# For the name of the experiment file, replace the extension '.bin' with '.fs'
dbfile = os.path.join(datadir, datafile.replace('.bin', '.fs'))

# Create/open the experiment dbfile
experiment.open(dbfile)

#datadir = '/srv/files/common/Practicals/SingleMoleculeBiophysics SS2015/ASWAD 2015-09-24/'
#datadir = 'Z:\\Practicals\\SingleMoleculeBiophysics SS2015\\ASWAD 2015-09-24\\'

In [ ]:
# show status of Records, Views, MultiRegions, and Modifications in experiment
experiment.print_status()

In [ ]:
# cleanup/pack database file
experiment.cleanup()

In [ ]:
# save the state of the experiment in the database file
experiment.save(pack=True)

In [ ]:
# revert changes since last commit of experiment
experiment.abort()

In [ ]:
# close database file
experiment.close()
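
A typical end-of-session sequence simply combines the housekeeping calls shown above (a sketch, not an additional API): inspect the experiment, save its current state, and close the database file.


In [ ]:
# End-of-session housekeeping, using only the calls from the cells above
experiment.print_status()

# Commit the current state and pack the database file
experiment.save(pack=True)

# Close the database file
experiment.close()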

Create a calibration


In [ ]:
# Choose the calibration type that should be created.
# See 'pyoti/etc/calibration.cfg' for known types.
# If you choose an unknown type, a generic calibration is created.
calibration_type='pyoticf'

# You can provide a calibration file from which previously stored calibration values are loaded.
# Make sure to set the corresponding calibration_type, so that the provided file can be read.
calibdir = os.path.join("..", "calibration", "converted_data")
#calibfile = 'B01__hc_results.txt'
calibfile = datafile.replace('.bin', '__hc_results.txt')

# Create a calibration and assign it to the variable 'calibration'
calibration = pyoti.create_calibration(calibration_type=calibration_type, filename=calibfile, directory=calibdir)

#calibdir = os.path.join(datadir, 'analysis')
#calibdir = os.path.join('/home/tobiasj/experiments/ASWAD/2013-12-18/flow_cell_c', 'hdcalibration/analysis')
#calibdir = '/media/tobiasj/cbd_drive/data/ASWAD/2015-10-28 - unzipping/analysis/'

Create record(s) and add to experiment

Either: Define a generic function to read in the data and create a record:


In [ ]:
# Define a name for the record (defaults to 'default')
name='alpha'

# Define a function that is used to read in the data you want to analyze.
# The function needs to receive at least the positional parameter 'filename'.
# The return value of the function needs to be the data as a numpy array.
# You can (besides other options) use the functions that numpy offers to read in data:
# http://docs.scipy.org/doc/numpy/reference/routines.io.html
#
# One example, to read in data from a text file with 5 header lines followed by the data,
# could look like this:

import numpy as np
import pyoti.data.labview as lv
import os

def load_data(filename):
    #data = np.loadtxt(filename, skiprows=5)
    data = lv.read_labview_bin_data(filename)[:,0:3]
    return data

# Define the samplingrate (either provide a function or simply a variable).
# The function gets executed once, upon initialisation of the record. The
# return value of the function (or the value of the variable) gets stored in
# the record object:
def samplingrate():
    samplingrate = 40000.0
    return samplingrate
#samplingrate = 40000.0

# Name the traces that the load_data() function returns. Make sure the
# traces properly describe the data returned by load_data().
# This definition takes precedence over the traces defined in the
# configfile (see below).
traces = [ 'psdX', 'psdY', 'psdZ' ]

# You can provide a configfile, which, for instance, defines the traces returned by load_data().
# If not provided, configfile defaults to '../pyoti/etc/GenericDataFile.cfg'.
# You could also create your own setup-specific configfile, using GenericDataFile as a template.
# If you define a cfgfile, make sure to also pass the parameter cfgfile to the function call below,
# like: experiment.create_record(cfgfile=cfgfile, ...)
#cfgfile = '../pyoti/etc/record/GenericDataFile.cfg' 

record = experiment.create_record(name=name, calibration=calibration,
                                  traces=traces, load_data=load_data,
                                  filename=datafile, directory=datadir,
                                  samplingrate=samplingrate)
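
For reference, the plain-text variant mentioned in the comments above could look like the following sketch. The file name 'B01.txt', the five header lines, and the column selection are assumptions used for illustration only.


In [ ]:
import numpy as np

def load_text_data(filename):
    # Skip the (assumed) five header lines and keep the first three columns,
    # corresponding to the traces 'psdX', 'psdY' and 'psdZ'
    data = np.loadtxt(filename, skiprows=5)[:, 0:3]
    return data

# The record would then be created analogously to the cell above:
#record = experiment.create_record(name=name, calibration=calibration,
#                                  traces=traces, load_data=load_text_data,
#                                  filename='B01.txt', directory=datadir,
#                                  samplingrate=samplingrate)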

Or: Read in a record for a predefined setup:


In [ ]:
# Define a name for the record (defaults to 'default')
name='alpha'

# Choose the file where standard values for the Record are defined
cfgfile = '../pyoti/etc/record/ASWAD.cfg'

experiment.create_record(name=name, calibration=calibration, cfgfile=cfgfile, filename=datafile, directory=datadir)

In [ ]:
# Create/load additional records (e.g. extra_unzipping or beadscan)
name = 'beta'
extradatadir = datadir
extradatafile = 'B01b.bin'

experiment.create_record(name=name, calibration=calibration, cfgfile=cfgfile, filename=extradatafile, directory=extradatadir)
#experiment.records.beta.calibration = experiment.records.alpha.calibration

In [ ]:
name = 'generic'
group = 'modification'
parent = 'used'

traces_apply=['psdX', 'psdYZ']
extra_mod_params='factor'
import numpy as np

def modify(self, data, samples, data_traces, data_index, mod_index):
    #   data: contains the data, indexed by samples and data_traces
    #   samples: the index of the samples contained in data, as requested
    #       by the user/process that called _get_data()
    #   data_traces: a list of the traces (str) present in data, as requested
    #       by the user/process that called _get_data()
    #   data_index: data[:, data_index] gives the data that is modified by
    #       this modification (defined by traces_apply)
    #   mod_index: numpy.array(self.traces_apply)[mod_index] gives the traces
    #       that are present in data and are also modified by this modification
    #       self.mod_params[mod_index] gives the mod_params of those traces
    #       self.mod_params gives a list of all available mod_parameters
    #       self.get_mod_params(names=...) returns a list of the
    #       mod_parameters with names=...
    #       self.<name> gives the mod_parameter with the name <name>
    #
    # Modify and return the data ...
    print('Modifying the data ...')
    #
    # A simple example of a modification (subtraction of the mod_params,
    # multiplied by the extra_mod_param 'factor', from the traces):
    #data[:, data_index] -= self.mod_params[np.newaxis, mod_index] * self.factor
    #
    return data

experiment.add_group(name, parent, group_type=group, adjust=True, modify=modify, traces_apply=traces_apply, mod_params=extra_mod_params)
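
As a concrete illustration, the subtraction that is only sketched in the comments above could be implemented as follows. This is a minimal sketch that simply activates the commented example line; it is not tied to any particular dataset.


In [ ]:
# A minimal sketch of a modify() that performs the subtraction described in
# the comments above: the mod_params (scaled by the extra_mod_param 'factor')
# are subtracted from the traces this modification applies to.
import numpy as np

def modify(self, data, samples, data_traces, data_index, mod_index):
    data[:, data_index] -= self.mod_params[np.newaxis, mod_index] * self.factor
    return data

# It would be registered exactly as in the cell above:
#experiment.add_group(name, parent, group_type=group, adjust=True, modify=modify,
#                     traces_apply=traces_apply, mod_params=extra_mod_params)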

Analyse and modify data


In [ ]:
name = 'used'
group = 'selection'
parent = 'alpha'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'used_beta'
group = 'selection'
parent = 'beta'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'offset'
group = 'offset'
parent = 'used'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'offset_beta'
group = 'offset'
parent = 'used_beta'

experiment.add_group(name, parent, group_type=group)

In [ ]:
experiment.concatenate('offset_concatenated', 'offset', 'offset_beta')

In [ ]:
name = 'touchdown'
group = 'touchdown'
parent = 'offset'

experiment.add_group(name, parent, group_type=group)

In [ ]:
experiment.replace_in('touchdown', 'offset', 'offset_concatenated')
experiment.replace_in('touchdown_mod', 'offset', 'offset_concatenated')

In [ ]:
name = 'beadscan'
group = 'beadscan'
parent = 'touchdown'

experiment.add_group(name, parent, group_type=group)

In [ ]:
experiment.remove('beadscan')
experiment.remove('beadscan_mod')

In [ ]:
name = 'attachment'
group = 'attachment'
parent = 'beadscan'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'attachment_2nd'
group = 'attachment'
parent = 'attachment'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'baseline'
group = 'baseline'
parent = 'attachment_2nd'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'rotation'
group = 'rotation'
parent = 'baseline'

experiment.add_group(name, parent, group_type=group)

In [ ]:
name = 'rotation_2nd'
group = 'rotation'
parent = 'rotation'

experiment.add_group(name, parent, group_type=group)

Select data to generate the results


In [ ]:
name = 'results'
group = 'selection'
parent = 'rotation_2nd'

# traces used to select data from
traces = ['psdXYZ', 'positionXYZ']

results_region = experiment.add_group(name, parent, group_type=group, traces=traces)

# Enable caching for results region, for faster data return
experiment.set_cached_region(name)

In [ ]:
# Choose resolution for presentation of data (extension, force)
resolution = 1000 # points/s resolution

# Create Result objects to obtain force and extension
tether = pyoti.create_tether(region=results_region, resolution=resolution)

# Show the autodetected minima, maxima and sections
#tether._sf.highest_frequency=32
#tether._sf.reduce_false_positives = True
#tether._sf.compare_time = 0.005
tether.update()
tether.init_rfig(legend=True)

In [ ]:
# Create force extension curves
prefix = ''.join((os.path.splitext(os.path.basename(experiment.filename))[0], "_"))
resultsdir = os.path.join("..", "results")

# Save force extension stress/release pair plots
tether.save_force_extension_plots(directory=resultsdir,
                                  file_prefix=prefix,
                                  bps=9018)

# Display saved force extension stress/release pair plots
# pyoti.gui.browse_images(directory=resultsdir, prefix=prefix)

In [ ]:
# Display force extension stress/release pair plots
tether.init_fe_fig()
tether.show_force_extension_plots(bps=1399, autolimit=False)

In [ ]:
# Plot the timecourse of the extension
import matplotlib.pyplot as plt

plt.close('all')
plt.figure()
plt.grid(True)

# Timevector, extension and stress/release pairs
t = tether.timevector
e = tether.extension
pls, pss = tether.stress_release_pairs()

# Plot all stress/release extension/timevector sections
for pl, ps in zip(pls, pss):
    plt.plot(t[pl], e[pl] * 1000, 'g.', ms=1.0)
    plt.plot(t[ps], e[ps] * 1000, 'r.', ms=1.0)

plt.title('Timecourse of extension')
plt.ylabel("Extension (nm)")
plt.xlabel("Time (s)")

plt.show(plt.gcf())
plt.close()

In [ ]:
plt.close('all')
plt.figure()
plt.grid(True)

fXYZ = tether.forceXYZ
rpl_ = tether.sections(direction='right', cycle='stress')
lpl_ = tether.sections(direction='left', cycle='stress')

for rpl in rpl_:
    plt.plot(fXYZ[rpl, 1] * 1000, fXYZ[rpl, 2] * 1000, 'r')
for lpl in lpl_:
    plt.plot(fXYZ[lpl, 1] * 1000, fXYZ[lpl, 2] * 1000, 'g')

excited_axis = results_region.excited_axis
plt.xlabel(''.join(("Force (", excited_axis, ")")))
plt.ylabel("Force (Z)")
plt.title("Y vs. Z")
plt.show(plt.gcf())
plt.close()