In [1]:
import argparse
import numpy as np
import matplotlib as mpl

import matplotlib.pyplot as plt
from BayesianBlocks import bayesian_blocks
import MySQLdb
import datetime as dt
%matplotlib inline

In [129]:
def rolling_avg_rating(rating):
    """Trailing mean of the previous `roller` ratings (cumulative mean
    until the window fills); element 0 is the first rating itself."""
    limited_avg = [0]*len(rating)
    limited_avg[0] = float(rating[0])
    roller = 40  # window size, in number of reviews
    for k in range(1, len(rating)):
        if k < roller:
            limited_avg[k] = float(np.mean(rating[:k]))
        else:
            limited_avg[k] = float(np.mean(rating[k-roller:k]))
    return limited_avg
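
The loop above is O(n * roller); a vectorized equivalent can be built from a
cumulative sum. A sketch (rolling_avg_rating_vec is a name introduced here,
not from the notebook):

def rolling_avg_rating_vec(rating, roller=40):
    # trailing-window mean via cumulative sums; reproduces the loop above,
    # including the cumulative mean before the window fills
    r = np.asarray(rating, dtype=float)
    csum = np.concatenate([[0.0], np.cumsum(r)])
    k = np.arange(len(r))
    lo = np.maximum(k - roller, 0)
    out = (csum[k] - csum[lo]) / np.maximum(k - lo, 1)
    out[0] = r[0]  # match the loop's special case at k == 0
    return out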

In [130]:
def avg_rating(rating):
    """Lagged cumulative mean: element k is the mean of all ratings
    before index k (element 0 is the first rating itself)."""
    avg = [0]*len(rating)
    avg[0] = float(rating[0])
    for k in range(1, len(rating)):
        avg[k] = float(np.mean(rating[:k]))
    return avg
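
The same lagged cumulative mean can be written in one vectorized expression
(a sketch, assuming rating is a 1-D sequence):

r = np.asarray(rating, dtype=float)
avg = np.concatenate([[r[0]], np.cumsum(r)[:-1] / np.arange(1, len(r))])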

In [131]:
def cuts_out_repeats(x, y):
    """Drop points with repeated timestamps, since bayesian_blocks raises
    on repeated t when x is specified (see the BayesianBlocks.py listing
    below).  Also rescales x by 1/100; note the first point is always
    dropped."""
    hold = x[0]
    xnew = []
    ynew = []
    for i in range(1, len(x)):
        if x[i] == hold:
            pass
        else:
            xnew.append(x[i]/100)
            ynew.append(y[i])
            hold = x[i]
    return xnew, ynew
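
For example (hypothetical values): cuts_out_repeats([100.0, 100.0, 200.0], [5, 4, 3])
returns ([2.0], [3]); the duplicate timestamp and the first point are dropped,
and the surviving timestamp is rescaled by 1/100.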

In [132]:
def get_data(PID, cursor, tablename):
    # the table name cannot be parameterized, but the PID value can be,
    # which avoids quoting problems and SQL injection
    sql = "SELECT RTime, RScore FROM " + tablename + " WHERE PID = %s"
    cursor.execute(sql, (PID,))
    data = sorted(cursor.fetchall())  # sort by review timestamp
    rating = np.array(zip(*data)[1], dtype=int)
    time = np.array(zip(*data)[0], dtype=float)
    dates = [dt.datetime.fromtimestamp(ts) for ts in time]
    return rating, time, dates

In [133]:
PIDlist = [' B0000X7CMQ', ' B0000E2PEI', ' B000GTR2F6', ' B000AQSMPO', ' B00005MF9C', ' B0000E2PEI', ' B0006SFFAQ', ' B00005AQ9Q', ' B00005R19P', ' B000FFQ554', ' B0006ZUHR0']

In [134]:
db = MySQLdb.connect(host="localhost", user="root", db = "home_kitchen")
cursor = db.cursor()

In [135]:
tablename = 'all_hk'
pid1 = PIDlist[0]
pid2 = PIDlist[1]
pid3 = PIDlist[2]

In [136]:
r1, t1, d1 = get_data(pid1, cursor, tablename)
r2, t2, d2 = get_data(pid2, cursor, tablename)
r3, t3, d3 = get_data(pid3, cursor, tablename)

In [144]:
x = np.array(t2)
y = np.array(r2)
x, y = cuts_out_repeats(x, y)

# truncate so the length is a multiple of 30
modulo = len(x) % 30
x = np.array(x[:len(x)-modulo])
y = np.array(y[:len(y)-modulo])

# replace raw scores with the 40-review rolling average, scaled by 10 and
# truncated to integers (fitness='events' requires integer counts)
avg = rolling_avg_rating(y)
y = np.array(map(lambda foo: int(foo*10), avg))

print len(x), '   ', x[:10]
print len(y), '   ', y[:10]

windowSize = 150

# Sliding-window Bayesian blocks
# 1) Given a window size, find edges
# 2) Slide window to second edge, which is either a changepoint or the end of
#    the first window
# 3) End when reaching the end of the data stream
n = len(y)
xm = []
ym = []
i = 0
while True:
    j = min(i + windowSize, n)
    e = bayesian_blocks(x[i:j], y[i:j], p0=1e-7)
    if len(e) < 2:
        break
    # keep the first block of this window and average y over it
    xm += [e[0], e[1]]
    select = np.logical_and(x >= e[0], x <= e[1])
    yavg = np.average(y[select])
    ym += [yavg, yavg]
    # advance to the first changepoint (or the end of this window)
    i = np.searchsorted(x, e[1])

420     [ 11119680.  11131776.  11194848.  11365920.  11437632.  11439360.
  11558592.  11670912.  11707200.  11753856.]
420     [20 20 20 30 25 22 26 30 31 32]

In [145]:
# Full-sample Bayesian blocks
edges = bayesian_blocks(x, y, p0=1e-5)
yavg = []
for i in range(len(edges)-1):
    select = np.logical_and(x >= edges[i], x <= edges[i+1])
    if np.any(select):
        yavg.append(np.average(y[select]))
yavg.append(yavg[-1])  # repeat the last value so steps-post draws the final block

In [146]:
mpl.rc("font", family="serif", size=14)

fig = plt.figure(1, figsize=(10,5))
ax = fig.add_subplot(111)
ax.plot(x, y, 'o', alpha=0.5, label="data")
ax.plot(edges[:len(yavg)], yavg, lw=2, drawstyle="steps-post", color="green",
        alpha=0.7, label="Bayesian Blocks (full sample)")
ax.plot(xm, ym, lw=2, drawstyle="steps-post", color="red",
        alpha=0.7, label="Bayesian Blocks (sliding window: %d)" % windowSize)
ax.set_ylim([0, 1.5*max(y)])
ax.set_xlabel(r"$x$")
ax.set_ylabel(r"$y$")

h, l = ax.get_legend_handles_labels()
leg = ax.legend(h, l, loc=1, prop={"size":10})
leg.get_frame().set_linewidth(0)

#fig.tight_layout()

plt.show()



In [147]:
avg = avg_rating(y)

In [148]:
plt.scatter(x, avg)


Out[148]:
<matplotlib.collections.PathCollection at 0x110df3650>

In [31]:
x[0]%1


Out[31]:
0.0

In [36]:
x = x.astype(int)

In [34]:
x


Out[34]:
array([ 9879840,  9881568,  9885024,  9886752,  9892800,  9894528,
        9902304,  9904032,  9904896,  9905760,  9913536,  9921312,
        9943776,  9957600,  9970560,  9981792,  9982656,  9988704,
        9998208, 10000800, 10001664, 10008576, 10012032, 10012896,
       10033632, 10037952, 10038816, 10041408, 10044000, 10045728,
       10050912, 10051776, 10062144, 10068192, 10069056, 10073376,
       10079424, 10080288, 10082016, 10085472, 10086336, 10091520,
       10094976, 10100160, 10102752, 10107936, 10111392, 10114848,
       10117440, 10125216, 10127808, 10128672, 10133856, 10140768,
       10153728, 10160640, 10165824, 10195200, 10203840, 10238400,
       10239264, 10245312, 10248768, 10249632, 10258272, 10259136,
       10260000, 10265184, 10266912, 10270368, 10273824, 10281600,
       10289376, 10301472, 10315296, 10357632, 10387008, 10391328,
       10393056, 10394784])

In [40]:
bayesian_blocks(np.arange(100), np.random.randint(-5,5,100), sigma=1e-4)


Out[40]:
array([  0.,  99.])
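
With negative integer "counts", the default fitness='events' used above is
not really meaningful; for noisy point data, the 'measures' fitness is the
intended mode (see the BayesianBlocks.py listing below). A sketch with
invented unit errors:

bayesian_blocks(np.arange(100.), np.random.randn(100), sigma=1.0, fitness='measures')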

In [45]:
%cat BayesianBlocks.py


#!/usr/bin/env python

import numpy as np

"""
Bayesian Block implementation
=============================

Dynamic programming algorithm for finding the optimal adaptive-width histogram.

Based on Scargle et al 2012 [1]_

References
----------
.. [1] http://adsabs.harvard.edu/abs/2012arXiv1207.5578S
"""

class FitnessFunc(object):
    """Base class for fitness functions

    Each fitness function class has the following:
    - fitness(...) : compute fitness function.
       Arguments accepted by fitness must be among [T_k, N_k, a_k, b_k, c_k]
    - prior(N, Ntot) : compute prior on N given a total number of points Ntot
    """
    def __init__(self, p0=0.05, gamma=None):
        self.p0 = p0
        self.gamma = gamma

    def validate_input(self, t, x, sigma):
        """Check that input is valid"""
        pass

    def fitness(self, **kwargs):
        raise NotImplementedError()

    def prior(self, N, Ntot):
        if self.gamma is None:
            return self.p0_prior(N, Ntot)
        else:
            return self.gamma_prior(N, Ntot)

    def p0_prior(self, N, Ntot):
        # eq. 21 from Scargle 2012
        return 4 - np.log(73.53 * self.p0 * (N ** -0.478))

    def gamma_prior(self, N, Ntot):
        """Basic prior, parametrized by gamma (eq. 3 in Scargle 2012)"""
        if self.gamma == 1:
            return 0
        else:
            return (np.log(1 - self.gamma)
                    - np.log(1 - self.gamma ** (Ntot + 1))
                    + N * np.log(self.gamma))

    # the args property returns the list of arguments accepted by the
    # method fitness().  This allows more efficient computation below.
    @property
    def args(self):
        return self.fitness.func_code.co_varnames[1:]

class Events(FitnessFunc):
    """Fitness for binned or unbinned events

    Parameters
    ----------
    p0 : float
        False alarm probability, used to compute the prior on N
        (see eq. 21 of Scargle 2012).  Default prior is for p0 = 0.05.
    gamma : float or None
        If specified, then use this gamma to compute the general prior form,
        p ~ gamma^N.  If gamma is specified, p0 is ignored.
    """
    def fitness(self, N_k, T_k):
        # eq. 19 from Scargle 2012
        return N_k * (np.log(N_k) - np.log(T_k))

    def prior(self, N, Ntot):
        if self.gamma is not None:
            return self.gamma_prior(N, Ntot)
        else:
            # eq. 21 from Scargle 2012
            return 4 - np.log(73.53 * self.p0 * (N ** -0.478))

class RegularEvents(FitnessFunc):
    """Fitness for regular events

    This is for data which has a fundamental "tick" length, so that all
    measured values are multiples of this tick length.  In each tick, there
    are either zero or one counts.

    Parameters
    ----------
    dt : float
        tick rate for data
    gamma : float
        specifies the prior on the number of bins: p ~ gamma^N
    """

    def __init__(self, dt, p0=0.05, gamma=None):
        self.dt = dt
        self.p0 = p0
        self.gamma = gamma

    def validate_input(self, t, x, sigma):
        unique_x = np.unique(x)
        if list(unique_x) not in ([0], [1], [0, 1]):
            raise ValueError("Regular events must have only 0 and 1 in x")

    def fitness(self, T_k, N_k):
        # Eq. 75 of Scargle 2012
        M_k = T_k / self.dt
        N_over_M = N_k * 1. / M_k

        eps = 1E-8
        if np.any(N_over_M > 1 + eps):
            import warnings
            warnings.warn('regular events: N/M > 1.  '
                          'Is the time step correct?')

        one_m_NM = 1 - N_over_M
        N_over_M[N_over_M <= 0] = 1
        one_m_NM[one_m_NM <= 0] = 1

        return N_k * np.log(N_over_M) + (M_k - N_k) * np.log(one_m_NM)

class PointMeasures(FitnessFunc):
    """Fitness for point measures

    Parameters
    ----------
    gamma : float
        specifies the prior on the number of bins: p ~ gamma^N
        if gamma is not specified, then a prior based on simulations
        will be used (see sec 3.3 of Scargle 2012)
    """
    def __init__(self, p0=None, gamma=None):
        self.p0 = p0
        self.gamma = gamma

    def fitness(self, a_k, b_k):
        # eq. 41 from Scargle 2012
        return (b_k * b_k) / (4 * a_k)

    def prior(self, N, Ntot):
        if self.gamma is not None:
            return self.gamma_prior(N, Ntot)
        elif self.p0 is not None:
            return self.p0_prior(N, Ntot)
        else:
            # eq. at end of sec 3.3 in Scargle 2012
            return 1.32 + 0.577 * np.log10(N)

def bayesian_blocks(t, x=None, sigma=None,
                    fitness='events', **kwargs):
    """Bayesian Blocks Implementation

    This is a flexible implementation of the Bayesian Blocks algorithm
    described in Scargle 2012 [1]_

    Parameters
    ----------
    t : array_like
        data times (one dimensional, length N)
    x : array_like (optional)
        data values
    sigma : array_like or float (optional)
        data errors
    fitness : str or object
        the fitness function to use.
        If a string, the following options are supported:

        - 'events' : binned or unbinned event data
            extra arguments are `p0`, which gives the false alarm probability
            to compute the prior, or `gamma` which gives the slope of the
            prior on the number of bins.
        - 'regular_events' : non-overlapping events measured at multiples
            of a fundamental tick rate, `dt`, which must be specified as an
            additional argument.  The prior can be specified through `gamma`,
            which gives the slope of the prior on the number of bins.
        - 'measures' : fitness for a measured sequence with Gaussian errors
            The prior can be specified using `gamma`, which gives the slope
            of the prior on the number of bins.  If `gamma` is not specified,
            then a prior based on simulations will be used (see sec 3.3 of
            Scargle 2012).

        Alternatively, the fitness can be a user-specified object of
        type derived from the FitnessFunc class.

    Returns
    -------
    edges : ndarray
        array containing the (N+1) bin edges

    Examples
    --------
    Event data:

    >>> t = np.random.normal(size=100)
    >>> bins = bayesian_blocks(t, fitness='events', p0=0.01)

    Event data with repeats:

    >>> t = np.random.normal(size=100)
    >>> t[80:] = t[:20]
    >>> bins = bayesian_blocks(t, fitness='events', p0=0.01)

    Regular event data:

    >>> dt = 0.01
    >>> t = dt * np.arange(1000)
    >>> x = np.zeros(len(t))
    >>> x[np.random.randint(0, len(t), len(t) / 10)] = 1
    >>> bins = bayesian_blocks(t, fitness='regular_events', dt=dt, gamma=0.9)

    Measured point data with errors:

    >>> t = 100 * np.random.random(100)
    >>> x = np.exp(-0.5 * (t - 50) ** 2)
    >>> sigma = 0.1
    >>> x_obs = np.random.normal(x, sigma)
    >>> bins = bayesian_blocks(t, x_obs, sigma, fitness='measures')

    References
    ----------
    .. [1] Scargle, J `et al.` (2012)
           http://adsabs.harvard.edu/abs/2012arXiv1207.5578S

    See Also
    --------
    astroML.plotting.hist : histogram plotting function which can make use
                            of bayesian blocks.
    """
    # validate array input
    t = np.asarray(t, dtype=float)
    if x is not None:
        x = np.asarray(x)
    if sigma is not None:
        sigma = np.asarray(sigma)

    # verify the fitness function
    if fitness == 'events':
        if x is not None and np.any(x % 1 > 0):
            raise ValueError("x must be integer counts for fitness='events'")
        fitfunc = Events(**kwargs)
    elif fitness == 'regular_events':
        if x is not None and (np.any(x % 1 > 0) or np.any(x > 1)):
            raise ValueError("x must be 0 or 1 for fitness='regular_events'")
        fitfunc = RegularEvents(**kwargs)
    elif fitness == 'measures':
        if x is None:
            raise ValueError("x must be specified for fitness='measures'")
        fitfunc = PointMeasures(**kwargs)
    else:
        if not (hasattr(fitness, 'args') and
                hasattr(fitness, 'fitness') and
                hasattr(fitness, 'prior')):
            raise ValueError("fitness not understood")
        fitfunc = fitness

    # find unique values of t
    t = np.array(t, dtype=float)
    assert t.ndim == 1
    unq_t, unq_ind, unq_inv = np.unique(t, return_index=True,
                                        return_inverse=True)

    # if x is not specified, x will be counts at each time
    if x is None:
        if sigma is not None:
            raise ValueError("If sigma is specified, x must be specified")

        if len(unq_t) == len(t):
            x = np.ones_like(t)
        else:
            x = np.bincount(unq_inv)

        t = unq_t
        sigma = 1

    # if x is specified, then we need to sort t and x together
    else:
        x = np.asarray(x)

        if len(t) != len(x):
            raise ValueError("Size of t and x does not match")

        if len(unq_t) != len(t):
            raise ValueError("Repeated values in t not supported when "
                             "x is specified")
        t = unq_t
        x = x[unq_ind]

    # verify the given sigma value
    N = t.size
    if sigma is not None:
        sigma = np.asarray(sigma)
        if sigma.shape not in [(), (1,), (N,)]:
            raise ValueError('sigma does not match the shape of x')
    else:
        sigma = 1

    # validate the input
    fitfunc.validate_input(t, x, sigma)

    # compute values needed for computation, below
    if 'a_k' in fitfunc.args:
        ak_raw = np.ones_like(x) / sigma / sigma
    if 'b_k' in fitfunc.args:
        bk_raw = x / sigma / sigma
    if 'c_k' in fitfunc.args:
        ck_raw = x * x / sigma / sigma

    # create length-(N + 1) array of cell edges
    edges = np.concatenate([t[:1],
                            0.5 * (t[1:] + t[:-1]),
                            t[-1:]])
    block_length = t[-1] - edges

    # arrays to store the best configuration
    best = np.zeros(N, dtype=float)
    last = np.zeros(N, dtype=int)

    #-----------------------------------------------------------------
    # Start with first data cell; add one cell at each iteration
    #-----------------------------------------------------------------
    for R in range(N):
        # Compute fit_vec : fitness of putative last block (end at R)
        kwds = {}

        # T_k: width/duration of each block
        if 'T_k' in fitfunc.args:
            kwds['T_k'] = block_length[:R + 1] - block_length[R + 1]

        # N_k: number of elements in each block
        if 'N_k' in fitfunc.args:
            kwds['N_k'] = np.cumsum(x[:R + 1][::-1])[::-1]

        # a_k: eq. 31
        if 'a_k' in fitfunc.args:
            kwds['a_k'] = 0.5 * np.cumsum(ak_raw[:R + 1][::-1])[::-1]

        # b_k: eq. 32
        if 'b_k' in fitfunc.args:
            kwds['b_k'] = - np.cumsum(bk_raw[:R + 1][::-1])[::-1]

        # c_k: eq. 33
        if 'c_k' in fitfunc.args:
            kwds['c_k'] = 0.5 * np.cumsum(ck_raw[:R + 1][::-1])[::-1]

        # evaluate fitness function
        fit_vec = fitfunc.fitness(**kwds)

        A_R = fit_vec - fitfunc.prior(R + 1, N)
        A_R[1:] += best[:R]

        i_max = np.argmax(A_R)
        last[R] = i_max
        best[R] = A_R[i_max]

    #-----------------------------------------------------------------
    # Now find changepoints by iteratively peeling off the last block
    #-----------------------------------------------------------------
    change_points = np.zeros(N, dtype=int)
    i_cp = N
    ind = N
    while True:
        i_cp -= 1
        change_points[i_cp] = ind
        if ind == 0:
            break
        ind = last[ind - 1]
    change_points = change_points[i_cp:]

    return edges[change_points]
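
Since bayesian_blocks accepts any fitness object exposing args, fitness, and
prior (the "fitness not understood" check above), a custom fitness can be
dropped in. A minimal sketch, assuming PointMeasures is also importable from
BayesianBlocks and that the x, y arrays from the sliding-window cell are in
scope; FlatPriorMeasures and its penalty are names invented here:

from BayesianBlocks import PointMeasures

class FlatPriorMeasures(PointMeasures):
    # reuse the point-measures fitness, but charge a constant
    # penalty per block instead of eq. 21 or the gamma prior
    def __init__(self, penalty=2.0):
        self.penalty = penalty
    def prior(self, N, Ntot):
        return self.penalty

edges_custom = bayesian_blocks(x, y, sigma=1.0, fitness=FlatPriorMeasures(5.0))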


In [ ]: