In [1]:
import os

import numpy as np
import pandas as pd
import Quandl
from matplotlib import pyplot as plt
from sklearn.linear_model import BayesianRidge
%matplotlib inline

In [ ]:


In [2]:
# Fetch the full OHLCV history for Tonix Pharmaceuticals (YAHOO/TNXP).
# SECURITY: never hardcode an API token in a notebook — it leaks through
# version control and sharing. Read it from the environment instead
# (unset falls back to anonymous, rate-limited access).
tnxp_df = Quandl.get("YAHOO/TNXP", authtoken=os.environ.get('QUANDL_AUTH_TOKEN'))

In [3]:
tnxp_df


Out[3]:
Open High Low Close Volume Adjusted Close
Date
2012-05-10 1.50 1.50 1.50 1.50 0 30.000000
2012-05-11 1.50 1.50 1.50 1.50 0 30.000000
2012-05-14 1.50 1.50 1.50 1.50 0 30.000000
2012-05-15 1.50 1.50 1.50 1.50 0 30.000000
2012-05-16 1.32 1.32 1.32 1.32 0 26.400000
2012-05-17 0.98 1.09 0.98 1.01 100 20.200001
2012-05-18 1.01 1.01 1.01 1.01 0 20.200001
2012-05-21 1.01 1.01 1.01 1.01 0 20.200001
2012-05-22 1.01 1.01 1.01 1.01 0 20.200001
2012-05-23 1.00 1.05 1.00 1.00 1100 20.000000
2012-05-24 1.00 1.00 1.00 1.00 100 20.000000
2012-05-25 0.85 0.95 0.85 0.95 500 19.000000
2012-05-29 0.95 0.95 0.95 0.95 0 19.000000
2012-05-30 0.95 0.95 0.95 0.95 0 19.000000
2012-05-31 0.95 0.95 0.95 0.95 0 19.000000
2012-06-01 0.84 0.84 0.84 0.84 100 16.799999
2012-06-04 0.84 0.84 0.84 0.84 0 16.799999
2012-06-05 0.84 0.84 0.84 0.84 0 16.799999
2012-06-06 0.85 0.85 0.85 0.85 100 17.000000
2012-06-07 1.00 1.00 1.00 1.00 200 20.000000
2012-06-08 1.05 1.05 0.95 0.95 700 19.000000
2012-06-11 1.00 1.00 1.00 1.00 0 20.000000
2012-06-12 1.00 1.00 1.00 1.00 0 20.000000
2012-06-13 1.00 1.00 1.00 1.00 0 20.000000
2012-06-14 1.00 1.00 1.00 1.00 0 20.000000
2012-06-15 0.95 0.95 0.95 0.95 0 19.000000
2012-06-18 0.95 0.95 0.95 0.95 0 19.000000
2012-06-19 0.95 0.95 0.95 0.95 0 19.000000
2012-06-20 0.83 0.83 0.83 0.83 200 16.600001
2012-06-21 0.83 0.83 0.83 0.83 0 16.600001
... ... ... ... ... ... ...
2015-06-29 9.18 9.19 8.46 8.50 331600 8.500000
2015-06-30 8.50 8.99 8.39 8.97 119500 8.970000
2015-07-01 9.05 9.48 8.73 8.80 173700 8.800000
2015-07-02 8.75 9.22 8.75 9.19 74100 9.190000
2015-07-06 9.25 9.50 8.91 9.14 157100 9.140000
2015-07-07 9.10 9.11 8.86 8.92 106000 8.920000
2015-07-08 8.93 9.43 8.47 8.58 185300 8.580000
2015-07-09 8.70 8.79 8.38 8.58 161000 8.580000
2015-07-10 8.65 8.65 8.11 8.22 204300 8.220000
2015-07-13 8.15 8.59 8.15 8.24 233400 8.240000
2015-07-14 7.60 7.70 7.44 7.47 2501900 7.470000
2015-07-15 7.48 7.84 7.48 7.81 737900 7.810000
2015-07-16 7.80 8.05 7.73 8.00 331300 8.000000
2015-07-17 8.11 8.73 7.94 8.66 511500 8.660000
2015-07-20 8.62 8.88 8.31 8.70 369200 8.700000
2015-07-21 8.65 9.50 8.48 9.38 785400 9.380000
2015-07-22 9.40 9.89 9.00 9.04 531900 9.040000
2015-07-23 9.04 9.28 8.52 8.62 227300 8.620000
2015-07-24 8.56 8.70 8.08 8.14 222300 8.140000
2015-07-27 8.22 8.22 7.87 8.02 292800 8.020000
2015-07-28 8.05 8.95 7.84 8.08 160400 8.080000
2015-07-29 8.08 8.33 7.86 8.08 208800 8.080000
2015-07-30 8.04 8.28 7.90 8.15 178100 8.150000
2015-07-31 8.16 8.56 8.01 8.43 145100 8.430000
2015-08-03 8.38 8.68 8.15 8.31 181900 8.310000
2015-08-04 8.26 8.48 8.17 8.21 119100 8.210000
2015-08-05 8.23 8.48 8.10 8.13 207100 8.130000
2015-08-06 8.13 8.26 7.77 7.81 294600 7.810000
2015-08-07 7.82 7.85 7.13 7.55 529700 7.550000
2015-08-10 7.50 7.54 7.20 7.27 301100 7.270000

817 rows × 6 columns


In [82]:
tnxp_df.describe()


Out[82]:
Open High Low Close Volume Adjusted Close
count 779.000000 779.000000 779.000000 779.000000 779.000000 779.000000
mean 5.910359 6.108280 5.733646 5.914685 89213.093710 9.854442
std 4.576316 4.762763 4.421976 4.575448 185719.184065 4.716885
min 0.240000 0.240000 0.240000 0.240000 0.000000 0.360000
25% 0.905000 0.910000 0.905000 0.910000 100.000000 6.255000
50% 6.170000 6.280000 6.010000 6.150000 34600.000000 8.590000
75% 8.920000 9.255000 8.525000 8.945000 101150.000000 13.580000
max 18.610001 21.000000 17.250000 18.670000 2347000.000000 30.000000

In [83]:
# Quick visual sanity check of the closing-price history.
tnxp_df['Close'].plot()


Out[83]:
<matplotlib.axes._subplots.AxesSubplot at 0x10f949790>


In [94]:
tnxp_df.head()


Out[94]:
Open High Low Close Volume Adjusted Close
Date
2012-05-10 1.50 1.50 1.50 1.50 0 30.0
2012-05-11 1.50 1.50 1.50 1.50 0 30.0
2012-05-14 1.50 1.50 1.50 1.50 0 30.0
2012-05-15 1.50 1.50 1.50 1.50 0 30.0
2012-05-16 1.32 1.32 1.32 1.32 0 26.4

In [95]:
# All closing prices except the last day; after being shifted forward by
# one row (next cell) each row carries the previous day's close.
close_values = tnxp_df['Close'].values[:-1]

In [96]:
# Prepend a 0.0 placeholder so row i holds day i-1's close. The first row
# has no prior day; it is dropped in a later cell.
tnxp_df['YesterdayClose'] = np.concatenate(([0.0], close_values))

In [100]:
# Drop the first row: its YesterdayClose is a 0.0 placeholder, not data.
# `.ix` is deprecated and removed in modern pandas; use positional `.iloc`
# (the original `.ix[1:]` fell back to positional indexing here anyway).
tnxp_df = tnxp_df.iloc[1:]

In [101]:
tnxp_df.tail()


Out[101]:
Open High Low Close Volume Adjusted Close YesterdayClose
Date
2015-06-10 8.40 8.75 8.18 8.44 414500 8.44 8.40
2015-06-11 8.75 8.75 7.11 8.15 731700 8.15 8.44
2015-06-12 8.60 9.33 8.40 8.82 1011200 8.82 8.15
2015-06-15 8.90 9.34 8.68 9.26 607400 9.26 8.82
2015-06-16 9.19 9.95 8.99 9.90 674400 9.90 9.26

In [102]:
# Feature matrix: same-day Open/Low/Volume plus the previous day's close.
# NOTE(review): Open and Low are same-day values, so this model can only
# "predict" High once part of the session is known — confirm that is the
# intended use.
X = tnxp_df[['Open', 'Low', 'Volume', 'YesterdayClose']].values
# Target: the day's high price, flattened to 1-D for scikit-learn.
y = tnxp_df[['High']].values.flatten()
print X.shape, y.shape


(778, 4) (778,)

In [103]:
# Shuffled row indices. NOTE: never used below — the split is chronological,
# which is the correct choice for time-series data; kept for reference only.
# Fixed off-by-one: np.arange(0, N-1) silently excluded the last row index.
indices = np.random.permutation(np.arange(0, X.shape[0]))

In [105]:
train_count = np.floor(X.shape[0] * 0.80)
print train_count
print np.ceil(X.shape[0] * 0.20)


622.0
156.0

In [106]:
X_train = X[:train_count]
y_train = y[:train_count]
X_test = X[train_count:]
y_test = y[train_count:]

print len(X_train), len(X_test), len(y_train), len(y_test)


622 156 622 156

In [107]:
# Fit a Bayesian ridge regression on the training window.
# compute_score=True records the log marginal likelihood per iteration
# (available afterwards as clf.scores_).
clf = BayesianRidge(compute_score=True)
clf.fit(X_train, y_train)


Out[107]:
BayesianRidge(alpha_1=1e-06, alpha_2=1e-06, compute_score=True, copy_X=True,
       fit_intercept=True, lambda_1=1e-06, lambda_2=1e-06, n_iter=300,
       normalize=False, tol=0.001, verbose=False)

In [108]:
# R^2 on the held-out (most recent) ~20% of trading days.
clf.score(X_test, y_test)


Out[108]:
0.91061550759807308

In [124]:
# 'Open', 'Low', 'Volume', 'YesterdayClose'
# june 16
prediction = clf.predict(np.array([9.19, 8.99, 674400, 9.26])) 
print prediction, prediction - (prediction*0.05)


[ 10.1271046] [ 9.62074937]

In [123]:
# june 17
prediction = clf.predict(np.array([9.97, 9.67, 797492, 9.90]))
print prediction, prediction - (prediction*0.05)


[ 11.03600107] [ 10.48420101]

In [125]:
# june 17
prediction = clf.predict(np.array([9.97, 9.67, 797400, 9.90]))
print prediction, prediction - (prediction*0.05)


[ 11.03590219] [ 10.48410708]

In [ ]:


In [ ]:


In [ ]:



In [72]:
# Second experiment: predict the same-day Close from same-day
# Open/High/Low/Volume.
# NOTE(review): High and Low are only known once the session ends, so this
# model cannot forecast Close ahead of time — the high test R^2 below
# reflects that leakage. Also note this cell's execution count (In[72])
# predates the YesterdayClose cells above; the notebook was run out of order.
X = tnxp_df[['Open', 'High', 'Low', 'Volume']].values
y = tnxp_df[['Close']].values.flatten()
print X.shape, y.shape


(779, 4) (779,)

In [73]:
# Shuffled row indices. NOTE: never used below (split stays chronological).
# Fixed off-by-one: np.arange(0, N-1) silently excluded the last row index.
indices = np.random.permutation(np.arange(0, X.shape[0]))

In [74]:
train_count = np.floor(X.shape[0] * 0.80)
print train_count
print np.ceil(X.shape[0] * 0.20)


623.0
156.0

In [75]:
X_train = X[:train_count]
y_train = y[:train_count]
X_test = X[train_count:]
y_test = y[train_count:]

print len(X_train), len(X_test), len(y_train), len(y_test)


623 156 623 156

In [76]:
# Fit the Close-prediction model (rebinds `clf`, shadowing the High model).
clf = BayesianRidge(compute_score=True)
clf.fit(X_train, y_train)


Out[76]:
BayesianRidge(alpha_1=1e-06, alpha_2=1e-06, compute_score=True, copy_X=True,
       fit_intercept=True, lambda_1=1e-06, lambda_2=1e-06, n_iter=300,
       normalize=False, tol=0.001, verbose=False)

In [77]:
# R^2 on the held-out window (inflated by same-day High/Low leakage — see
# the note where X was built).
clf.score(X_test, y_test)


Out[77]:
0.97043973543978568

In [78]:
# Predict Close for one day's [Open, High, Low, Volume].
# predict() requires a 2-D (n_samples, n_features) array; a flat 1-D
# sample is rejected by modern scikit-learn.
clf.predict(np.array([[9.97, 10.45, 9.67, 797492]]))


Out[78]:
array([ 10.00664805])

In [79]:
# Second spot check with a different day's [Open, High, Low, Volume];
# sample wrapped to 2-D shape (1, 4) as predict() requires.
clf.predict(np.array([[7.18, 7.18, 6.79, 123700]]))


Out[79]:
array([ 6.95554819])

In [ ]:


In [8]:
# Reference run of scikit-learn's "Bayesian Ridge Regression" example:
# sparse true weights + Gaussian noise, comparing Bayesian ridge against OLS.
# NOTE(review): this cell rebinds X, y and clf, clobbering the stock-model
# variables defined earlier — earlier cells need a fresh kernel afterwards.
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from scipy import stats

from sklearn.linear_model import BayesianRidge, LinearRegression

###############################################################################
# Generating simulated data with Gaussian weights
np.random.seed(0)
n_samples, n_features = 100, 100
X = np.random.randn(n_samples, n_features)  # Create Gaussian data
# Create weights with a precision lambda_ of 4.
lambda_ = 4.
w = np.zeros(n_features)
# Only keep 10 weights of interest
relevant_features = np.random.randint(0, n_features, 10)
for i in relevant_features:
    w[i] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(lambda_))
# Create noise with a precision alpha of 50.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)
# Create the target
y = np.dot(X, w) + noise

###############################################################################
# Fit the Bayesian Ridge Regression and an OLS for comparison
clf = BayesianRidge(compute_score=True)
clf.fit(X, y)

ols = LinearRegression()
ols.fit(X, y)

###############################################################################
# Plot true weights, estimated weights and histogram of the weights
plt.figure(figsize=(6, 5))
plt.title("Weights of the model")
plt.plot(clf.coef_, 'b-', label="Bayesian Ridge estimate")
plt.plot(w, 'g-', label="Ground truth")
plt.plot(ols.coef_, 'r--', label="OLS estimate")
plt.xlabel("Features")
plt.ylabel("Values of the weights")
plt.legend(loc="best", prop=dict(size=12))

plt.figure(figsize=(6, 5))
plt.title("Histogram of the weights")
plt.hist(clf.coef_, bins=n_features, log=True)
plt.plot(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
         'ro', label="Relevant features")
plt.ylabel("Features")
plt.xlabel("Values of the weights")
plt.legend(loc="lower left")

plt.figure(figsize=(6, 5))
plt.title("Marginal log-likelihood")
plt.plot(clf.scores_)
plt.ylabel("Score")
plt.xlabel("Iterations")
plt.show()


Automatically created module for IPython interactive environment


In [4]:
#https://www.google.com/finance/getprices?i=1&p=1000d&f=d,o,h,l,c,v&df=cpct&q=LPTH

In [2]:
# Load the Quandl WIKI ticker universe (quandl code + name) from a local CSV.
tickers_df = pd.read_csv('WIKI_tickers.csv')
tickers_df.shape


Out[2]:
(2666, 2)

In [3]:
# Extract the Quandl codes (e.g. 'WIKI/ACT') as a plain Python list.
tickers = tickers_df['quandl code'].values.tolist()
print len(tickers)
print tickers[:5]


2666
['WIKI/ACT', 'WIKI/ADM', 'WIKI/AEE', 'WIKI/ADP', 'WIKI/ADS']

In [6]:
new_tickers = []
for ticker in tickers[5:10]:
    stock = Quandl.get(ticker, authtoken='DVhizWXNTePyzzy1eHWR')
    if stock['Close'].values[-1] < 20.00:
        new_tickers.append(ticker)
        
print len(new_tickers)


1

In [7]:
# Tickers that passed the under-$20 screen.
new_tickers


Out[7]:
['WIKI/AES']

In [8]:
# Re-fetch the full history for the first screened ticker.
# SECURITY: token comes from the environment, not a hardcoded literal.
stock = Quandl.get(new_tickers[0], authtoken=os.environ.get('QUANDL_AUTH_TOKEN'))

In [9]:
# Most recent rows, to eyeball the fetched columns and values.
stock.tail()


Out[9]:
Open High Low Close Volume Ex-Dividend Split Ratio Adj. Open Adj. High Adj. Low Adj. Close Adj. Volume
Date
2015-06-08 13.12 13.160 13.020 13.08 4512754 0 1 13.12 13.160 13.020 13.08 4512754
2015-06-09 13.07 13.510 13.060 13.51 9146188 0 1 13.07 13.510 13.060 13.51 9146188
2015-06-10 13.64 13.650 13.300 13.32 5886451 0 1 13.64 13.650 13.300 13.32 5886451
2015-06-11 13.40 13.580 13.320 13.52 4962357 0 1 13.40 13.580 13.320 13.52 4962357
2015-06-12 13.48 13.515 13.305 13.35 3640202 0 1 13.48 13.515 13.305 13.35 3640202

In [54]:
plt.subplot(2,1,1)
plt.plot(stock.index[-10:],stock['Close'].values[-10:])
plt.subplot(2,1,2)
plt.bar(stock.index[-10:],stock['Volume'].values[-10:])
plt.xticks(rotation=45)
plt.show()



In [29]:
# Close-only line plot of the last 10 trading days.
plt.plot(stock.index[-10:],stock['Close'].values[-10:])
plt.show()



In [30]:
# Volume bars for the last 10 trading days.
plt.bar(stock.index[-10:], stock['Volume'].values[-10:])


Out[30]:
<Container object of 10 artists>


In [63]:
#!/usr/bin/env python
# Candlestick chart of AES for one month, with Monday major ticks.
# NOTE(review): matplotlib.finance was deprecated in 1.4 and removed in
# 2.0 (now the separate mpl_finance/mplfinance package), and the Yahoo
# quote endpoint it fetched from no longer exists — this cell only runs
# on old matplotlib versions.
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter, WeekdayLocator, DayLocator, MONDAY
from matplotlib.finance import quotes_historical_yahoo_ohlc, candlestick_ohlc


# (Year, month, day) tuples suffice as args for quotes_historical_yahoo
date1 = (2015, 5, 15)
date2 = (2015, 6, 15)


mondays = WeekdayLocator(MONDAY)        # major ticks on the mondays
alldays = DayLocator()              # minor ticks on the days
weekFormatter = DateFormatter('%b %d')  # e.g., Jan 12
dayFormatter = DateFormatter('%d')      # e.g., 12

quotes = quotes_historical_yahoo_ohlc('AES', date1, date2)
if len(quotes) == 0:
    raise SystemExit

fig, ax = plt.subplots()
fig.subplots_adjust(bottom=0.2)
ax.xaxis.set_major_locator(mondays)
ax.xaxis.set_minor_locator(alldays)
ax.xaxis.set_major_formatter(weekFormatter)
#ax.xaxis.set_minor_formatter(dayFormatter)

#plot_day_summary(ax, quotes, ticksize=3)
candlestick_ohlc(ax, quotes, width=0.6, colorup='g', colordown='r')

ax.xaxis_date()
ax.autoscale_view()
plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')

plt.show()



In [64]:
# Technical-analysis dashboard (adapted from the matplotlib finance demo):
# fetches SPY daily history from Yahoo and plots RSI / price+volume / MACD.
# NOTE(review): matplotlib.finance and mlab.csv2rec were removed in later
# matplotlib releases, and the Yahoo CSV endpoint is gone — legacy-only cell.
import datetime
import numpy as np
import matplotlib.colors as colors
import matplotlib.finance as finance
import matplotlib.dates as mdates
import matplotlib.ticker as mticker
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager


startdate = datetime.date(2006,1,1)
today = enddate = datetime.date.today()
ticker = 'SPY'


fh = finance.fetch_historical_yahoo(ticker, startdate, enddate)
# a numpy record array with fields: date, open, high, low, close, volume, adj_close)

# Parse the CSV handle into a record array and sort it by date (oldest first).
r = mlab.csv2rec(fh); fh.close()
r.sort()


def moving_average(x, n, type='simple'):
    """
    compute an n period moving average.

    type is 'simple' | 'exponential'

    x: 1-D sequence of values (oldest first)
    n: window length
    returns: array of len(x) smoothed values
    """
    values = np.asarray(x)
    # Build the averaging kernel: flat weights for a simple MA, an
    # exponentially increasing ramp (most recent point heaviest) otherwise.
    if type == 'simple':
        kernel = np.ones(n)
    else:
        kernel = np.exp(np.linspace(-1., 0., n))

    kernel = kernel / kernel.sum()

    smoothed = np.convolve(values, kernel, mode='full')[:len(values)]
    # The first n points have an incomplete window; pin them to the first
    # fully-windowed value to avoid warm-up artifacts.
    smoothed[:n] = smoothed[n]
    return smoothed

def relative_strength(prices, n=14):
    """
    compute the n period relative strength indicator
    http://stockcharts.com/school/doku.php?id=chart_school:glossary_r#relativestrengthindex
    http://www.investopedia.com/terms/r/rsi.asp

    prices: 1-D sequence of prices (oldest first)
    n: lookback period (14 is the classic RSI period)
    returns: array of RSI values (0-100), same length as prices
    """

    # Seed the average gain/loss from the initial window of price changes.
    # NOTE(review): the seed takes n+1 deltas but divides by n — looks like
    # an off-by-one versus the standard RSI seed; confirm it is intentional
    # (this matches the widely-copied matplotlib demo).
    deltas = np.diff(prices)
    seed = deltas[:n+1]
    up = seed[seed>=0].sum()/n
    down = -seed[seed<0].sum()/n
    # NOTE(review): if the seed window has no losses, `down` is 0 and
    # up/down divides by zero (NumPy float -> inf + RuntimeWarning).
    rs = up/down
    rsi = np.zeros_like(prices)
    rsi[:n] = 100. - 100./(1.+rs)

    # Wilder smoothing: blend the running average gain/loss with each new
    # single-period change, then convert to the 0-100 RSI scale.
    for i in range(n, len(prices)):
        delta = deltas[i-1] # cause the diff is 1 shorter

        if delta>0:
            upval = delta
            downval = 0.
        else:
            upval = 0.
            downval = -delta

        up = (up*(n-1) + upval)/n
        down = (down*(n-1) + downval)/n

        rs = up/down
        rsi[i] = 100. - 100./(1.+rs)

    return rsi

def moving_average_convergence(x, nslow=26, nfast=12):
    """
    compute the MACD (Moving Average Convergence/Divergence) using a fast and slow exponential moving avg'
    return value is emaslow, emafast, macd which are len(x) arrays
    """
    # MACD line = fast EMA minus slow EMA of the same series.
    slow_ema = moving_average(x, nslow, type='exponential')
    fast_ema = moving_average(x, nfast, type='exponential')
    macd_line = fast_ema - slow_ema
    return slow_ema, fast_ema, macd_line


# Global chart styling: grid on, light grey gridlines.
plt.rc('axes', grid=True)
plt.rc('grid', color='0.75', linestyle='-', linewidth=0.5)

# Three stacked panels sharing the x-axis: RSI (top), price + volume
# (middle, tallest), MACD (bottom). Rects are [left, bottom, width, height].
textsize = 9
left, width = 0.1, 0.8
rect1 = [left, 0.7, width, 0.2]
rect2 = [left, 0.3, width, 0.4]
rect3 = [left, 0.1, width, 0.2]


fig = plt.figure(facecolor='white')
axescolor  = '#f6f6f6'  # the axes background color

# NOTE(review): `axisbg` was removed in matplotlib 2.x in favor of
# `facecolor` — this cell only runs on older matplotlib.
ax1 = fig.add_axes(rect1, axisbg=axescolor)  #left, bottom, width, height
ax2 = fig.add_axes(rect2, axisbg=axescolor, sharex=ax1)
ax2t = ax2.twinx()
ax3  = fig.add_axes(rect3, axisbg=axescolor, sharex=ax1)


### plot the relative strength indicator
# Shade the overbought (>=70) and oversold (<=30) regions of the RSI curve.
prices = r.adj_close
rsi = relative_strength(prices)
fillcolor = 'darkgoldenrod'

ax1.plot(r.date, rsi, color=fillcolor)
ax1.axhline(70, color=fillcolor)
ax1.axhline(30, color=fillcolor)
ax1.fill_between(r.date, rsi, 70, where=(rsi>=70), facecolor=fillcolor, edgecolor=fillcolor)
ax1.fill_between(r.date, rsi, 30, where=(rsi<=30), facecolor=fillcolor, edgecolor=fillcolor)
ax1.text(0.6, 0.9, '>70 = overbought', va='top', transform=ax1.transAxes, fontsize=textsize)
ax1.text(0.6, 0.1, '<30 = oversold', transform=ax1.transAxes, fontsize=textsize)
ax1.set_ylim(0, 100)
ax1.set_yticks([30,70])
ax1.text(0.025, 0.95, 'RSI (14)', va='top', transform=ax1.transAxes, fontsize=textsize)
ax1.set_title('%s daily'%ticker)

### plot the price and volume data
# Shift raw high/low by the close-to-adjusted-close offset so the daily
# range bars line up with the split/dividend-adjusted price series.
dx = r.adj_close - r.close
low = r.low + dx
high = r.high + dx

deltas = np.zeros_like(prices)
deltas[1:] = np.diff(prices)
up = deltas>0
ax2.vlines(r.date[up], low[up], high[up], color='black', label='_nolegend_')
ax2.vlines(r.date[~up], low[~up], high[~up], color='black', label='_nolegend_')
ma20 = moving_average(prices, 20, type='simple')
ma200 = moving_average(prices, 200, type='simple')

linema20, = ax2.plot(r.date, ma20, color='blue', lw=2, label='MA (20)')
linema200, = ax2.plot(r.date, ma200, color='red', lw=2, label='MA (200)')


# Text summary of the most recent session (OHLC, volume in millions, change).
last = r[-1]
s = '%s O:%1.2f H:%1.2f L:%1.2f C:%1.2f, V:%1.1fM Chg:%+1.2f' % (
    today.strftime('%d-%b-%Y'),
    last.open, last.high,
    last.low, last.close,
    last.volume*1e-6,
    last.close-last.open )
t4 = ax2.text(0.3, 0.9, s, transform=ax2.transAxes, fontsize=textsize)

props = font_manager.FontProperties(size=10)
leg = ax2.legend(loc='center left', shadow=True, fancybox=True, prop=props)
leg.get_frame().set_alpha(0.5)


# Dollar volume on a twinned y-axis; 5*vmax keeps the bars in the bottom
# fifth of the price panel.
volume = (r.close*r.volume)/1e6  # dollar volume in millions
vmax = volume.max()
poly = ax2t.fill_between(r.date, volume, 0, label='Volume', facecolor=fillcolor, edgecolor=fillcolor)
ax2t.set_ylim(0, 5*vmax)
ax2t.set_yticks([])


### compute the MACD indicator
fillcolor = 'darkslategrey'
nslow = 26
nfast = 12
nema = 9
emaslow, emafast, macd = moving_average_convergence(prices, nslow=nslow, nfast=nfast)
ema9 = moving_average(macd, nema, type='exponential')
ax3.plot(r.date, macd, color='black', lw=2)
ax3.plot(r.date, ema9, color='blue', lw=1)
# Shaded MACD histogram: MACD line minus its 9-period signal line.
ax3.fill_between(r.date, macd-ema9, 0, alpha=0.5, facecolor=fillcolor, edgecolor=fillcolor)


ax3.text(0.025, 0.95, 'MACD (%d, %d, %d)'%(nfast, nslow, nema), va='top',
         transform=ax3.transAxes, fontsize=textsize)

#ax3.set_yticks([])
# turn off upper axis tick labels, rotate the lower ones, etc
for ax in ax1, ax2, ax2t, ax3:
    if ax!=ax3:
        for label in ax.get_xticklabels():
            label.set_visible(False)
    else:
        for label in ax.get_xticklabels():
            label.set_rotation(30)
            label.set_horizontalalignment('right')

    # Status-bar date format when hovering in interactive backends.
    ax.fmt_xdata = mdates.DateFormatter('%Y-%m-%d')


class MyLocator(mticker.MaxNLocator):
    """Pass-through subclass of MaxNLocator, kept as an override point."""

    def __init__(self, *args, **kwargs):
        super(MyLocator, self).__init__(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        return super(MyLocator, self).__call__(*args, **kwargs)

# at most 5 ticks, pruning the upper and lower so they don't overlap
# with other ticks
#ax2.yaxis.set_major_locator(mticker.MaxNLocator(5, prune='both'))
#ax3.yaxis.set_major_locator(mticker.MaxNLocator(5, prune='both'))

ax2.yaxis.set_major_locator(MyLocator(5, prune='both'))
ax3.yaxis.set_major_locator(MyLocator(5, prune='both'))

# Render the three-panel dashboard.
plt.show()



In [66]:
# Candlestick chart from a local semicolon-delimited CSV with a volume
# bar overlay squeezed into the bottom of the same figure.
# NOTE(review): `candlestick` is deprecated (see the warning in this cell's
# output) in favor of candlestick_ochl/candlestick_ohlc, and the whole
# matplotlib.finance module was later removed. Assumes data.csv has columns
# date;open;close;max;min;volume — confirm against the actual file.
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.finance import candlestick
from matplotlib.finance import volume_overlay3
from matplotlib.dates import num2date
from matplotlib.dates import date2num
import matplotlib.mlab as mlab
import datetime

datafile = 'data.csv'
r = mlab.csv2rec(datafile, delimiter=';')

# the dates in my example file-set are very sparse (and annoying) change the dates to be sequential
for i in range(len(r)-1):
    r['date'][i+1] = r['date'][i] + datetime.timedelta(days=1)

# NOTE(review): `candlesticks` is iterated twice below — fine on Python 2
# where zip() returns a list, but on Python 3 zip() is a one-shot iterator.
candlesticks = zip(date2num(r['date']),r['open'],r['close'],r['max'],r['min'],r['volume'])

fig = plt.figure()
ax = fig.add_subplot(1,1,1)

ax.set_ylabel('Quote ($)', size=20)
candlestick(ax, candlesticks,width=1,colorup='g', colordown='r')

# shift y-limits of the candlestick plot so that there is space at the bottom for the volume bar chart
pad = 0.25
yl = ax.get_ylim()
ax.set_ylim(yl[0]-(yl[1]-yl[0])*pad,yl[1])

# create the second axis for the volume bar-plot
ax2 = ax.twinx()


# set the position of ax2 so that it is short (y2=0.32) but otherwise the same size as ax
ax2.set_position(matplotlib.transforms.Bbox([[0.125,0.1],[0.9,0.32]]))

# get data from candlesticks for a bar plot
dates = [x[0] for x in candlesticks]
dates = np.asarray(dates)
volume = [x[5] for x in candlesticks]
volume = np.asarray(volume)

# make bar plots and color differently depending on up/down for the day
pos = r['open']-r['close']<0
neg = r['open']-r['close']>0
ax2.bar(dates[pos],volume[pos],color='green',width=1,align='center')
ax2.bar(dates[neg],volume[neg],color='red',width=1,align='center')

#scale the x-axis tight
ax2.set_xlim(min(dates),max(dates))
# the y-ticks for the bar were too dense, keep only every third one
yticks = ax2.get_yticks()
ax2.set_yticks(yticks[::3])

ax2.yaxis.set_label_position("right")
ax2.set_ylabel('Volume', size=20)

# format the x-ticks with a human-readable date. 
xt = ax.get_xticks()
new_xticks = [datetime.date.isoformat(num2date(d)) for d in xt]
ax.set_xticklabels(new_xticks,rotation=45, horizontalalignment='right')

plt.ion()
plt.show()


/usr/local/lib/python2.7/site-packages/matplotlib/finance.py:865: MatplotlibDeprecationWarning: This function has been deprecated in 1.4 in favor of `candlestick_ochl`, which maintains the original argument order, or `candlestick_ohlc`, which uses the open-high-low-close order. This function will be removed in 1.5
  mplDeprecation)