# Time zone localization and conversion with pandas / pytz.
In [1]:
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
import pytz
pytz.common_timezones[-5:]
Out[1]:
In [2]:
tz = pytz.timezone('US/Eastern')
tz
Out[2]:
In [3]:
# Naive (timezone-less) daily range starting 9:30 AM on 2012-03-09.
rng = pd.date_range('3/9/2012 9:30', periods=6, freq='D')
ts = Series(np.random.randn(len(rng)), index=rng)
In [4]:
# A naive index has tz None.
print(ts.index.tz)
In [5]:
pd.date_range('3/9/2012 9:30', periods=10, freq='D', tz='UTC')
Out[5]:
In [6]:
# tz_localize attaches a zone to naive timestamps (no wall-clock shift).
ts_utc = ts.tz_localize('UTC')
ts_utc
Out[6]:
In [7]:
ts_utc.index
Out[7]:
In [8]:
# tz_convert translates an already-localized index between zones.
ts_utc.tz_convert('US/Eastern')
Out[8]:
In [9]:
ts_eastern = ts.tz_localize('US/Eastern')
ts_eastern.tz_convert('UTC')
Out[9]:
In [10]:
ts_eastern.tz_convert('Europe/Berlin')
Out[10]:
In [11]:
# tz_localize is also available on the index itself.
ts.index.tz_localize('Asia/Shanghai')
Out[11]:
In [12]:
# Scalar Timestamps support the same tz_localize / tz_convert methods.
stamp = pd.Timestamp('2011-03-12 04:00')
stamp_utc = stamp.tz_localize('utc')
stamp_utc.tz_convert('US/Eastern')
Out[12]:
In [13]:
stamp_moscow = pd.Timestamp('2011-03-12 04:00', tz='Europe/Moscow')
stamp_moscow
Out[13]:
In [14]:
# .value is the epoch nanosecond count; the next cell shows it is
# unchanged by time zone conversion.
stamp_utc.value
Out[14]:
In [15]:
stamp_utc.tz_convert('US/Eastern').value
Out[15]:
In [16]:
from pandas.tseries.offsets import Hour
# 30 minutes before a US DST transition, to show offset arithmetic
# respecting daylight saving.
stamp = pd.Timestamp('2012-03-12 01:30', tz='US/Eastern')
stamp
Out[16]:
In [17]:
stamp + Hour()
Out[17]:
In [18]:
# 30 minutes before the fall-back transition.
stamp = pd.Timestamp('2012-11-04 00:30', tz='US/Eastern')
stamp
Out[18]:
In [19]:
stamp + 2 * Hour()
Out[19]:
In [20]:
# Business-day range; used to demonstrate cross-time-zone arithmetic.
rng = pd.date_range('3/7/2012 9:30', periods=10, freq='B')
ts = Series(np.random.randn(len(rng)), index=rng)
ts
Out[20]:
In [21]:
ts1 = ts[:7].tz_localize('Europe/London')
ts2 = ts1[2:].tz_convert('Europe/Moscow')
# Combining series with different time zones: the result index is UTC.
result = ts1 + ts2
result.index
Out[21]:
In [22]:
# Periods represent fixed-frequency time spans ('A-DEC' = annual,
# December year-end).
p = pd.Period(2007, freq='A-DEC')
p
Out[22]:
In [23]:
# Integer arithmetic shifts by the period's own frequency.
p + 5
Out[23]:
In [24]:
p - 2
Out[24]:
In [25]:
# Difference of two like-frequency periods is the number of units
# between them.
pd.Period('2014', freq='A-DEC') - p
Out[25]:
In [26]:
rng = pd.period_range('1/1/2000', '6/30/2000', freq='M')
rng
Out[26]:
In [27]:
Series(np.random.randn(6), index=rng)
Out[27]:
In [28]:
# A PeriodIndex can be built directly from an array of period strings.
values = ['2001Q3', '2002Q2', '2003Q1']
index = pd.PeriodIndex(values, freq='Q-DEC')
index
Out[28]:
In [30]:
# asfreq converts a period to another frequency, anchored at either
# the start or the end of the span.
p = pd.Period('2007', freq='A-DEC')
p.asfreq('M', how='start')
Out[30]:
In [31]:
p.asfreq('M', how='end')
Out[31]:
In [32]:
# Fiscal year ending in June: the 2007 period spans Jul 2006 - Jun 2007.
p = pd.Period('2007', freq='A-JUN')
p.asfreq('M', 'start')
Out[32]:
In [33]:
p.asfreq('M', 'end')
Out[33]:
In [34]:
# Going the other way: which A-JUN fiscal year contains Aug-2007?
p = pd.Period('Aug-2007', 'M')
p.asfreq('A-JUN')
Out[34]:
In [35]:
rng = pd.period_range('2006', '2009', freq='A-DEC')
ts = Series(np.random.randn(len(rng)), index=rng)
ts
Out[35]:
In [36]:
# asfreq on a series converts its PeriodIndex, leaving values untouched.
ts.asfreq('M', how='start')
Out[36]:
In [37]:
ts.asfreq('B', how='end')
Out[37]:
In [38]:
# Quarterly period with fiscal year ending in January.
p = pd.Period('2012Q4', freq='Q-JAN')
p
Out[38]:
In [39]:
p.asfreq('D', 'start')
Out[39]:
In [40]:
p.asfreq('D', 'end')
Out[40]:
In [41]:
# 4 PM on the second-to-last business day of the quarter: last business
# day minus 1, converted to minute ('T') resolution at its start, plus
# 16 hours' worth of minutes.
p4pm = (p.asfreq('B', 'e') - 1).asfreq('T', 's') + 16 * 60
p4pm
Out[41]:
In [42]:
p4pm.to_timestamp()
Out[42]:
In [43]:
rng = pd.period_range('2011Q3', '2012Q4', freq='Q-JAN')
ts = Series(np.arange(len(rng)), index=rng)
ts
Out[43]:
In [44]:
# Same trick vectorized over the whole PeriodIndex.
new_rng = (rng.asfreq('B', 'e') - 1).asfreq('T', 's') + 16 * 60
ts.index = new_rng.to_timestamp()
ts
Out[44]:
In [46]:
# Timestamp index -> period index (frequency inferred from the index).
rng = pd.date_range('1/1/2000', periods=3, freq='M')
ts = Series(np.random.randn(3), index=rng)
pts = ts.to_period()
ts
Out[46]:
In [47]:
pts
Out[47]:
In [49]:
# Distinct daily timestamps may map to the same monthly period.
rng = pd.date_range('1/29/2000', periods=6, freq='D')
ts2 = Series(np.random.randn(6), index=rng)
ts2.to_period('M')
Out[49]:
In [50]:
pts = ts.to_period()
pts
Out[50]:
In [51]:
# Back to timestamps, anchored at the end of each period.
pts.to_timestamp(how='end')
Out[51]:
In [52]:
# Quarterly macro data; year and quarter are stored as separate columns.
# NOTE(review): assumes macrodata.csv is in the working directory.
data = pd.read_csv('macrodata.csv')
data.year
Out[52]:
In [53]:
data.quarter
Out[53]:
In [54]:
# Build a quarterly PeriodIndex from the year/quarter columns.
index = pd.PeriodIndex(year=data.year, quarter=data.quarter, freq='Q-DEC')
index
Out[54]:
In [55]:
data.index = index
data.infl
Out[55]:
In [57]:
# Downsample daily data to monthly means.
rng = pd.date_range('1/1/2000', periods=100, freq='D')
ts = Series(np.random.randn(len(rng)), index=rng)
ts.resample('M').mean()
Out[57]:
In [58]:
# NOTE(review): kind='period' is deprecated in pandas 2.2; the suggested
# replacement is resampling then converting with .to_period() — confirm
# the target pandas version before relying on this.
ts.resample('M', kind='period').mean()
Out[58]:
In [59]:
rng = pd.date_range('1/1/2000', periods=12, freq='T')
ts = Series(np.arange(12), index=rng)
ts
Out[59]:
In [61]:
ts.resample('5min').sum()
Out[61]:
In [62]:
# closed controls which bin edge is inclusive; label controls which
# edge names the bin.
ts.resample('5min', closed='left').sum()
Out[62]:
In [63]:
ts.resample('5min', closed='left', label='left').sum()
Out[63]:
In [64]:
ts.resample('5min', loffset='-1s').sum()
Out[64]:
In [66]:
ts.resample('5min').ohlc()
Out[66]:
In [67]:
# groupby on a function of the index: "resampling" by month number.
rng = pd.date_range('1/1/2000', periods=100, freq='D')
ts = Series(np.arange(100), index=rng)
ts.groupby(lambda x: x.month).mean()
Out[67]:
In [68]:
ts.groupby(lambda x: x.weekday).mean()
Out[68]:
In [70]:
# Weekly (Wednesday-anchored) frame used to demonstrate upsampling.
frame = DataFrame(np.random.randn(2, 4),
index=pd.date_range('1/1/2000', periods=2, freq='W-WED'),
columns=['Colorado', 'Texas', 'New York', 'Ohio'])
frame
Out[70]:
In [71]:
# Without an aggregation this is just the lazy Resampler object.
df_daily = frame.resample('D')
df_daily
Out[71]:
In [73]:
# Upsample to daily, forward-filling the introduced missing rows.
frame.resample('D').ffill()
Out[73]:
In [74]:
# limit bounds how far a value may be carried forward.
frame.resample('D').ffill(limit=2)
Out[74]:
In [75]:
frame.resample('W-THU').ffill()
Out[75]:
In [76]:
# Monthly period frame for period-resampling examples.
frame = DataFrame(np.random.randn(24, 4),
index=pd.period_range('1-2000', '12-2001', freq='M'),
columns=['Colorado', 'Texas', 'New York', 'Ohio'])
frame[:5]
Out[76]:
In [78]:
# Downsample monthly periods to annual means.
annual_frame = frame.resample('A-DEC').mean()
annual_frame
Out[78]:
In [79]:
# Upsampling periods: `convention` decides which end of the annual span
# the values are anchored to before filling.
annual_frame.resample('Q-DEC').ffill()
Out[79]:
In [80]:
annual_frame.resample('Q-DEC', convention='start').ffill()
Out[80]:
In [81]:
annual_frame.resample('Q-MAR').ffill()
Out[81]:
In [83]:
# Daily stock prices; resample onto business days with forward fill.
# NOTE(review): assumes stock_px.csv is in the working directory.
close_px_all = pd.read_csv('stock_px.csv', parse_dates=True, index_col=0)
close_px = close_px_all[['AAPL', 'MSFT', 'XOM']]
close_px = close_px.resample('B').ffill()
close_px.info()
In [84]:
%matplotlib inline
In [85]:
close_px['AAPL'].plot()
Out[85]:
In [86]:
close_px.ix['2009'].plot()
Out[86]:
In [87]:
close_px['AAPL'].ix['01-2011':'03-2011'].plot()
Out[87]:
In [88]:
appl_q = close_px['AAPL'].resample('Q-DEC').ffill()
appl_q.ix['2009':].plot()
Out[88]:
In [89]:
close_px = close_px.asfreq('B').fillna(method='ffill')
In [97]:
close_px.AAPL.plot()
pd.rolling_mean(close_px.AAPL, 250).plot()
Out[97]:
In [92]:
from matplotlib import pyplot as plt
plt.figure()
Out[92]:
In [96]:
appl_std250 = pd.rolling_std(close_px.AAPL, 250, min_periods=10)
appl_std250[5:12]
Out[96]:
In [98]:
appl_std250.plot()
Out[98]:
In [100]:
expanding_mean = lambda x: rolling_mean(x, len(x), min_periods=1)
pd.rolling_mean(close_px, 60).plot(logy=True)
Out[100]:
In [101]:
plt.close('all')
In [102]:
fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True, sharey=True,
figsize=(12, 7))
aapl_px = close_px.AAPL['2005':'2009']
ma60 = pd.rolling_mean(aapl_px, 60, min_periods=50)
ewma60 = pd.ewma(aapl_px, span=60)
aapl_px.plot(style='k-', ax=axes[0])
ma60.plot(style='k--', ax=axes[0])
aapl_px.plot(style='k-', ax=axes[1])
ewma60.plot(style='k--', ax=axes[1])
axes[0].set_title('Simple MA')
axes[1].set_title('Exponentially-weighted MA')
Out[102]:
In [103]:
close_px
spx_px = close_px_all['SPX']
In [106]:
spx_rets = spx_px / spx_px.shift(1) - 1
returns = close_px.pct_change()
corr = pd.rolling_corr(returns.AAPL, spx_rets, 125, min_periods=100)
corr.plot()
Out[106]:
In [107]:
corr = pd.rolling_corr(returns, spx_rets, 125, min_periods=100)
corr.plot()
Out[107]:
In [108]:
from scipy.stats import percentileofscore
score_at_2percent = lambda x: percentileofscore(x, 0.02)
result = pd.rolling_apply(returns.AAPL, 250, score_at_2percent)
result.plot()
Out[108]:
In [110]:
rng = pd.date_range('1/1/2000', periods=10000000, freq='10ms')
ts = Series(np.random.randn(len(rng)), index=rng)
ts
Out[110]:
In [111]:
ts.resample('15min', how='ohlc').info()
In [113]:
%timeit ts.resample('15min').ohlc()
In [114]:
rng = pd.date_range('1/1/2000', periods=10000000, freq='1s')
ts = Series(np.random.randn(len(rng)), index=rng)
%timeit ts.resample('15s').ohlc()
In [ ]: