I am trying to replicate the sample shown on https://www.backtrader.com/docu/analyzers/pyfolio.html on my computer so that I can begin to use it on my own strategy knowing it works properly. I am using python 3.6 and have downgraded to the version of PyFolio used for the backtrader integration (0.5.1).
The code I have is identical to the sample in the link:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import argparse
import datetime
import random
import backtrader as bt
class St(bt.Strategy):
params = (
('printout', False),
('stake', 1000),
)
def __init__(self):
pass
def start(self):
if self.p.printout:
txtfields = list()
txtfields.append('Len')
txtfields.append('Datetime')
txtfields.append('Open')
txtfields.append('High')
txtfields.append('Low')
txtfields.append('Close')
txtfields.append('Volume')
txtfields.append('OpenInterest')
print(','.join(txtfields))
def next(self):
if self.p.printout:
# Print only 1st data ... is just a check that things are running
txtfields = list()
txtfields.append('%04d' % len(self))
txtfields.append(self.data.datetime.datetime(0).isoformat())
txtfields.append('%.2f' % self.data0.open[0])
txtfields.append('%.2f' % self.data0.high[0])
txtfields.append('%.2f' % self.data0.low[0])
txtfields.append('%.2f' % self.data0.close[0])
txtfields.append('%.2f' % self.data0.volume[0])
txtfields.append('%.2f' % self.data0.openinterest[0])
print(','.join(txtfields))
# Data 0
for data in self.datas:
toss = random.randint(1, 10)
curpos = self.getposition(data)
if curpos.size:
if toss > 5:
size = curpos.size // 2
self.sell(data=data, size=size)
if self.p.printout:
print('SELL {} @%{}'.format(size, data.close[0]))
elif toss < 5:
self.buy(data=data, size=self.p.stake)
if self.p.printout:
print('BUY {} @%{}'.format(self.p.stake, data.close[0]))
def runstrat(args=None):
args = parse_args(args)
cerebro = bt.Cerebro()
cerebro.broker.set_cash(args.cash)
dkwargs = dict()
if args.fromdate:
fromdate = datetime.datetime.strptime(args.fromdate, '%Y-%m-%d')
dkwargs['fromdate'] = fromdate
if args.todate:
todate = datetime.datetime.strptime(args.todate, '%Y-%m-%d')
dkwargs['todate'] = todate
data0 = bt.feeds.BacktraderCSVData(dataname=args.data0, **dkwargs)
cerebro.adddata(data0, name='Data0')
data1 = bt.feeds.BacktraderCSVData(dataname=args.data1, **dkwargs)
cerebro.adddata(data1, name='Data1')
data2 = bt.feeds.BacktraderCSVData(dataname=args.data2, **dkwargs)
cerebro.adddata(data2, name='Data2')
cerebro.addstrategy(St, printout=args.printout)
if not args.no_pyfolio:
cerebro.addanalyzer(bt.analyzers.PyFolio, _name='pyfolio')
results = cerebro.run()
if not args.no_pyfolio:
strat = results[0]
pyfoliozer = strat.analyzers.getbyname('pyfolio')
returns, positions, transactions, gross_lev = pyfoliozer.get_pf_items()
if args.printout:
print('-- RETURNS')
print(returns)
print('-- POSITIONS')
print(positions)
print('-- TRANSACTIONS')
print(transactions)
print('-- GROSS LEVERAGE')
print(gross_lev)
import pyfolio as pf
pf.create_full_tear_sheet(
returns,
positions=positions,
transactions=transactions,
gross_lev=gross_lev,
live_start_date='2005-05-01',
round_trips=True)
if args.plot:
cerebro.plot(style=args.plot_style)
def parse_args(args=None):
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='Sample for pivot point and cross plotting')
parser.add_argument('--data0', required=False,
default='../../datas/yhoo-1996-2015.txt',
help='Data to be read in')
parser.add_argument('--data1', required=False,
default='../../datas/orcl-1995-2014.txt',
help='Data to be read in')
parser.add_argument('--data2', required=False,
default='../../datas/nvda-1999-2014.txt',
help='Data to be read in')
parser.add_argument('--fromdate', required=False,
default='2005-01-01',
help='Starting date in YYYY-MM-DD format')
parser.add_argument('--todate', required=False,
default='2006-12-31',
help='Ending date in YYYY-MM-DD format')
parser.add_argument('--printout', required=False, action='store_true',
help=('Print data lines'))
parser.add_argument('--cash', required=False, action='store',
type=float, default=50000,
help=('Cash to start with'))
parser.add_argument('--plot', required=False, action='store_true',
help=('Plot the result'))
parser.add_argument('--plot-style', required=False, action='store',
default='bar', choices=['bar', 'candle', 'line'],
help=('Plot style'))
parser.add_argument('--no-pyfolio', required=False, action='store_true',
help=('Do not do pyfolio things'))
import sys
aargs = args if args is not None else sys.argv[1:]
return parser.parse_args(aargs)
runstrat([])
However, my output is different to the one in the link (the values of the statistics, and produces some error messages along the way which I believe is preventing some of the statistics from being calculated):
Entire data start date: 2005-01-03
Entire data end date: 2006-12-29
Out-of-Sample Months: 20
Backtest Months: 3
Performance statistics All history Backtest Out of sample
annual_return -0.01 -0.07 0.00
annual_volatility 0.23 0.57 0.00
sharpe_ratio 0.05 0.12 0.22
calmar_ratio -0.05 -0.30 0.14
stability_of_timeseries -0.19 -0.41 -0.15
max_drawdown -0.24 -0.24 -0.00
omega_ratio 1.08 1.08 1.06
sortino_ratio 0.11 0.28 0.25
skew 15.26 6.14 -9.40
kurtosis 339.59 52.70 145.92
tail_ratio 0.77 0.06 1.05
common_sense_ratio 0.76 0.06 1.05
information_ratio -0.02 0.02 -0.09
alpha 0.01 0.07 -0.00
beta 0.00 0.02 0.00
/Users/maxpaton/anaconda3/lib/python3.6/site-packages/numpy/core/fromnumeric.py:52: FutureWarning: 'argmin' is deprecated, use 'idxmin' instead. The behavior of 'argmin'
will be corrected to return the positional minimum in the future.
Use 'series.values.argmin' to get the position of the minimum now.
return getattr(obj, method)(*args, **kwds)
Worst Drawdown Periods net drawdown in % peak date valley date recovery date duration
0 24.27 2005-01-10 2005-01-25 NaT NaN
1 0.80 2005-01-03 2005-01-04 2005-01-07 5
2 0.00 2005-01-03 2005-01-03 2005-01-03 1
3 0.00 2005-01-03 2005-01-03 2005-01-03 1
4 0.00 2005-01-03 2005-01-03 2005-01-03 1
[-0.029 -0.069]
/Users/maxpaton/anaconda3/lib/python3.6/site-packages/seaborn/categorical.py:454: FutureWarning: remove_na is deprecated and is a private function. Do not use.
box_data = remove_na(group_data)
Stress Events mean min max
Low Volatility Bull Market 0.00% -11.26% 29.32%
Top 10 long positions of all time max
Data2 35.47%
Empty DataFrame
Columns: [max]
Index: []
Top 10 positions of all time max
Data2 35.47%
All positions ever held max
Data2 35.47%
/Users/maxpaton/anaconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py:6462: UserWarning: The 'normed' kwarg is deprecated, and has been replaced by the 'density' kwarg.
warnings.warn("The 'normed' kwarg is deprecated, and has been "
/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pyfolio/round_trips.py:127: FutureWarning: Interpreting tuple 'by' as a list of keys, rather than a single key. Use 'by=[...]' instead of 'by=(...)'. In the future, a tuple will always mean a single key.
'block_time'))
/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pyfolio/round_trips.py:130: FutureWarning: Interpreting tuple 'by' as a list of keys, rather than a single key. Use 'by=[...]' instead of 'by=(...)'. In the future, a tuple will always mean a single key.
grouped_rest = t.groupby(('block_dir', 'block_time')).agg({
Traceback (most recent call last):
File "BT_pyfolio_eg.py", line 177, in <module>
runstrat([])
File "BT_pyfolio_eg.py", line 120, in runstrat
round_trips=True)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pyfolio/tears.py", line 192, in create_full_tear_sheet
sector_mappings=sector_mappings)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pyfolio/plotting.py", line 49, in call_w_context
return func(*args, **kwargs)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pyfolio/tears.py", line 584, in create_round_trip_tear_sheet
portfolio_value=positions.sum(axis='columns') / (1 + returns)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pyfolio/round_trips.py", line 261, in extract_round_trips
.join(pv, on='date', lsuffix='_')
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pandas/core/frame.py", line 6326, in join
rsuffix=rsuffix, sort=sort)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pandas/core/frame.py", line 6341, in _join_compat
suffixes=(lsuffix, rsuffix), sort=sort)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pandas/core/reshape/merge.py", line 60, in merge
validate=validate)
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pandas/core/reshape/merge.py", line 554, in __init__
self._maybe_coerce_merge_keys()
File "/Users/maxpaton/anaconda3/lib/python3.6/site-packages/pandas/core/reshape/merge.py", line 986, in _maybe_coerce_merge_keys
raise ValueError(msg)
ValueError: You are trying to merge on datetime64[ns, UTC] and datetime64[ns] columns. If you wish to proceed you should use pd.concat
Please could you tell me why this is, and suggest a solution?