Commit
Fix a bunch of deprecations and stylistic problems
twiecki committed Aug 15, 2019
1 parent 712716a commit c455289
Showing 8 changed files with 29 additions and 26 deletions.
4 changes: 2 additions & 2 deletions pyfolio/plotting.py
@@ -645,7 +645,7 @@ def show_perf_stats(returns, factor_returns=None, positions=None,
perf_stats = pd.DataFrame(perf_stats_all, columns=['Backtest'])

for column in perf_stats.columns:
- for stat, value in perf_stats[column].iteritems():
+ for stat, value in perf_stats[column].items():
if stat in STAT_FUNCS_PCT:
perf_stats.loc[stat, column] = str(np.round(value * 100,
1)) + '%'
@@ -1702,7 +1702,7 @@ def cumulate_returns(x):
y=monthly_rets.values,
color='steelblue')

- locs, labels = plt.xticks()
+ _, labels = plt.xticks()
plt.setp(labels, rotation=90)

# only show x-labels on year boundary
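
Background on the two plotting.py changes above: pandas deprecated Series.iteritems() in favour of Series.items(), and binding return values that are never used (here the tick locations from plt.xticks()) to _ keeps linters quiet. A minimal standalone sketch of both idioms, using toy data rather than pyfolio's real objects:

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

# Toy stand-in for the perf_stats frame built in show_perf_stats()
perf_stats = pd.DataFrame({'Backtest': [0.1234, 1.5]},
                          index=['Annual return', 'Sharpe ratio'])

# Series.items() replaces the deprecated Series.iteritems()
for stat, value in perf_stats['Backtest'].items():
    print(stat, np.round(value * 100, 1))

# The tick locations are not needed, so discard them with `_`
plt.plot(range(12))
_, labels = plt.xticks()
plt.setp(labels, rotation=90)
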
20 changes: 11 additions & 9 deletions pyfolio/round_trips.py
@@ -120,7 +120,7 @@ def vwap(transaction):
transaction.amount.sum()

out = []
- for sym, t in txn.groupby('symbol'):
+ for _, t in txn.groupby('symbol'):
t = t.sort_index()
t.index.name = 'dt'
t = t.reset_index()
@@ -130,11 +130,11 @@ def vwap(transaction):
1) != t.order_sign).astype(int).cumsum()
t['block_time'] = ((t.dt.sub(t.dt.shift(1))) >
max_delta).astype(int).cumsum()
- grouped_price = (t.groupby(('block_dir',
- 'block_time'))
+ grouped_price = (t.groupby(['block_dir',
+ 'block_time'])
.apply(vwap))
grouped_price.name = 'price'
- grouped_rest = t.groupby(('block_dir', 'block_time')).agg({
+ grouped_rest = t.groupby(['block_dir', 'block_time']).agg({
'amount': 'sum',
'symbol': 'first',
'dt': 'first'})
@@ -265,7 +265,7 @@ def extract_round_trips(transactions,
minute=0,
second=0))

- tmp = roundtrips.join(pv, on='date', lsuffix='_')
+ tmp = roundtrips.set_index('date').join(pv.set_index('date'), lsuffix='_').reset_index()

roundtrips['returns'] = tmp.pnl / tmp.portfolio_value
roundtrips = roundtrips.drop('date', axis='columns')
@@ -301,15 +301,17 @@ def add_closing_transactions(positions, transactions):
# they don't conflict with other round_trips executed at that time.
end_dt = open_pos.name + pd.Timedelta(seconds=1)

- for sym, ending_val in open_pos.iteritems():
+ for sym, ending_val in open_pos.items():
txn_sym = transactions[transactions.symbol == sym]

ending_amount = txn_sym.amount.sum()

ending_price = ending_val / ending_amount
- closing_txn = {'symbol': sym,
- 'amount': -ending_amount,
- 'price': ending_price}
+ closing_txn = OrderedDict([
+ ('amount', -ending_amount),
+ ('price', ending_price),
+ ('symbol', sym),
+ ])

closing_txn = pd.DataFrame(closing_txn, index=[end_dt])
closed_txns = closed_txns.append(closing_txn)
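
Two details behind the round_trips.py changes, noted here as background: pandas 0.25 deprecated passing a tuple to groupby as a collection of keys (a tuple will eventually mean a single key), so a list is the unambiguous spelling; and building the closing transaction from an OrderedDict pins the column order of the resulting DataFrame on interpreters where a plain dict does not preserve insertion order. A small sketch with made-up transactions:

from collections import OrderedDict

import pandas as pd

t = pd.DataFrame({'block_dir': [0, 0, 1],
                  'block_time': [0, 0, 1],
                  'amount': [10, 5, -15],
                  'price': [101.0, 102.0, 103.0]})

# A list of keys, not a tuple, avoids the pandas FutureWarning
grouped = t.groupby(['block_dir', 'block_time'])['amount'].sum()
print(grouped)

# OrderedDict keeps the constructed columns in a deterministic order
closing_txn = OrderedDict([('amount', -15),
                           ('price', 103.0),
                           ('symbol', 'A')])
print(pd.DataFrame(closing_txn, index=[pd.Timestamp('2019-08-15')]))
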
6 changes: 3 additions & 3 deletions pyfolio/tears.py
@@ -1066,7 +1066,7 @@ def create_capacity_tear_sheet(returns, positions, transactions,
llt[llt['max_pct_bar_consumed'] > trade_daily_vol_limit * 100])

bt_starting_capital = positions.iloc[0].sum() / (1 + returns.iloc[0])
- fig, ax_capacity_sweep = plt.subplots(figsize=(14, 6))
+ _, ax_capacity_sweep = plt.subplots(figsize=(14, 6))
plotting.plot_capacity_sweep(returns, transactions, market_data,
bt_starting_capital,
min_pv=100000,
@@ -1516,7 +1516,7 @@ def create_perf_attrib_tear_sheet(returns,

if factor_partitions is not None:

- for factor_type, partitions in factor_partitions.iteritems():
+ for factor_type, partitions in factor_partitions.items():

columns_to_select = perf_attrib_data.columns.intersection(
partitions
@@ -1531,7 +1531,7 @@
)
current_section += 1

- for factor_type, partitions in factor_partitions.iteritems():
+ for factor_type, partitions in factor_partitions.items():

perf_attrib.plot_risk_exposures(
portfolio_exposures[portfolio_exposures.columns
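
Worth spelling out for the tears.py hunks: unlike Series.iteritems(), which was merely deprecated, dict.iteritems() never existed on Python 3 at all, so if factor_partitions is an ordinary mapping the old code would raise AttributeError there; .items() works on both dicts and pandas Series. A short illustration with a hypothetical partition mapping (not pyfolio's actual default):

# Hypothetical partitions, shaped like the mapping tears.py iterates over
factor_partitions = {'style': ['momentum', 'value'],
                     'sector': ['technology', 'utilities']}

# Python 3: dict.items(); dict.iteritems() would raise AttributeError
for factor_type, partitions in factor_partitions.items():
    print(factor_type, partitions)
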
8 changes: 4 additions & 4 deletions pyfolio/tests/test_perf_attrib.py
@@ -117,8 +117,8 @@ def test_perf_attrib_simple(self):

expected_perf_attrib_output = pd.DataFrame(
index=dts,
- columns=['risk_factor1', 'risk_factor2', 'common_returns',
- 'specific_returns', 'total_returns'],
+ columns=['risk_factor1', 'risk_factor2', 'total_returns',
+ 'common_returns', 'specific_returns'],
data={'risk_factor1': [0.025, 0.025],
'risk_factor2': [0.025, 0.025],
'common_returns': [0.05, 0.05],
@@ -157,8 +157,8 @@ def test_perf_attrib_simple(self):

expected_perf_attrib_output = pd.DataFrame(
index=dts,
- columns=['risk_factor1', 'risk_factor2', 'common_returns',
- 'specific_returns', 'total_returns'],
+ columns=['risk_factor1', 'risk_factor2', 'total_returns',
+ 'common_returns', 'specific_returns'],
data={'risk_factor1': [0.0, 0.0],
'risk_factor2': [0.0, 0.0],
'common_returns': [0.0, 0.0],
4 changes: 2 additions & 2 deletions pyfolio/tests/test_round_trips.py
@@ -152,12 +152,12 @@ def test_add_closing_trades(self):
[-5, 10, 'A'],
[-1, 10, 'B']],
columns=['amount', 'price', 'symbol'],
- index=[dates[:3]])
+ index=dates[:3])
positions = DataFrame(data=[[20, 10, 0],
[-30, 10, 30],
[-60, 0, 30]],
columns=['A', 'B', 'cash'],
- index=[dates[:3]])
+ index=dates[:3])

expected_ix = dates[:3].append(DatetimeIndex([dates[2] +
Timedelta(seconds=1)]))
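
The bracket change above is easy to miss: wrapping the dates in an extra list, index=[dates[:3]], makes pandas treat it as a list of index arrays and, on most versions, build a one-level MultiIndex, whereas index=dates[:3] yields the plain DatetimeIndex the test's expected output uses. A minimal sketch of the difference, with toy values:

import pandas as pd

dates = pd.date_range('2015-01-01', periods=3)
data = [[2, 10, 'A'], [-2, 10, 'A'], [-1, 10, 'B']]
cols = ['amount', 'price', 'symbol']

wrapped = pd.DataFrame(data, columns=cols, index=[dates])  # extra brackets
plain = pd.DataFrame(data, columns=cols, index=dates)

print(type(wrapped.index).__name__)  # typically MultiIndex (one level)
print(type(plain.index).__name__)    # DatetimeIndex
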
9 changes: 5 additions & 4 deletions pyfolio/tests/test_timeseries.py
@@ -60,7 +60,7 @@ def test_gen_drawdown_table_relative(
first_expected_recovery, first_net_drawdown,
second_expected_peak, second_expected_valley,
second_expected_recovery, second_net_drawdown
- ):
+ ):

rets = px.pct_change()

Expand Down Expand Up @@ -264,11 +264,12 @@ class TestStats(TestCase):
dt_2 = pd.date_range('2000-1-3', periods=8, freq='D')

@parameterized.expand([
- (simple_rets[:5], 2, '[nan, inf, inf, 11.224972160321828, inf]')
+ (simple_rets[:5], 2, [np.nan, np.inf, np.inf, 11.224972160321, np.inf])
])
def test_sharpe_2(self, returns, rolling_sharpe_window, expected):
- self.assertEqual(str(timeseries.rolling_sharpe(
- returns, rolling_sharpe_window).values.tolist()), expected)
+ np.testing.assert_array_almost_equal(timeseries.rolling_sharpe(returns,
+ rolling_sharpe_window).values,
+ np.asarray(expected))

@parameterized.expand([
(simple_rets[:5], simple_benchmark, 2, 0)
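
Background on the rewritten assertion in test_timeseries.py: comparing the string repr of a float list against a hard-coded literal breaks whenever numpy or pandas changes its float formatting or the last digits of the computation drift, while np.testing.assert_array_almost_equal compares the values numerically (to six decimals by default) and requires nan/inf entries to line up. A standalone sketch of the pattern:

import numpy as np

expected = [np.nan, np.inf, np.inf, 11.224972160321, np.inf]
computed = np.array([np.nan, np.inf, np.inf, 11.224972160321828, np.inf])

# Fragile: an exact string comparison fails on tiny precision differences
# assert str(computed.tolist()) == '[nan, inf, inf, 11.224972160321828, inf]'

# Robust: numeric comparison; matching nan/inf positions count as equal
np.testing.assert_array_almost_equal(computed, np.asarray(expected))
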
2 changes: 1 addition & 1 deletion pyfolio/timeseries.py
@@ -952,7 +952,7 @@ def get_top_drawdowns(returns, top=10):
underwater = df_cum / running_max - 1

drawdowns = []
- for t in range(top):
+ for _ in range(top):
peak, valley, recovery = get_max_drawdown_underwater(underwater)
# Slice out draw-down period
if not pd.isnull(recovery):
2 changes: 1 addition & 1 deletion pyfolio/utils.py
@@ -354,7 +354,7 @@ def estimate_intraday(returns, positions, transactions, EOD_hour=23):
# Calculate exposure, then take peak of exposure every day
txn_val['exposure'] = txn_val.abs().sum(axis=1)
condition = (txn_val['exposure'] == txn_val.groupby(
- pd.TimeGrouper('24H'))['exposure'].transform(max))
+ pd.Grouper(freq='24H'))['exposure'].transform(max))
txn_val = txn_val[condition].drop('exposure', axis=1)

# Compute cash delta
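
For the last hunk, in utils.py: pd.TimeGrouper was deprecated in pandas 0.21 and later removed, and pd.Grouper(freq=...) is the documented replacement, with the resampling frequency passed as a keyword. A small sketch on synthetic intraday exposures:

import numpy as np
import pandas as pd

idx = pd.date_range('2019-08-15 09:30', periods=8, freq='4H')
txn_val = pd.DataFrame({'exposure': np.abs(np.random.randn(8))}, index=idx)

# pd.Grouper(freq='24H') replaces the removed pd.TimeGrouper('24H')
daily_peak = txn_val.groupby(pd.Grouper(freq='24H'))['exposure'].transform(max)
print(txn_val[txn_val['exposure'] == daily_peak])
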
