
Commit c455289

Fix a bunch of deprecations and stylistic problems
1 parent 712716a

8 files changed, with 29 additions and 26 deletions

pyfolio/plotting.py

Lines changed: 2 additions & 2 deletions
@@ -645,7 +645,7 @@ def show_perf_stats(returns, factor_returns=None, positions=None,
     perf_stats = pd.DataFrame(perf_stats_all, columns=['Backtest'])

     for column in perf_stats.columns:
-        for stat, value in perf_stats[column].iteritems():
+        for stat, value in perf_stats[column].items():
             if stat in STAT_FUNCS_PCT:
                 perf_stats.loc[stat, column] = str(np.round(value * 100,
                                                             1)) + '%'
@@ -1702,7 +1702,7 @@ def cumulate_returns(x):
                 y=monthly_rets.values,
                 color='steelblue')

-    locs, labels = plt.xticks()
+    _, labels = plt.xticks()
     plt.setp(labels, rotation=90)

     # only show x-labels on year boundary
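
Note on the changes above: Series.iteritems() is deprecated in recent pandas (and removed in pandas 2.0) in favour of Series.items(), which yields the same (label, value) pairs, and the unused locs return value of plt.xticks() is now bound to _ by convention. A minimal sketch of the iteration pattern, using hypothetical stats rather than pyfolio's real output:

    import pandas as pd

    # Hypothetical perf-stats column; .items() yields (label, value) pairs,
    # exactly like the deprecated Series.iteritems() did.
    stats = pd.Series({'Annual return': 0.12, 'Sharpe ratio': 1.3})
    for stat, value in stats.items():
        print(stat, value)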

pyfolio/round_trips.py

Lines changed: 11 additions & 9 deletions
@@ -120,7 +120,7 @@ def vwap(transaction):
             transaction.amount.sum()

     out = []
-    for sym, t in txn.groupby('symbol'):
+    for _, t in txn.groupby('symbol'):
         t = t.sort_index()
         t.index.name = 'dt'
         t = t.reset_index()
@@ -130,11 +130,11 @@ def vwap(transaction):
             1) != t.order_sign).astype(int).cumsum()
         t['block_time'] = ((t.dt.sub(t.dt.shift(1))) >
                            max_delta).astype(int).cumsum()
-        grouped_price = (t.groupby(('block_dir',
-                                    'block_time'))
+        grouped_price = (t.groupby(['block_dir',
+                                    'block_time'])
                          .apply(vwap))
         grouped_price.name = 'price'
-        grouped_rest = t.groupby(('block_dir', 'block_time')).agg({
+        grouped_rest = t.groupby(['block_dir', 'block_time']).agg({
             'amount': 'sum',
             'symbol': 'first',
             'dt': 'first'})
@@ -265,7 +265,7 @@ def extract_round_trips(transactions,
                                                                  minute=0,
                                                                  second=0))

-        tmp = roundtrips.join(pv, on='date', lsuffix='_')
+        tmp = roundtrips.set_index('date').join(pv.set_index('date'), lsuffix='_').reset_index()

         roundtrips['returns'] = tmp.pnl / tmp.portfolio_value
         roundtrips = roundtrips.drop('date', axis='columns')
@@ -301,15 +301,17 @@ def add_closing_transactions(positions, transactions):
     # they don't conflict with other round_trips executed at that time.
     end_dt = open_pos.name + pd.Timedelta(seconds=1)

-    for sym, ending_val in open_pos.iteritems():
+    for sym, ending_val in open_pos.items():
         txn_sym = transactions[transactions.symbol == sym]

         ending_amount = txn_sym.amount.sum()

         ending_price = ending_val / ending_amount
-        closing_txn = {'symbol': sym,
-                       'amount': -ending_amount,
-                       'price': ending_price}
+        closing_txn = OrderedDict([
+            ('amount', -ending_amount),
+            ('price', ending_price),
+            ('symbol', sym),
+        ])

         closing_txn = pd.DataFrame(closing_txn, index=[end_dt])
         closed_txns = closed_txns.append(closing_txn)
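
Note on the changes above: groupby now takes its multiple keys as a list (a tuple can be read as a single key in newer pandas), Series.iteritems() is replaced by .items(), the per-date join is done on explicit indexes, and OrderedDict keeps the closing-transaction columns in a fixed order. A minimal sketch of the multi-key groupby, with hypothetical trade blocks standing in for pyfolio's data:

    import pandas as pd

    t = pd.DataFrame({'block_dir': [1, 1, 2],
                      'block_time': [1, 1, 1],
                      'price': [10.0, 11.0, 12.0],
                      'amount': [5, 5, 3]})

    def vwap(block):
        # volume-weighted average price of one block of fills
        return (block.amount * block.price).sum() / block.amount.sum()

    # Pass the grouping columns as a list, not a tuple.
    grouped_price = t.groupby(['block_dir', 'block_time']).apply(vwap)
    print(grouped_price)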

pyfolio/tears.py

Lines changed: 3 additions & 3 deletions
@@ -1066,7 +1066,7 @@ def create_capacity_tear_sheet(returns, positions, transactions,
         llt[llt['max_pct_bar_consumed'] > trade_daily_vol_limit * 100])

     bt_starting_capital = positions.iloc[0].sum() / (1 + returns.iloc[0])
-    fig, ax_capacity_sweep = plt.subplots(figsize=(14, 6))
+    _, ax_capacity_sweep = plt.subplots(figsize=(14, 6))
     plotting.plot_capacity_sweep(returns, transactions, market_data,
                                  bt_starting_capital,
                                  min_pv=100000,
@@ -1516,7 +1516,7 @@ def create_perf_attrib_tear_sheet(returns,

     if factor_partitions is not None:

-        for factor_type, partitions in factor_partitions.iteritems():
+        for factor_type, partitions in factor_partitions.items():

             columns_to_select = perf_attrib_data.columns.intersection(
                 partitions
@@ -1531,7 +1531,7 @@ def create_perf_attrib_tear_sheet(returns,
             )
         current_section += 1

-        for factor_type, partitions in factor_partitions.iteritems():
+        for factor_type, partitions in factor_partitions.items():

             perf_attrib.plot_risk_exposures(
                 portfolio_exposures[portfolio_exposures.columns
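
Note on the changes above: factor_partitions is a plain mapping, and Python 3 dictionaries only expose .items() (.iteritems() was a Python 2 method), so the two tear-sheet loops are updated accordingly. A small sketch with a hypothetical partition mapping:

    from collections import OrderedDict

    # Hypothetical factor partitions of the kind the tear sheet iterates over
    factor_partitions = OrderedDict([
        ('style', ['momentum', 'size', 'value']),
        ('sector', ['technology', 'utilities']),
    ])

    for factor_type, partitions in factor_partitions.items():
        print(factor_type, partitions)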

pyfolio/tests/test_perf_attrib.py

Lines changed: 4 additions & 4 deletions
@@ -117,8 +117,8 @@ def test_perf_attrib_simple(self):

         expected_perf_attrib_output = pd.DataFrame(
             index=dts,
-            columns=['risk_factor1', 'risk_factor2', 'common_returns',
-                     'specific_returns', 'total_returns'],
+            columns=['risk_factor1', 'risk_factor2', 'total_returns',
+                     'common_returns', 'specific_returns'],
             data={'risk_factor1': [0.025, 0.025],
                   'risk_factor2': [0.025, 0.025],
                   'common_returns': [0.05, 0.05],
@@ -157,8 +157,8 @@ def test_perf_attrib_simple(self):

         expected_perf_attrib_output = pd.DataFrame(
             index=dts,
-            columns=['risk_factor1', 'risk_factor2', 'common_returns',
-                     'specific_returns', 'total_returns'],
+            columns=['risk_factor1', 'risk_factor2', 'total_returns',
+                     'common_returns', 'specific_returns'],
             data={'risk_factor1': [0.0, 0.0],
                   'risk_factor2': [0.0, 0.0],
                   'common_returns': [0.0, 0.0],
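
Note on the changes above: only the order of the expected columns changes, so that it matches the order in which the perf_attrib output lists them; with dict data, the explicit columns argument fixes the column order of the constructed frame. A minimal sketch with hypothetical values:

    import pandas as pd

    expected = pd.DataFrame(
        data={'common_returns': [0.05], 'specific_returns': [0.0],
              'total_returns': [0.05]},
        columns=['total_returns', 'common_returns', 'specific_returns'])
    print(list(expected.columns))
    # ['total_returns', 'common_returns', 'specific_returns']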

pyfolio/tests/test_round_trips.py

Lines changed: 2 additions & 2 deletions
@@ -152,12 +152,12 @@ def test_add_closing_trades(self):
                                           [-5, 10, 'A'],
                                           [-1, 10, 'B']],
                               columns=['amount', 'price', 'symbol'],
-                              index=[dates[:3]])
+                              index=dates[:3])
         positions = DataFrame(data=[[20, 10, 0],
                                     [-30, 10, 30],
                                     [-60, 0, 30]],
                               columns=['A', 'B', 'cash'],
-                              index=[dates[:3]])
+                              index=dates[:3])

         expected_ix = dates[:3].append(DatetimeIndex([dates[2] +
                                                       Timedelta(seconds=1)]))
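
Note on the change above: the fixtures now pass the DatetimeIndex slice directly instead of wrapping it in a one-element list, so the frames are indexed by plain timestamps. A minimal sketch with hypothetical dates and values:

    import pandas as pd
    from pandas import DataFrame

    dates = pd.date_range('2015-01-01', periods=3)
    transactions = DataFrame(data=[[2, 10, 'A'],
                                   [-5, 10, 'A'],
                                   [-1, 10, 'B']],
                             columns=['amount', 'price', 'symbol'],
                             index=dates)  # not index=[dates]
    print(transactions.index)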

pyfolio/tests/test_timeseries.py

Lines changed: 5 additions & 4 deletions
@@ -60,7 +60,7 @@ def test_gen_drawdown_table_relative(
             first_expected_recovery, first_net_drawdown,
             second_expected_peak, second_expected_valley,
             second_expected_recovery, second_net_drawdown
-            ):
+    ):

         rets = px.pct_change()

@@ -264,11 +264,12 @@ class TestStats(TestCase):
     dt_2 = pd.date_range('2000-1-3', periods=8, freq='D')

     @parameterized.expand([
-        (simple_rets[:5], 2, '[nan, inf, inf, 11.224972160321828, inf]')
+        (simple_rets[:5], 2, [np.nan, np.inf, np.inf, 11.224972160321, np.inf])
     ])
     def test_sharpe_2(self, returns, rolling_sharpe_window, expected):
-        self.assertEqual(str(timeseries.rolling_sharpe(
-            returns, rolling_sharpe_window).values.tolist()), expected)
+        np.testing.assert_array_almost_equal(timeseries.rolling_sharpe(returns,
+                                                                       rolling_sharpe_window).values,
+                                             np.asarray(expected))

     @parameterized.expand([
         (simple_rets[:5], simple_benchmark, 2, 0)
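
Note on the change above: comparing the rolling Sharpe values numerically with np.testing.assert_array_almost_equal is more robust than comparing the string repr of a float list; NaN and inf entries compare equal when they sit in the same positions, and tiny rounding differences within the default 6-decimal tolerance no longer fail the test. A minimal sketch with the values from this test case:

    import numpy as np

    expected = np.asarray([np.nan, np.inf, np.inf, 11.224972160321, np.inf])
    actual = np.asarray([np.nan, np.inf, np.inf, 11.224972160321828, np.inf])

    # Passes: NaN/inf positions match and the finite values agree to 6 decimals.
    np.testing.assert_array_almost_equal(actual, expected)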

pyfolio/timeseries.py

Lines changed: 1 addition & 1 deletion
@@ -952,7 +952,7 @@ def get_top_drawdowns(returns, top=10):
     underwater = df_cum / running_max - 1

     drawdowns = []
-    for t in range(top):
+    for _ in range(top):
         peak, valley, recovery = get_max_drawdown_underwater(underwater)
         # Slice out draw-down period
         if not pd.isnull(recovery):

pyfolio/utils.py

Lines changed: 1 addition & 1 deletion
@@ -354,7 +354,7 @@ def estimate_intraday(returns, positions, transactions, EOD_hour=23):
     # Calculate exposure, then take peak of exposure every day
     txn_val['exposure'] = txn_val.abs().sum(axis=1)
     condition = (txn_val['exposure'] == txn_val.groupby(
-        pd.TimeGrouper('24H'))['exposure'].transform(max))
+        pd.Grouper(freq='24H'))['exposure'].transform(max))
     txn_val = txn_val[condition].drop('exposure', axis=1)

     # Compute cash delta
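
Note on the change above: pd.TimeGrouper was deprecated and later removed; pd.Grouper(freq=...) is the drop-in replacement for time-based grouping on a DatetimeIndex. A minimal sketch of the daily-peak pattern, with hypothetical exposures:

    import pandas as pd

    idx = pd.date_range('2015-01-01 09:30', periods=6, freq='4H')
    exposure = pd.Series([1.0, 3.0, 2.0, 5.0, 4.0, 1.0], index=idx)

    # Highest exposure within each 24-hour window, broadcast back to every row
    daily_peak = exposure.groupby(pd.Grouper(freq='24H')).transform('max')
    print(exposure[exposure == daily_peak])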
