Skip to content

Commit

Permalink
Distinguish between copy and replace
Browse files Browse the repository at this point in the history
  • Loading branch information
polakowo committed Aug 27, 2021
1 parent 436d07a commit d9fe6a3
Show file tree
Hide file tree
Showing 27 changed files with 132 additions and 108 deletions.
2 changes: 1 addition & 1 deletion apps/candlestick-patterns/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -1307,7 +1307,7 @@ def _simulate_portfolio(size, init_cash='autoalign'):
# Align initial cash across main and random strategies
aligned_portfolio = _simulate_portfolio(np.hstack((main_size[:, None], rand_size)))
# Fixate initial cash for indexing
-aligned_portfolio = aligned_portfolio.copy(
+aligned_portfolio = aligned_portfolio.replace(
init_cash=aligned_portfolio.init_cash
)
# Separate portfolios
Expand Down
21 changes: 12 additions & 9 deletions tests/test_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,14 +347,14 @@ def test_eq(self):
df2_wrapper = array_wrapper.ArrayWrapper.from_obj(df2)
df4_wrapper = array_wrapper.ArrayWrapper.from_obj(df4)

-sr2_wrapper_co = sr2_wrapper.copy(column_only_select=True)
-df4_wrapper_co = df4_wrapper.copy(column_only_select=True)
+sr2_wrapper_co = sr2_wrapper.replace(column_only_select=True)
+df4_wrapper_co = df4_wrapper.replace(column_only_select=True)

-sr2_grouped_wrapper = sr2_wrapper.copy(group_by=np.array(['g1']), group_select=True)
-df4_grouped_wrapper = df4_wrapper.copy(group_by=np.array(['g1', 'g1', 'g2']), group_select=True)
+sr2_grouped_wrapper = sr2_wrapper.replace(group_by=np.array(['g1']), group_select=True)
+df4_grouped_wrapper = df4_wrapper.replace(group_by=np.array(['g1', 'g1', 'g2']), group_select=True)

-sr2_grouped_wrapper_co = sr2_grouped_wrapper.copy(column_only_select=True, group_select=True)
-df4_grouped_wrapper_co = df4_grouped_wrapper.copy(column_only_select=True, group_select=True)
+sr2_grouped_wrapper_co = sr2_grouped_wrapper.replace(column_only_select=True, group_select=True)
+df4_grouped_wrapper_co = df4_grouped_wrapper.replace(column_only_select=True, group_select=True)


class TestArrayWrapper:
Expand Down Expand Up @@ -674,13 +674,13 @@ def test_shape_2d(self):

def test_freq(self):
assert sr2_wrapper.freq is None
-assert sr2_wrapper.copy(freq='1D').freq == day_dt
-assert sr2_wrapper.copy(index=pd.Index([
+assert sr2_wrapper.replace(freq='1D').freq == day_dt
+assert sr2_wrapper.replace(index=pd.Index([
datetime(2020, 1, 1),
datetime(2020, 1, 2),
datetime(2020, 1, 3)
], freq='1D')).freq == day_dt
-assert sr2_wrapper.copy(index=pd.Index([
+assert sr2_wrapper.replace(index=pd.Index([
datetime(2020, 1, 1),
datetime(2020, 1, 2),
datetime(2020, 1, 3)
Expand Down Expand Up @@ -2636,6 +2636,9 @@ def combine_func_nb(x, y, a):
# ############# accessors.py ############# #

class TestAccessors:
def test_indexing(self):
pd.testing.assert_series_equal(df4.vbt['a6'].obj, df4['a6'].vbt.obj)

def test_freq(self):
ts = pd.Series([1, 2, 3], index=pd.DatetimeIndex([
datetime(2018, 1, 1),
Expand Down
2 changes: 1 addition & 1 deletion tests/test_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -1025,7 +1025,7 @@ def test_stats(self):
data.stats(column='feat0')
)
pd.testing.assert_series_equal(
-data.copy(wrapper=data.wrapper.copy(group_by=True)).stats(),
+data.replace(wrapper=data.wrapper.replace(group_by=True)).stats(),
data.stats(group_by=True)
)
stats_df = data.stats(agg_func=None)
Expand Down
3 changes: 3 additions & 0 deletions tests/test_generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,9 @@ def teardown_module():


class TestAccessors:
def test_indexing(self):
assert df.vbt['a'].min() == df['a'].vbt.min()

def test_set_by_mask(self):
np.testing.assert_array_equal(
nb.set_by_mask_1d_nb(
Expand Down
1 change: 1 addition & 0 deletions tests/test_indicators.py
Original file line number Diff line number Diff line change
Expand Up @@ -2209,6 +2209,7 @@ def test_dir(self):
'post_resolve_attr',
'pre_resolve_attr',
'regroup',
'replace',
'resolve_attr',
'resolve_self',
'run',
Expand Down
2 changes: 1 addition & 1 deletion tests/test_portfolio.py
Original file line number Diff line number Diff line change
Expand Up @@ -7481,7 +7481,7 @@ def test_stats(self):
pf.stats(column='second', group_by=group_by)
)
pd.testing.assert_series_equal(
-pf.copy(wrapper=pf.wrapper.copy(freq='10d')).stats(),
+pf.replace(wrapper=pf.wrapper.replace(freq='10d')).stats(),
pf.stats(settings=dict(freq='10d'))
)
stats_df = pf.stats(agg_func=None)
Expand Down
30 changes: 15 additions & 15 deletions tests/test_records.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,11 @@
ndim=2,
freq='1 days'
)
-wrapper_grouped = wrapper.copy(group_by=group_by)
+wrapper_grouped = wrapper.replace(group_by=group_by)

records = vbt.records.Records(wrapper, records_arr)
records_grouped = vbt.records.Records(wrapper_grouped, records_arr)
-records_nosort = records.copy(records_arr=records_nosort_arr)
+records_nosort = records.replace(records_arr=records_nosort_arr)
records_nosort_grouped = vbt.records.Records(wrapper_grouped, records_nosort_arr)


Expand Down Expand Up @@ -186,8 +186,8 @@ def test_is_sorted(self):
mapped_array_nosort = records_nosort.map_field('some_field1')
mapped_array_nosort_grouped = records_nosort_grouped.map_field('some_field1')
mapping = {x: 'test_' + str(x) for x in pd.unique(mapped_array.values)}
-mp_mapped_array = mapped_array.copy(mapping=mapping)
-mp_mapped_array_grouped = mapped_array_grouped.copy(mapping=mapping)
+mp_mapped_array = mapped_array.replace(mapping=mapping)
+mp_mapped_array_grouped = mapped_array_grouped.replace(mapping=mapping)


class TestMappedArray:
Expand Down Expand Up @@ -824,7 +824,7 @@ def test_value_counts(self):
columns=pd.Index(['g1', 'g2'], dtype='object')
)
)
-mapped_array2 = mapped_array.copy(mapped_arr=[4, 4, 3, 2, np.nan, 4, 3, 2, 1])
+mapped_array2 = mapped_array.replace(mapped_arr=[4, 4, 3, 2, np.nan, 4, 3, 2, 1])
pd.testing.assert_frame_equal(
mapped_array2.value_counts(sort_uniques=False),
pd.DataFrame(
Expand Down Expand Up @@ -1700,10 +1700,10 @@ def test_coverage(self):
)
pd.testing.assert_series_equal(
ranges.coverage(),
-ranges.copy(records_arr=np.repeat(ranges.values, 2)).coverage()
+ranges.replace(records_arr=np.repeat(ranges.values, 2)).coverage()
)
pd.testing.assert_series_equal(
-ranges.copy(records_arr=np.repeat(ranges.values, 2)).coverage(overlapping=True),
+ranges.replace(records_arr=np.repeat(ranges.values, 2)).coverage(overlapping=True),
pd.Series(
np.array([1.0, 1.0, 1.0, np.nan]),
index=ts2.columns
Expand All @@ -1717,7 +1717,7 @@ def test_coverage(self):
).rename('coverage')
)
pd.testing.assert_series_equal(
-ranges.copy(records_arr=np.repeat(ranges.values, 2)).coverage(overlapping=True, normalize=False),
+ranges.replace(records_arr=np.repeat(ranges.values, 2)).coverage(overlapping=True, normalize=False),
pd.Series(
np.array([3.0, 3.0, 3.0, np.nan]),
index=ts2.columns
Expand All @@ -1732,7 +1732,7 @@ def test_coverage(self):
)
pd.testing.assert_series_equal(
ranges_grouped.coverage(),
-ranges_grouped.copy(records_arr=np.repeat(ranges_grouped.values, 2)).coverage()
+ranges_grouped.replace(records_arr=np.repeat(ranges_grouped.values, 2)).coverage()
)

def test_stats(self):
Expand Down Expand Up @@ -1844,7 +1844,7 @@ def test_ts(self):
drawdowns_grouped['g1'].ts,
ts2[['a', 'b']]
)
-assert drawdowns.copy(ts=None)['a'].ts is None
+assert drawdowns.replace(ts=None)['a'].ts is None

def test_from_ts(self):
record_arrays_close(
Expand Down Expand Up @@ -2378,7 +2378,7 @@ def test_close(self):
orders_grouped['g1'].close,
close[['a', 'b']]
)
-assert orders.copy(close=None)['a'].close is None
+assert orders.replace(close=None)['a'].close is None

def test_records_readable(self):
records_readable = orders.records_readable
Expand Down Expand Up @@ -2593,7 +2593,7 @@ def test_close(self):
exit_trades_grouped['g1'].close,
close[['a', 'b']]
)
-assert exit_trades.copy(close=None)['a'].close is None
+assert exit_trades.replace(close=None)['a'].close is None

def test_records_arr(self):
record_arrays_close(
Expand All @@ -2614,7 +2614,7 @@ def test_records_arr(self):
(12, 2, 1., 7, 8., 0.08, 7, 8., 0., -0.08, -0.01, 0, 0, 9)
], dtype=trade_dt)
)
-reversed_col_orders = orders.copy(records_arr=np.concatenate((
+reversed_col_orders = orders.replace(records_arr=np.concatenate((
orders.values[orders.values['col'] == 2],
orders.values[orders.values['col'] == 1],
orders.values[orders.values['col'] == 0]
Expand Down Expand Up @@ -3121,7 +3121,7 @@ def test_records_arr(self):
(12, 2, 1.0, 7, 8.0, 0.08, 7, 8.0, 0.0, -0.08, -0.01, 0, 0, 9)
], dtype=trade_dt)
)
-reversed_col_orders = orders.copy(records_arr=np.concatenate((
+reversed_col_orders = orders.replace(records_arr=np.concatenate((
orders.values[orders.values['col'] == 2],
orders.values[orders.values['col'] == 1],
orders.values[orders.values['col'] == 0]
Expand Down Expand Up @@ -3153,7 +3153,7 @@ def test_records_arr(self):
(9, 2, 1., 7, 8., 0.08, 7, 8., 0., -0.08, -0.01, 0, 0, 9)
], dtype=trade_dt)
)
-reversed_col_trades = exit_trades.copy(records_arr=np.concatenate((
+reversed_col_trades = exit_trades.replace(records_arr=np.concatenate((
exit_trades.values[exit_trades.values['col'] == 2],
exit_trades.values[exit_trades.values['col'] == 1],
exit_trades.values[exit_trades.values['col'] == 0]
Expand Down
3 changes: 3 additions & 0 deletions tests/test_returns.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,9 @@ def teardown_module():


class TestAccessors:
def test_indexing(self):
assert rets.vbt.returns['a'].total() == rets['a'].vbt.returns.total()

def test_benchmark_rets(self):
ret_acc = rets.vbt.returns(benchmark_rets=benchmark_rets)
pd.testing.assert_frame_equal(ret_acc.benchmark_rets, benchmark_rets)
Expand Down
3 changes: 3 additions & 0 deletions tests/test_signals.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,9 @@ def teardown_module():


class TestAccessors:
def test_indexing(self):
assert mask.vbt.signals['a'].total() == mask['a'].vbt.signals.total()

def test_freq(self):
assert mask.vbt.signals.wrapper.freq == day_dt
assert mask['a'].vbt.signals.wrapper.freq == day_dt
Expand Down
4 changes: 2 additions & 2 deletions tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -920,8 +920,8 @@ def __init__(self, a, b=2, **kwargs):
self.my_cfg = config.Config(dict(sr=pd.Series([1, 2, 3])))

assert H(1).config == {'a': 1, 'b': 2}
-assert H(1).copy(b=3).config == {'a': 1, 'b': 3}
-assert H(1).copy(c=4).config == {'a': 1, 'b': 2, 'c': 4}
+assert H(1).replace(b=3).config == {'a': 1, 'b': 3}
+assert H(1).replace(c=4).config == {'a': 1, 'b': 2, 'c': 4}
assert H(pd.Series([1, 2, 3])) == H(pd.Series([1, 2, 3]))
assert H(pd.Series([1, 2, 3])) != H(pd.Series([1, 2, 4]))
assert H(pd.DataFrame([1, 2, 3])) == H(pd.DataFrame([1, 2, 3]))
Expand Down
12 changes: 6 additions & 6 deletions vectorbt/base/accessors.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,13 +104,13 @@ def __init__(self, obj: tp.SeriesFrame, wrapper: tp.Optional[ArrayWrapper] = Non
if wrapper is None:
wrapper = ArrayWrapper.from_obj(obj, **wrapping_kwargs)
else:
-wrapper = wrapper.copy(**wrapping_kwargs)
+wrapper = wrapper.replace(**wrapping_kwargs)
Wrapping.__init__(self, wrapper, obj=obj, **kwargs)

def __call__(self: BaseAccessorT, **kwargs) -> BaseAccessorT:
"""Allows passing arguments to the initializer."""

-return self.copy(**kwargs)
+return self.replace(**kwargs)

@property
def sr_accessor_cls(self) -> tp.Type["BaseSRAccessor"]:
Expand All @@ -127,13 +127,13 @@ def indexing_func(self: BaseAccessorT, pd_indexing_func: tp.PandasIndexingFunc,
new_wrapper, idx_idxs, _, col_idxs = self.wrapper.indexing_func_meta(pd_indexing_func, **kwargs)
new_obj = new_wrapper.wrap(self.to_2d_array()[idx_idxs, :][:, col_idxs], group_by=False)
if checks.is_series(new_obj):
-return self.copy(
-    _class=self.sr_accessor_cls,
+return self.replace(
+    cls_=self.sr_accessor_cls,
obj=new_obj,
wrapper=new_wrapper
)
-return self.copy(
-    _class=self.df_accessor_cls,
+return self.replace(
+    cls_=self.df_accessor_cls,
obj=new_obj,
wrapper=new_wrapper
)
Expand Down
20 changes: 10 additions & 10 deletions vectorbt/base/array_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ class ArrayWrapper(Configured, PandasIndexer):
`**kwargs` are passed to `vectorbt.base.column_grouper.ColumnGrouper`.
!!! note
-    This class is meant to be immutable. To change any attribute, use `ArrayWrapper.copy`.
+    This class is meant to be immutable. To change any attribute, use `ArrayWrapper.replace`.
Use methods that begin with `get_` to get group-aware results."""

Expand Down Expand Up @@ -306,7 +306,7 @@ def indexing_func_meta(self: ArrayWrapperT,
ungrouped_group_idxs[group_lens[:-1]] = 1
ungrouped_group_idxs = np.cumsum(ungrouped_group_idxs)

-return _self.copy(
+return _self.replace(
index=new_index,
columns=ungrouped_columns,
ndim=ungrouped_ndim,
Expand All @@ -316,7 +316,7 @@ def indexing_func_meta(self: ArrayWrapperT,

# Selection based on columns
col_idxs_arr = reshape_fns.to_1d_array(col_idxs)
-return _self.copy(
+return _self.replace(
index=new_index,
columns=new_columns,
ndim=new_ndim,
Expand All @@ -325,7 +325,7 @@ def indexing_func_meta(self: ArrayWrapperT,
), idx_idxs, col_idxs, col_idxs

# Grouping disabled
-return _self.copy(
+return _self.replace(
index=new_index,
columns=new_columns,
ndim=new_ndim,
Expand Down Expand Up @@ -485,7 +485,7 @@ def regroup(self: ArrayWrapperT, group_by: tp.GroupByLike, **kwargs) -> ArrayWra
if self.grouper.is_grouped(group_by=group_by):
if not self.grouper.is_group_count_changed(group_by=group_by):
grouped_ndim = self.grouped_ndim
-return self.copy(grouped_ndim=grouped_ndim, group_by=group_by, **kwargs)
+return self.replace(grouped_ndim=grouped_ndim, group_by=group_by, **kwargs)
return self # important for keeping cache

@cached_method
Expand All @@ -495,7 +495,7 @@ def resolve(self: ArrayWrapperT, group_by: tp.GroupByLike = None, **kwargs) -> A
Replaces columns and other metadata with groups."""
_self = self.regroup(group_by=group_by, **kwargs)
if _self.grouper.is_grouped():
-return _self.copy(
+return _self.replace(
columns=_self.grouper.get_columns(),
ndim=_self.grouped_ndim,
grouped_ndim=None,
Expand Down Expand Up @@ -696,7 +696,7 @@ def __init__(self, wrapper: ArrayWrapper, **kwargs) -> None:

def indexing_func(self: WrappingT, pd_indexing_func: tp.PandasIndexingFunc, **kwargs) -> WrappingT:
"""Perform indexing on `Wrapping`."""
-return self.copy(wrapper=self.wrapper.indexing_func(pd_indexing_func, **kwargs))
+return self.replace(wrapper=self.wrapper.indexing_func(pd_indexing_func, **kwargs))

@property
def wrapper(self) -> ArrayWrapper:
Expand All @@ -711,7 +711,7 @@ def regroup(self: WrappingT, group_by: tp.GroupByLike, **kwargs) -> WrappingT:
`**kwargs` will be passed to `ArrayWrapper.regroup`."""
if self.wrapper.grouper.is_grouping_changed(group_by=group_by):
self.wrapper.grouper.check_group_by(group_by=group_by)
-return self.copy(wrapper=self.wrapper.regroup(group_by, **kwargs))
+return self.replace(wrapper=self.wrapper.regroup(group_by, **kwargs))
return self # important for keeping cache

def resolve_self(self: AttrResolverT,
Expand All @@ -733,13 +733,13 @@ def resolve_self(self: AttrResolverT,
silence_warnings = array_wrapper_cfg['silence_warnings']

if 'freq' in cond_kwargs:
-wrapper_copy = self.wrapper.copy(freq=cond_kwargs['freq'])
+wrapper_copy = self.wrapper.replace(freq=cond_kwargs['freq'])

if wrapper_copy.freq != self.wrapper.freq:
if not silence_warnings:
warnings.warn(f"Changing the frequency will create a copy of this object. "
f"Consider setting it upon object creation to re-use existing cache.", stacklevel=2)
-self_copy = self.copy(wrapper=wrapper_copy)
+self_copy = self.replace(wrapper=wrapper_copy)
for alias in self.self_aliases:
if alias not in custom_arg_names:
cond_kwargs[alias] = self_copy
Expand Down
2 changes: 1 addition & 1 deletion vectorbt/base/column_grouper.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ class ColumnGrouper(Configured):
Columns should build groups that are coherent and sorted for using `get_group_lens_nb`.
!!! note
-    This class is meant to be immutable. To change any attribute, use `ColumnGrouper.copy`."""
+    This class is meant to be immutable. To change any attribute, use `ColumnGrouper.replace`."""

def __init__(self, columns: tp.Index, group_by: tp.GroupByLike = None, allow_enable: bool = True,
allow_disable: bool = True, allow_modify: bool = True) -> None:
Expand Down
6 changes: 3 additions & 3 deletions vectorbt/data/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@ def indexing_func(self: DataT, pd_indexing_func: tp.PandasIndexingFunc, **kwargs
"""Perform indexing on `Data`."""
new_wrapper = pd_indexing_func(self.wrapper)
new_data = {k: pd_indexing_func(v) for k, v in self.data.items()}
-return self.copy(
+return self.replace(
wrapper=new_wrapper,
data=new_data
)
Expand Down Expand Up @@ -657,8 +657,8 @@ def update(self: DataT, **kwargs) -> DataT:

# Create new instance
new_index = new_data[self.symbols[0]].index
-return self.copy(
-    wrapper=self.wrapper.copy(index=new_index),
+return self.replace(
+    wrapper=self.wrapper.replace(index=new_index),
data=new_data
)

Expand Down
Loading

0 comments on commit d9fe6a3

Please sign in to comment.