Skip to content

Commit

Permalink
Fixed catalyst tests except example tests
Browse files Browse the repository at this point in the history
  • Loading branch information
cfromknecht committed Jun 19, 2017
1 parent 30e0520 commit 99efa7a
Show file tree
Hide file tree
Showing 112 changed files with 705 additions and 641 deletions.
6 changes: 3 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ install:
- if [[ "$TRAVIS_SECURE_ENV_VARS" = "true" && "$TRAVIS_BRANCH" = "master" && "$TRAVIS_PULL_REQUEST" = "false" ]]; then DO_UPLOAD="true"; else DO_UPLOAD="false"; fi
- |
for recipe in $(ls -d conda/*/ | xargs -I {} basename {}); do
if [[ "$recipe" = "zipline" ]]; then continue; fi
if [[ "$recipe" = "catalyst" ]]; then continue; fi
conda build conda/$recipe --python=$CONDA_PY --numpy=$CONDA_NPY --skip-existing -c quantopian -c quantopian/label/ci
RECIPE_OUTPUT=$(conda build conda/$recipe --python=$CONDA_PY --numpy=$CONDA_NPY --output)
Expand All @@ -61,14 +61,14 @@ install:
before_script:
- pip freeze | sort
script:
- flake8 zipline tests
- flake8 catalyst tests
- nosetests --with-coverage
# deactivate env to get access to anaconda command
- source deactivate

# unshallow the clone so the conda build can clone it.
- git fetch --unshallow
- exec 3>&1; ZP_OUT=$(conda build conda/zipline --python=$CONDA_PY --numpy=$CONDA_NPY -c quantopian -c quantopian/label/ci | tee >(cat - >&3))
- exec 3>&1; ZP_OUT=$(conda build conda/catalyst --python=$CONDA_PY --numpy=$CONDA_NPY -c quantopian -c quantopian/label/ci | tee >(cat - >&3))
- ZP_OUTPUT=$(echo "$ZP_OUT" | grep "anaconda upload" | awk '{print $NF}')
- if [[ "$DO_UPLOAD" = "true" ]]; then anaconda -t $ANACONDA_TOKEN upload $ZP_OUTPUT -u quantopian --label ci; fi
# reactivate env (necessary for coveralls)
Expand Down
6 changes: 4 additions & 2 deletions catalyst/algorithm.py
Original file line number Diff line number Diff line change
Expand Up @@ -1115,7 +1115,7 @@ def schedule_function(self,
if calendar is None:
cal = self.trading_calendar
elif calendar is calendars.CRYPTO_ASSETS:
cal = get_environment('OPEN')
cal = get_calendar('OPEN')
elif calendar is calendars.US_EQUITIES:
cal = get_calendar('NYSE')
elif calendar is calendars.US_FUTURES:
Expand All @@ -1124,7 +1124,9 @@ def schedule_function(self,
raise ScheduleFunctionInvalidCalendar(
given_calendar=calendar,
allowed_calendars=(
'[calendars.CRYPTO_ASSETS, calendars.US_EQUITIES, calendars.US_FUTURES]'
'[calendars.CRYPTO_ASSETS, '
'calendars.US_EQUITIES, '
'calendars.US_FUTURES]'
),
)

Expand Down
3 changes: 3 additions & 0 deletions catalyst/data/benchmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,9 @@ def get_benchmark_returns(symbol, first_date, last_date):
first_date is **not** included because we need the close from day N - 1 to
compute the returns for day N.
"""
if symbol == '^GSPC':
symbol = 'spy'

data = pd_reader.DataReader(
symbol,
'google',
Expand Down
1 change: 1 addition & 0 deletions catalyst/data/bundles/quandl.py
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,7 @@ def quantopian_quandl_bundle(environ,
'https://s3.amazonaws.com/quantopian-public-zipline-data/quandl'
)


@bundles.register(
'catalyst',
calendar_name='NYSE',
Expand Down
5 changes: 4 additions & 1 deletion catalyst/data/us_equity_pricing.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,10 @@
preprocess,
verify_indices_all_unique,
)
from catalyst.utils.sqlite_utils import group_into_chunks, coerce_string_to_conn
from catalyst.utils.sqlite_utils import (
group_into_chunks,
coerce_string_to_conn,
)
from catalyst.utils.memoize import lazyval
from catalyst.utils.cli import maybe_show_progress
from ._equities import _compute_row_slices, _read_bcolz_data
Expand Down
13 changes: 5 additions & 8 deletions catalyst/examples/momentum_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,26 +10,23 @@
pipeline_output,
record,
schedule_function,
symbol,
)
from catalyst.pipeline import Pipeline
from catalyst.pipeline.factors.crypto import RSI as cRSI
from catalyst.pipeline.factors.equity import RSI as eRSI
from catalyst.pipeline.factors.equity import RSI


def make_pipeline():
crsi = cRSI()
ersi = eRSI()
rsi = RSI()
return Pipeline(
columns={
'longs': crsi.top(3),
'shorts': crsi.bottom(3),
'equity': ersi.top(3),
'longs': rsi.top(3),
'shorts': rsi.bottom(3),
},
)


def rebalance(context, data):

# Pipeline data will be a dataframe with boolean columns named 'longs' and
# 'shorts'.
pipeline_data = context.pipeline_data
Expand Down
5 changes: 3 additions & 2 deletions catalyst/pipeline/api_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,9 @@ def restrict_to_dtype(dtype, message_template):
Terms with a specific dtype.
This is conceptually similar to
catalyst.utils.input_validation.expect_dtypes, but provides more flexibility
for providing error messages that are specifically targeting Term methods.
catalyst.utils.input_validation.expect_dtypes, but provides more
flexibility for providing error messages that are specifically targeting
Term methods.
Parameters
----------
Expand Down
3 changes: 2 additions & 1 deletion catalyst/pipeline/classifiers/classifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,8 @@ def to_workspace_value(self, result, assets):
Called with the result of a pipeline. This needs to return an object
which can be put into the workspace to continue doing computations.
This is the inverse of :func:`~catalyst.pipeline.term.Term.postprocess`.
This is the inverse of
:func:`~catalyst.pipeline.term.Term.postprocess`.
"""
if self.dtype == int64_dtype:
return super(Classifier, self).to_workspace_value(result, assets)
Expand Down
8 changes: 4 additions & 4 deletions catalyst/pipeline/data/crypto_pricing.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ class CryptoPricing(DataSet):
"""
Dataset representing daily trading prices and volumes of crypto-assets.
"""
open = Column(float64_dtype)
high = Column(float64_dtype)
low = Column(float64_dtype)
close = Column(float64_dtype)
open = Column(float64_dtype)
high = Column(float64_dtype)
low = Column(float64_dtype)
close = Column(float64_dtype)
volume = Column(float64_dtype)
5 changes: 4 additions & 1 deletion catalyst/pipeline/data/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,10 @@
with_metaclass,
)

from catalyst.pipeline.classifiers import Classifier, Latest as LatestClassifier
from catalyst.pipeline.classifiers import (
Classifier,
Latest as LatestClassifier,
)
from catalyst.pipeline.factors import Factor, Latest as LatestFactor
from catalyst.pipeline.filters import Filter, Latest as LatestFilter
from catalyst.pipeline.sentinels import NotSpecified
Expand Down
8 changes: 4 additions & 4 deletions catalyst/pipeline/data/equity_pricing.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ class USEquityPricing(DataSet):
"""
Dataset representing daily trading prices and volumes.
"""
open = Column(float64_dtype)
high = Column(float64_dtype)
low = Column(float64_dtype)
close = Column(float64_dtype)
open = Column(float64_dtype)
high = Column(float64_dtype)
low = Column(float64_dtype)
close = Column(float64_dtype)
volume = Column(float64_dtype)
3 changes: 3 additions & 0 deletions catalyst/pipeline/factors/equity/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
from .statistical import (
RollingPearson,
RollingLinearRegression,
RollingLinearRegressionOfReturns,
RollingPearsonOfReturns,
RollingSpearman,
RollingSpearmanOfReturns,
)
from .technical import (
Expand Down
11 changes: 6 additions & 5 deletions catalyst/pipeline/factors/factor.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,8 +338,9 @@ class Factor(RestrictedDTypeMixin, ComputableTerm):
>>> f2 = SomeOtherFactor(...) # doctest: +SKIP
>>> average = (f1 + f2) / 2.0 # doctest: +SKIP
Factors can also be converted into :class:`catalyst.pipeline.Filter` objects
via comparison operators: (``<``, ``<=``, ``!=``, ``eq``, ``>``, ``>=``).
Factors can also be converted into :class:`catalyst.pipeline.Filter`
objects via comparison operators:
(``<``, ``<=``, ``!=``, ``eq``, ``>``, ``>=``).
There are many natural operators defined on Factors besides the basic
numerical operators. These include methods identifying missing or
Expand Down Expand Up @@ -699,7 +700,7 @@ def pearsonr(self, target, correlation_length, mask=NotSpecified):
:class:`catalyst.pipeline.factors.RollingPearsonOfReturns`
:meth:`Factor.spearmanr`
"""
from .statistical import RollingPearson
from .equity import RollingPearson
return RollingPearson(
base_factor=self,
target=target,
Expand Down Expand Up @@ -764,7 +765,7 @@ def spearmanr(self, target, correlation_length, mask=NotSpecified):
:class:`catalyst.pipeline.factors.RollingSpearmanOfReturns`
:meth:`Factor.pearsonr`
"""
from .statistical import RollingSpearman
from .equity import RollingSpearman
return RollingSpearman(
base_factor=self,
target=target,
Expand Down Expand Up @@ -826,7 +827,7 @@ def linear_regression(self, target, regression_length, mask=NotSpecified):
:func:`scipy.stats.linregress`
:class:`catalyst.pipeline.factors.RollingLinearRegressionOfReturns`
"""
from .statistical import RollingLinearRegression
from .equity import RollingLinearRegression
return RollingLinearRegression(
dependent=self,
independent=target,
Expand Down
2 changes: 1 addition & 1 deletion catalyst/pipeline/filters/filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ class Filter(RestrictedDTypeMixin, ComputableTerm):
construct a Factor computing 10-day VWAP and compare it to the scalar value
20.0::
>>> from catalyst.pipeline.factors import VWAP
>>> from catalyst.pipeline.factors.equity import VWAP
>>> vwap_10 = VWAP(window_length=10)
>>> vwaps_under_20 = (vwap_10 <= 20)
Expand Down
4 changes: 4 additions & 0 deletions catalyst/pipeline/loaders/blaze/estimates.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,10 @@ def load_adjusted_array(self, columns, dates, assets, mask):
mask,
)

    @property
    def columns(self):
        """Return the columns recorded on this loader at construction.

        NOTE(review): presumably the set of dataset columns this loader can
        serve to the pipeline engine — confirm against the PipelineLoader
        interface this class implements.
        """
        return self._columns


class BlazeNextEstimatesLoader(BlazeEstimatesLoader):
loader = NextEarningsEstimatesLoader
Expand Down
6 changes: 6 additions & 0 deletions catalyst/pipeline/loaders/blaze/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,8 @@ def __init__(self,
self._data_query_time = data_query_time
self._data_query_tz = data_query_tz

self._columns = next_value_columns

def load_adjusted_array(self, columns, dates, assets, mask):
raw = load_raw_data(assets,
dates,
Expand All @@ -109,3 +111,7 @@ def load_adjusted_array(self, columns, dates, assets, mask):
assets,
mask,
)

    @property
    def columns(self):
        """Return the columns this loader serves.

        Backed by ``_columns``, which ``__init__`` sets from
        ``next_value_columns``.
        """
        return self._columns
5 changes: 1 addition & 4 deletions catalyst/pipeline/loaders/crypto_pricing_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,7 @@
uint32,
)

from catalyst.data.us_equity_pricing import (
BcolzDailyBarReader,
SQLiteAdjustmentReader,
)
from catalyst.data.us_equity_pricing import BcolzDailyBarReader
from catalyst.lib.adjusted_array import AdjustedArray
from catalyst.errors import NoFurtherDataError
from catalyst.utils.calendars import get_calendar
Expand Down
5 changes: 5 additions & 0 deletions catalyst/pipeline/loaders/earnings_estimates.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,7 @@ def __init__(self,
}

self.name_map = name_map
self._columns = set(name_map.keys())

@abstractmethod
def get_zeroth_quarter_idx(self, stacked_last_per_qtr):
Expand Down Expand Up @@ -664,6 +665,10 @@ def load_adjusted_array(self, columns, dates, assets, mask):
)
return out

    @property
    def columns(self):
        """Return the set of loadable column names.

        Backed by ``_columns``, which ``__init__`` sets to
        ``set(name_map.keys())``.
        """
        return self._columns

def get_last_data_per_qtr(self, assets_with_data, columns, dates):
"""
Determine the last piece of information we know for each column on each
Expand Down
2 changes: 1 addition & 1 deletion catalyst/pipeline/loaders/equity_pricing_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ def load_adjusted_array(self, columns, dates, assets, mask):
c.missing_value,
)
return out

    @property
    def columns(self):
        """Return the columns recorded on this loader.

        NOTE(review): presumably the pricing columns this loader can
        serve — confirm against the PipelineLoader interface.
        """
        return self._columns
Expand Down
6 changes: 6 additions & 0 deletions catalyst/pipeline/loaders/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,8 @@ def __init__(self,
# Columns to load with self.load_previous_events.
self.previous_value_columns = previous_value_columns

self._columns = events.columns

def split_next_and_previous_event_columns(self, requested_columns):
"""
Split requested columns into columns that should load the next known
Expand Down Expand Up @@ -229,3 +231,7 @@ def load_adjusted_array(self, columns, dates, sids, mask):
self.load_next_events(n, dates, sids, mask),
self.load_previous_events(p, dates, sids, mask),
)

    @property
    def columns(self):
        """Return the columns of the underlying events frame.

        Backed by ``_columns``, which ``__init__`` sets to
        ``events.columns``.
        """
        return self._columns
7 changes: 7 additions & 0 deletions catalyst/pipeline/loaders/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,8 @@ def __init__(self, column, baseline, adjustments=None):
self.dates = baseline.index
self.assets = baseline.columns

self._columns = self.assets

if adjustments is None:
adjustments = DataFrame(
index=DatetimeIndex([]),
Expand Down Expand Up @@ -174,3 +176,8 @@ def load_adjusted_array(self, columns, dates, assets, mask):
missing_value=column.missing_value,
),
}

    @property
    def columns(self):
        """Return the asset columns of the baseline frame.

        Backed by ``_columns``, which ``__init__`` sets to ``self.assets``
        (the baseline DataFrame's columns).
        """
        return self._columns

16 changes: 16 additions & 0 deletions catalyst/pipeline/loaders/synthetic.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,9 @@ class PrecomputedLoader(PipelineLoader):
"""
def __init__(self, constants, dates, sids):
loaders = {}
columns = []
for column, const in iteritems(constants):
columns.append(column)
frame = DataFrame(
const,
index=dates,
Expand All @@ -75,6 +77,7 @@ def __init__(self, constants, dates, sids):
)

self._loaders = loaders
self._columns = set(columns)

def load_adjusted_array(self, columns, dates, assets, mask):
"""
Expand All @@ -91,6 +94,9 @@ def load_adjusted_array(self, columns, dates, assets, mask):
)
return out

def columns(self):
return self._columns


class EyeLoader(PrecomputedLoader):
"""
Expand All @@ -108,12 +114,17 @@ class EyeLoader(PrecomputedLoader):
"""
def __init__(self, columns, dates, sids):
shape = (len(dates), len(sids))
self._columns = columns
super(EyeLoader, self).__init__(
{column: eye(shape, dtype=column.dtype) for column in columns},
dates,
sids,
)

    @property
    def columns(self):
        """Return the columns this loader was constructed with.

        Backed by ``_columns``, set directly from the ``columns``
        argument in ``__init__``.
        """
        return self._columns


class SeededRandomLoader(PrecomputedLoader):
"""
Expand All @@ -133,6 +144,7 @@ class SeededRandomLoader(PrecomputedLoader):

def __init__(self, seed, columns, dates, sids):
self._seed = seed
self._columns = columns
super(SeededRandomLoader, self).__init__(
{c: self.values(c.dtype, dates, sids) for c in columns},
dates,
Expand All @@ -151,6 +163,10 @@ def values(self, dtype, dates, sids):
bool_dtype: self._bool_values,
object_dtype: self._object_values,
}[dtype](shape)

    @property
    def columns(self):
        """Return the columns this loader was constructed with.

        Backed by ``_columns``, set directly from the ``columns``
        argument in ``__init__``.
        """
        return self._columns

@property
def state(self):
Expand Down
Loading

0 comments on commit 99efa7a

Please sign in to comment.