Notebook
In [20]:
from quantopian.pipeline import Pipeline, CustomFactor  
from quantopian.pipeline.data import EquityPricing, factset  
from quantopian.pipeline.domain import JP_EQUITIES  
from quantopian.research import run_pipeline
from quantopian.pipeline.data.factset import Fundamentals
from alphalens.tears import create_information_tear_sheet
from alphalens.tears import create_returns_tear_sheet
from alphalens.utils import get_clean_factor_and_forward_returns
from alphalens.performance import mean_information_coefficient


class Momentum(CustomFactor):
    # Default inputs
    inputs = [EquityPricing.close]

    # Compute momentum
    def compute(self, today, assets, out, close):
        out[:] = close[-1] / close[0]
        
def make_pipeline():    
    momentum_1m = Momentum(window_length=22)
    momentum_6m = Momentum(window_length=132)    
    momentum_2_6m = momentum_6m/momentum_1m

    earningyield = Fundamentals.earn_yld_af.latest
    high_ey = earningyield.percentile_between(50, 100)

    price_book = Fundamentals.pbk_af.latest
    low_pb = price_book.percentile_between(0, 50)

    roic = Fundamentals.roic_af.latest
    high_roic = roic.percentile_between(50, 100)

    market_cap = Fundamentals.mkt_val_public.latest
    small_cap = market_cap.percentile_between(0,50)

    erp5= high_ey & low_pb & high_roic 

    volume = EquityPricing.volume.latest 

    return Pipeline(  
        columns={  
            'Earning Yield': earningyield,  
            'Price to Book': price_book,
            'ROIC': roic,
            'Momentum' : momentum_2_6m,
            'ERP5' : erp5
        }, 
        screen = (volume > 50000) & erp5 & small_cap,
        domain=JP_EQUITIES ,  
    )

factor_data = run_pipeline(make_pipeline(),  start_date='2007-1-1', end_date='2019-1-1')
pricing_data = get_pricing(factor_data.index.levels[1], start_date='2007-1-1', end_date='2020-2-1', fields='open_price')

sector_labels, sector_labels[-1] = dict(Sector.SECTOR_NAMES), "Unknown"

merged_data = get_clean_factor_and_forward_returns(
    factor=factor_data['ERP5'],
    prices=pricing_data,
    quantiles=5,
    groupby=factor_data['ROIC'],
    groupby_labels=sector_labels,
    binning_by_group=True,
    periods=(126,252)
)# week = 5, month = 21, quarter = 63, year = 252

mean_information_coefficient(merged_data).plot(title="IC Decay")

create_full_tear_sheet(merged_data)
create_information_tear_sheet(merged_data, by_group=True, group_neutral=True)
create_returns_tear_sheet(merged_data, by_group=True, group_neutral=True)

print('Number of securities that passed the filter: %d' % len(result))

Pipeline Execution Time: 12.31 Seconds
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-20-54217d112631> in <module>()
     52 
     53 factor_data = run_pipeline(make_pipeline(),  start_date='2007-1-1', end_date='2019-1-1')
---> 54 pricing_data = get_pricing(factor_data.index.levels[1], start_date='2007-1-1', end_date='2020-2-1', fields='open_price')
     55 
     56 sector_labels, sector_labels[-1] = dict(Sector.SECTOR_NAMES), "Unknown"

/build/src/qexec_repo/qexec/research/api.py in get_pricing(symbols, start_date, end_date, symbol_reference_date, frequency, fields, handle_missing, start_offset)
    442             readers=readers,
    443             user_id=user_id,
--> 444             min_asset_data_date=min_date,
    445         )
    446 

/build/src/qexec_repo/qexec/research/_api.py in inner_get_pricing(symbols, start_date, end_date, symbol_reference_date, frequency, fields, handle_missing, start_offset, readers, user_id, min_asset_data_date)
    433 
    434         asset_specific_data = _get_pricing_internal(
--> 435             data_portal, assets, pinched_start, pinched_end, frequency, fields,
    436         )
    437 

/build/src/qexec_repo/qexec/research/trades.py in _get_pricing_internal(data_portal, assets, start_date, end_date, frequency, fields)
    115         start_date=ensure_timestamp(start_date),
    116         end_date=ensure_timestamp(end_date),
--> 117         data_frequency=frequency,
    118     )
    119 

/build/src/qexec_repo/qexec/research/trades.py in ohlcv_panel_from_source(data_portal, sids, start_date, end_date, data_frequency, fields)
    189                 freq,
    190                 _field,
--> 191                 data_frequency,
    192             )
    193             # For date_indexes which extend past the last available dt, reindex

/build/src/qexec_repo/zipline_repo/zipline/data/data_portal.py in get_history_window(self, assets, end_dt, bar_count, frequency, field, data_frequency, ffill)
    965             else:
    966                 df = self._get_history_daily_window(assets, end_dt, bar_count,
--> 967                                                     field, data_frequency)
    968         elif frequency == "1m":
    969             if field == "price":

/build/src/qexec_repo/zipline_repo/zipline/data/data_portal.py in _get_history_daily_window(self, assets, end_dt, bar_count, field_to_use, data_frequency)
    804 
    805         data = self._get_history_daily_window_data(
--> 806             assets, days_for_window, end_dt, field_to_use, data_frequency
    807         )
    808         return pd.DataFrame(

/build/src/qexec_repo/zipline_repo/zipline/data/data_portal.py in _get_history_daily_window_data(self, assets, days_for_window, end_dt, field_to_use, data_frequency)
    827                 field_to_use,
    828                 days_for_window,
--> 829                 extra_slot=False
    830             )
    831         else:

/build/src/qexec_repo/zipline_repo/zipline/data/data_portal.py in _get_daily_window_data(self, assets, field, days_in_window, extra_slot)
   1115                                                 days_in_window,
   1116                                                 field,
-> 1117                                                 extra_slot)
   1118             if extra_slot:
   1119                 return_array[:len(return_array) - 1, :] = data

/build/src/qexec_repo/zipline_repo/zipline/data/history_loader.py in history(self, assets, dts, field, is_perspective_after)
    547                                              dts,
    548                                              field,
--> 549                                              is_perspective_after)
    550         end_ix = self._calendar.searchsorted(dts[-1])
    551 

/build/src/qexec_repo/zipline_repo/zipline/data/history_loader.py in _ensure_sliding_windows(self, assets, dts, field, is_perspective_after)
    429                 adj_dts = prefetch_dts
    430             prefetch_len = len(prefetch_dts)
--> 431             array = self._array(prefetch_dts, needed_assets, field)
    432 
    433             if field == 'sid':

/build/src/qexec_repo/zipline_repo/zipline/data/history_loader.py in _array(self, dts, assets, field)
    571             dts[0],
    572             dts[-1],
--> 573             assets,
    574         )[0]
    575 

/build/src/qexec_repo/zipline_repo/zipline/data/dispatch_bar_reader.py in load_raw_arrays(self, fields, start_dt, end_dt, sids)
    118                                                 end_dt,
    119                                                 sid_groups[t])
--> 120             for t in asset_types if sid_groups[t]}
    121 
    122         results = []

/build/src/qexec_repo/zipline_repo/zipline/data/dispatch_bar_reader.py in <dictcomp>(.0)
    118                                                 end_dt,
    119                                                 sid_groups[t])
--> 120             for t in asset_types if sid_groups[t]}
    121 
    122         results = []

/build/src/qexec_repo/qexec/artifacts/validation.py in load_raw_arrays(self, columns, start_date, end_date, assets)
     42             start_date=start_date,
     43             end_date=end_date,
---> 44             assets=assets,
     45         )
     46 

/build/src/qexec_repo/zipline_repo/zipline/data/bcolz_daily_bars.py in load_raw_arrays(self, columns, start_date, end_date, assets)
    577             start_idx,
    578             end_idx,
--> 579             assets,
    580         )
    581         read_all = len(assets) > self._read_all_threshold

/build/src/qexec_repo/zipline_repo/zipline/data/bcolz_daily_bars.py in _compute_slices(self, start_idx, end_idx, assets)
    567             start_idx,
    568             end_idx,
--> 569             assets,
    570         )
    571 

/build/src/qexec_repo/zipline_repo/zipline/data/_equities.pyx in zipline.data._equities._compute_row_slices (zipline/data/_equities.c:2939)()

/build/src/qexec_repo/zipline_repo/zipline/data/_equities.pyx in zipline.data._equities._compute_row_slices (zipline/data/_equities.c:2759)()

ValueError: At least one valid asset id is required.
In [ ]: