Skip to content

Commit

Permalink
data / ib - fetch futures data (#17)
Browse files Browse the repository at this point in the history
* examples. provide better log messages.

* enable IB to fetch futures data.

* add more data to basic report.

* update examples to show futures data fetching

* run on all branches

* update readme

* try this

* try one more thing

* readme

* remove unused data.

* unused

* return calendar that's specific to symbol

* rename

* handle various futures exchanges

* log things via OOO
  • Loading branch information
Weston Platter authored Jul 29, 2020
1 parent ab11a60 commit 602fde5
Show file tree
Hide file tree
Showing 12 changed files with 267 additions and 326 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/conda-run-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@ name: Run tests

on:
push:
branches: [ dev, "issue-12/base"]
branches:
- "issue-**"
pull_request:
branches: [ dev ]
branches:
- "dev"

jobs:
example-1:
Expand Down
4 changes: 1 addition & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,10 @@ intervals?

## Features

- [ ] Pull in live data from IB
- [x] Pull data from IB
- [ ] Apply various indicators against single or multiple instruments
- [ ] Simple Moving Average Crossover
- [ ] MACD Crossover
- [ ] please open an issue for any features you're interested in
- more coming

### Digging a little deeper

Expand Down
9 changes: 9 additions & 0 deletions examples/example_db_futures.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from loguru import logger
from ta_scanner.data import load_and_cache, IbDataFetcher

# Warm the local cache with recent futures data from Interactive Brokers.
ib_data_fetcher = IbDataFetcher()

# (symbol, lookback window in days) for each contract to fetch.
for _symbol, _days in [("/MES", 20), ("/MNQ", 20), ("/ZS", 30), ("/MGC", 30)]:
    df = load_and_cache(_symbol, ib_data_fetcher, previous_days=_days, use_rth=True)
File renamed without changes.
44 changes: 44 additions & 0 deletions examples/moving_average_crossover_futures.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
from loguru import logger
from ta_scanner.data import load_and_cache, IbDataFetcher
from ta_scanner.indicators import IndicatorSmaCrossover, IndicatorParams
from ta_scanner.signals import Signal
from ta_scanner.filters import FilterCumsum, FilterOptions, FilterNames
from ta_scanner.reports import BasicReport


# fetch /ZS (soybean futures) data from Interactive Brokers
ib_data_fetcher = IbDataFetcher()
df = load_and_cache("/ZS", ib_data_fetcher, previous_days=30, use_rth=True)

indicator_sma_cross = IndicatorSmaCrossover()

# store signals in this field
field_name = "moving_avg_cross"

# Moving Average Crossover, fast SMA 10 vs slow SMA 30
indicator_params = {
    IndicatorParams.fast_sma: 10,
    IndicatorParams.slow_sma: 30,
}
# apply indicator to generate signals
indicator_sma_cross.apply(df, field_name, indicator_params)

# initialize filter
sfilter = FilterCumsum()

filter_options = {
    FilterOptions.win_points: 5,
    FilterOptions.loss_points: 2,
    FilterOptions.threshold_intervals: 20,
}

# generate results
# NOTE(review): `results` is not used below; the PnL summary comes from
# BasicReport.analyze, which reads the filter's output column from `df`.
results = sfilter.apply(df, field_name, filter_options)

# analyze results
basic_report = BasicReport()
pnl, count, average, median = basic_report.analyze(df, FilterNames.filter_cumsum.value)

logger.info("------------------------")

logger.info(f"Final PnL = {pnl}")
54 changes: 54 additions & 0 deletions examples/moving_average_crossover_range_futures.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# Sweep fast-SMA periods (2..99) against a fixed slow SMA (100) on /MNQ
# futures data and print one PnL summary line per combination.
from loguru import logger
import sys

from ta_scanner.data import load_and_cache, IbDataFetcher
from ta_scanner.indicators import IndicatorSmaCrossover, IndicatorParams
from ta_scanner.signals import Signal
from ta_scanner.filters import FilterCumsum, FilterOptions, FilterNames
from ta_scanner.reports import BasicReport


# mute the noisy data debug statements
logger.remove()
logger.add(sys.stderr, level="INFO")

ib_data_fetcher = IbDataFetcher()
df = load_and_cache("/MNQ", ib_data_fetcher, previous_days=30, use_rth=True)

indicator_sma_cross = IndicatorSmaCrossover()

# store signals in this field
field_name = "moving_avg_cross"


def run_cross(fast_sma: int, slow_sma: int):
    """Apply an SMA crossover with the given periods to the module-level
    `df` and return (pnl, count, avg, median) from BasicReport.

    NOTE(review): mutates `df` in place via indicator/filter apply calls.
    """
    indicator_params = {
        IndicatorParams.fast_sma: fast_sma,
        IndicatorParams.slow_sma: slow_sma,
    }
    # apply indicator to generate signals
    indicator_sma_cross.apply(df, field_name, indicator_params)

    # initialize filter
    sfilter = FilterCumsum()

    filter_options = {
        FilterOptions.win_points: 4,
        FilterOptions.loss_points: 2,
        FilterOptions.threshold_intervals: 20,
    }

    # generate results
    # NOTE(review): `results` is unused; the summary comes from BasicReport.
    results = sfilter.apply(df, field_name, filter_options)

    # get aggregate pnl
    basic_report = BasicReport()
    pnl, count, avg, median = basic_report.analyze(df, FilterNames.filter_cumsum.value)
    return pnl, count, avg, median


slow_sma = 100

# try every fast SMA shorter than the slow SMA
for fast_sma in range(2, slow_sma):
    pnl, count, avg, median = run_cross(fast_sma, slow_sma)
    print(f"MA_Crx_{fast_sma}/{slow_sma}, {pnl}, {count}, {avg}, {median}")
File renamed without changes.
File renamed without changes.
254 changes: 0 additions & 254 deletions ta_scanner/SPY.csv

This file was deleted.

176 changes: 127 additions & 49 deletions ta_scanner/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,26 +7,10 @@
from datetime import datetime, timedelta, timezone
import pytz
from trading_calendars import get_calendar, TradingCalendar
from typing import Optional, Dict

from ta_scanner.models import gen_engine, init_db, Quote

VALID_WHAT_TO_SHOW_TYPES = [
"TRADES",
"MIDPOINT",
"BID",
"ASK",
"BID_ASK",
"ADJUSTED_LAST",
"HISTORICAL_VOLATILITY",
"OPTION_IMPLIED_VOLATILITY",
"REBATE_RATE",
"FEE_RATE",
"YIELD_BID",
"YIELD_ASK",
"YIELD_BID_ASK",
"YIELD_LAST",
]


class TimezoneNames(Enum):
US_EASTERN = "US/Eastern"
Expand All @@ -53,23 +37,65 @@ class WhatToShow(Enum):


class Exchange(Enum):
SMART = "SMART"
NYSE = "NYSE"
SMART = "NYSE"

GLOBEX = "GLOBEX"
NYMEX = "NYMEX"
ECBOT = "ECBOT"
CBOE = "CBOE"
ICE = "ICE"

class ExchangeCalendar(Enum):
NYSE = "XNYS"
SMART = "SMART"


def prepare_db():
init_db()
class Calendar(Enum):
    """Trading-calendar identifiers for the `trading_calendars` package.

    Values are calendar codes accepted by `get_calendar`.
    https://github.com/quantopian/trading_calendars

    NOTE: DEFAULT and NYSE share the value "XNYS", so NYSE is an Enum
    alias of DEFAULT — lookups by value resolve to DEFAULT.
    """

    DEFAULT = "XNYS"  # default to NYSE
    NYSE = "XNYS"
    CME = "CMES"
    CBOE = "XCBF"
    ICE = "IEPA"

    @staticmethod
    def futures_lookup_hash() -> Dict:
        """Map each Calendar member to the futures symbols traded on it."""
        return {
            Calendar.CME: [
                # equities
                '/ES', '/MES', '/NQ', '/MNQ',
                # metals
                '/GC', '/MGC',
                # energy
                '/CL', '/QM',
                # currencies
                '/M6A', '/M6B', '/M6E',
                # interest rates
                '/GE', '/ZN', '/ZT',
                # grains
                '/ZC', '/YC', '/ZS', '/YK', '/ZW', '/YW',
            ],
            Calendar.CBOE: [],
            Calendar.ICE: [],
        }

    @staticmethod
    def select_exchange_by_symbol(symbol: str) -> "Calendar":
        """Return the Calendar whose symbol list contains `symbol`.

        Falls back to Calendar.DEFAULT (with a warning) when unknown.
        """
        for calendar, symbols in Calendar.futures_lookup_hash().items():
            if symbol in symbols:
                return calendar
        # was `logger.waring` — AttributeError at runtime on this path
        logger.warning(f"Did not find a calendar entry for symbol={symbol}")
        return Calendar.DEFAULT

    @staticmethod
    def init_by_symbol(symbol: str) -> TradingCalendar:
        """Instantiate the trading calendar for `symbol`.

        A leading "/" marks a futures symbol; everything else is assumed
        to trade on NYSE hours.
        """
        if "/" in symbol:
            key = Calendar.select_exchange_by_symbol(symbol)
            name = key.value
        else:
            name = Calendar.NYSE.value
        return get_calendar(name)


def prepare_ib():
ib = IB()
ib.connect("127.0.0.1", 4001, clientId=1)
return ib
class Currency(Enum):
USD = "USD"


from abc import ABCMeta, abstractmethod
Expand All @@ -83,24 +109,21 @@ class IbDataFetcher(DataFetcherBase):
def __init__(self):
self.ib = None

def init_client(
def _init_client(
    self, host: str = "127.0.0.1", port: int = 4001, client_id: int = 0
) -> None:
    """Connect a fresh ib_insync IB client and store it on self.ib.

    Defaults target a locally running IB Gateway/TWS on port 4001.
    """
    ib = IB()
    ib.connect(host, port, clientId=client_id)
    self.ib = ib

def request_stock_instrument(
self, instrument_symbol: str, dt: datetime, what_to_show: str
def _convert_bars_to_df(self, bars) -> pd.DataFrame:
    """Convert an ib_insync bar list to a pandas DataFrame via util.df."""
    return util.df(bars)

def _execute_req_historical(
self, contract, dt, duration, bar_size_setting, what_to_show, use_rth
) -> pd.DataFrame:
if self.ib is None or not self.ib.isConnected():
self.init_client()

exchange = Exchange.SMART
contract = Stock(instrument_symbol, exchange.value, "USD")
duration = "1 D"
bar_size_setting = "1 min"
use_rth = False
self._init_client()

bars = self.ib.reqHistoricalData(
contract,
Expand All @@ -111,9 +134,69 @@ def request_stock_instrument(
useRTH=use_rth,
formatDate=2, # return as UTC time
)
return self._convert_bars_to_df(bars)

df = util.df(bars)
return df
def request_stock_instrument(
    self, instrument_symbol: str, dt: datetime, what_to_show: str
) -> pd.DataFrame:
    """Fetch one day of 1-minute bars for a stock symbol.

    Uses IB SMART routing and USD currency; includes data outside
    regular trading hours (use_rth=False).
    """
    exchange = Exchange.SMART.value
    contract = Stock(instrument_symbol, exchange, Currency.USD.value)
    duration = "1 D"
    bar_size_setting = "1 min"
    use_rth = False
    return self._execute_req_historical(
        contract, dt, duration, bar_size_setting, what_to_show, use_rth
    )

def select_echange_by_symbol(self, symbol):
    """Return the Exchange enum member where `symbol` trades.

    NOTE(review): the method name keeps the original "echange" spelling
    so existing callers are unaffected.

    Raises:
        NotImplementedError: if the symbol has no exchange mapping yet.
    """
    d = {
        Exchange.GLOBEX: [
            # equities
            '/ES', '/MES', '/NQ', '/MNQ',
            # # currencies
            # ? '/M6A', '/M6B', '/M6E',
            # # interest rates
            # ? '/GE', '/ZN', '/ZT',
        ],
        Exchange.ECBOT: ['/ZC', '/YC', '/ZS', '/YK', '/ZW', '/YW'],
        Exchange.NYMEX: ['/GC', '/MGC', '/CL', '/QM'],
    }

    for exchange, symbols in d.items():
        if symbol in symbols:
            return exchange
    # include the symbol so the failure is diagnosable from the traceback
    raise NotImplementedError(f"no exchange mapping for symbol={symbol}")

def request_future_instrument(
    self,
    symbol: str,
    dt: datetime,
    what_to_show: str,
    contract_date: Optional[str] = None,
) -> pd.DataFrame:
    """Fetch one day of 1-minute bars for a futures symbol.

    Only continuous front-month contracts (ContFuture) are supported;
    passing a specific contract_date raises NotImplementedError.
    """
    exchange_name = self.select_echange_by_symbol(symbol).value

    if contract_date:
        # dated contracts not implemented yet
        raise NotImplementedError
    else:
        contract = ContFuture(symbol, exchange_name, currency=Currency.USD.value)

    duration = "1 D"
    bar_size_setting = "1 min"
    use_rth = False
    return self._execute_req_historical(
        contract, dt, duration, bar_size_setting, what_to_show, use_rth
    )

def request_instrument(
    self, symbol: str, dt, what_to_show, contract_date: Optional[str] = None,
):
    """Dispatch to the futures or stock fetcher based on the symbol.

    A "/" in the symbol marks it as a futures contract.
    """
    is_future = "/" in symbol
    if not is_future:
        return self.request_stock_instrument(symbol, dt, what_to_show)
    return self.request_future_instrument(symbol, dt, what_to_show, contract_date)


def load_and_cache(
Expand All @@ -128,10 +211,11 @@ def load_and_cache(
pd.DataFrame: [description]
"""
engine = gen_engine()
prepare_db()
init_db()

previous_days = int(kwargs["previous_days"])
use_rth = kwargs["use_rth"] if "use_rth" in kwargs else False
contract_date = kwargs["contract_date"] if "contract_date" in kwargs else None

tz = pytz.timezone(TimezoneNames.US_EASTERN.value)
now = datetime.now(tz)
Expand All @@ -143,11 +227,7 @@ def load_and_cache(
dfs = []
# temp - stop

exchange = Exchange.SMART
exchange_calendar = ExchangeCalendar[exchange.name]
calendar = get_calendar(exchange_calendar.value)

# ib_data_fetcher = IbDataFetcher()
calendar = Calendar.init_by_symbol(instrument_symbol)

for date in gen_last_x_days_from(previous_days, end_date):
# if market was closed - skip
Expand All @@ -158,9 +238,7 @@ def load_and_cache(
if db_data_exists(engine, instrument_symbol, date):
df = db_data_fetch(engine, instrument_symbol, date)
else:
df = data_fetcher.request_stock_instrument(
instrument_symbol, date, what_to_show
)
df = data_fetcher.request_instrument(instrument_symbol, date, what_to_show)

df["symbol"] = instrument_symbol
rename_df_columns(df)
Expand Down
Loading

0 comments on commit 602fde5

Please sign in to comment.