final fixes
benjaminsingleton committed Nov 11, 2023
1 parent 12fc140 commit 234a59a
Showing 3 changed files with 14 additions and 95 deletions.
8 changes: 0 additions & 8 deletions examples/notebooks/backtest_example.ipynb
@@ -168,14 +168,6 @@
 "source": [
 "result"
 ]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "af22401c-4d5b-4a58-bb18-97f460cb284c",
-"metadata": {},
-"outputs": [],
-"source": []
 }
 ],
 "metadata": {
58 changes: 0 additions & 58 deletions nautilus_trader/adapters/interactive_brokers/historic/client.py
@@ -31,7 +31,6 @@
 from nautilus_trader.model.identifiers import TraderId
 from nautilus_trader.model.instruments.base import Instrument
 from nautilus_trader.msgbus.bus import MessageBus
-from nautilus_trader.persistence.catalog import ParquetDataCatalog


 class HistoricInteractiveBrokersClient:
@@ -484,60 +483,3 @@ def _calculate_duration_segments(
         results.append((minus_days_date, f"{seconds} S"))

         return results
-
-
-# will remove this post testing and review
-async def main():
-    contract = IBContract(
-        secType="STK",
-        symbol="AAPL",
-        exchange="SMART",
-        primaryExchange="NASDAQ",
-    )
-    instrument_id = "TSLA.NASDAQ"
-
-    client = HistoricInteractiveBrokersClient(port=4002, client_id=5)
-    await client._connect()
-    await asyncio.sleep(2)
-
-    instruments = await client.request_instruments(
-        contracts=[contract],
-        instrument_ids=[instrument_id],
-    )
-
-    bars = await client.request_bars(
-        bar_specifications=["1-DAY-LAST", "8-HOUR-MID"],
-        start_date_time=datetime.datetime(2022, 10, 15, 3),
-        end_date_time=datetime.datetime(2023, 11, 1),
-        tz_name="America/New_York",
-        contracts=[contract],
-        instrument_ids=[instrument_id],
-    )
-
-    trade_ticks = await client.request_ticks(
-        "TRADES",
-        start_date_time=datetime.datetime(2023, 11, 6, 10, 0),
-        end_date_time=datetime.datetime(2023, 11, 6, 10, 1),
-        tz_name="America/New_York",
-        contracts=[contract],
-        instrument_ids=[instrument_id],
-    )
-
-    quote_ticks = await client.request_ticks(
-        "BID_ASK",
-        start_date_time=datetime.datetime(2023, 11, 6, 10, 0),
-        end_date_time=datetime.datetime(2023, 11, 6, 10, 1),
-        tz_name="America/New_York",
-        contracts=[contract],
-        instrument_ids=[instrument_id],
-    )
-
-    catalog = ParquetDataCatalog("./catalog")
-    catalog.write_data(instruments)
-    catalog.write_data(bars)
-    catalog.write_data(trade_ticks)
-    catalog.write_data(quote_ticks)
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
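
The ad-hoc main() removed above also served as the only usage example for this client. For reference, a minimal standalone sketch of the same download-and-persist workflow, kept outside the library module, could look like the following. It reuses only calls visible in the removed code (HistoricInteractiveBrokersClient, _connect, request_instruments, request_bars, ParquetDataCatalog.write_data); the IBContract import path, port, dates, instrument, and catalog path are assumptions to adjust for your own setup.

# Standalone sketch (assumed imports/paths) reproducing the removed ad-hoc harness.
# Requires an IB Gateway/TWS instance listening on the configured port.
import asyncio
import datetime

from nautilus_trader.adapters.interactive_brokers.common import IBContract
from nautilus_trader.adapters.interactive_brokers.historic.client import (
    HistoricInteractiveBrokersClient,
)
from nautilus_trader.persistence.catalog import ParquetDataCatalog


async def download_and_persist() -> None:
    contract = IBContract(secType="STK", symbol="AAPL", exchange="SMART", primaryExchange="NASDAQ")
    instrument_id = "AAPL.NASDAQ"

    client = HistoricInteractiveBrokersClient(port=4002, client_id=5)
    await client._connect()  # private in the current client; the removed harness called it directly
    await asyncio.sleep(2)   # give the gateway connection time to settle, as the removed code did

    instruments = await client.request_instruments(
        contracts=[contract],
        instrument_ids=[instrument_id],
    )
    bars = await client.request_bars(
        bar_specifications=["1-DAY-LAST"],
        start_date_time=datetime.datetime(2023, 1, 1),
        end_date_time=datetime.datetime(2023, 11, 1),
        tz_name="America/New_York",
        contracts=[contract],
        instrument_ids=[instrument_id],
    )

    # Persist everything to a local Parquet catalog for later backtests
    catalog = ParquetDataCatalog("./catalog")
    catalog.write_data(instruments)
    catalog.write_data(bars)


if __name__ == "__main__":
    asyncio.run(download_and_persist())

Keeping this workflow in a separate script (or under examples/) preserves the usage pattern without shipping test code inside the adapter module, which is what this commit removes.
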
43 changes: 14 additions & 29 deletions nautilus_trader/persistence/wranglers_v2.py
@@ -65,13 +65,6 @@ def decode(k, v):
             **{k.decode(): decode(k, v) for k, v in metadata.items() if k not in cls.IGNORE_KEYS},
         )

-    def scale_column(
-        self,
-        column: pd.Series,
-        dtype: pd.core.arrays.integer.IntegerDtype,
-    ) -> pd.Series:
-        return (column * 1e9).round().astype(dtype())
-

 class OrderBookDeltaDataWrangler(WranglerBase):
     """
@@ -146,10 +139,10 @@ def from_pandas(
         )

         # Scale prices and quantities
-        df["price"] = super().scale_column(df["price"], pd.Int64Dtype)
-        df["size"] = super().scale_column(df["size"], pd.UInt64Dtype)
+        df["price"] = (df["price"] * 1e9).astype(pd.Int64Dtype())
+        df["size"] = (df["size"] * 1e9).round().astype(pd.UInt64Dtype())

-        df["order_id"] = super().scale_column(df["order_id"], pd.UInt64Dtype)
+        df["order_id"] = df["order_id"].astype(pd.UInt64Dtype())

         # Process timestamps
         df["ts_event"] = (
@@ -254,17 +247,17 @@ def from_pandas(
         )

         # Scale prices and quantities
-        df["bid_price"] = super().scale_column(df["bid_price"], pd.Int64Dtype)
-        df["ask_price"] = super().scale_column(df["ask_price"], pd.Int64Dtype)
+        df["bid_price"] = (df["bid_price"] * 1e9).astype(pd.Int64Dtype())
+        df["ask_price"] = (df["ask_price"] * 1e9).astype(pd.Int64Dtype())

         # Create bid_size and ask_size columns
         if "bid_size" in df.columns:
-            df["bid_size"] = super().scale_column(df["bid_size"], pd.Int64Dtype)
+            df["bid_size"] = (df["bid_size"] * 1e9).astype(pd.Int64Dtype())
         else:
             df["bid_size"] = pd.Series([default_size * 1e9] * len(df), dtype=pd.UInt64Dtype())

         if "ask_size" in df.columns:
-            df["ask_size"] = super().scale_column(df["ask_size"], pd.Int64Dtype)
+            df["ask_size"] = (df["ask_size"] * 1e9).astype(pd.Int64Dtype())
         else:
             df["ask_size"] = pd.Series([default_size * 1e9] * len(df), dtype=pd.UInt64Dtype())

@@ -375,8 +368,8 @@ def from_pandas(
         )

         # Scale prices and quantities
-        df["price"] = super().scale_column(df["price"], pd.Int64Dtype)
-        df["size"] = super().scale_column(df["size"], pd.UInt64Dtype)
+        df["price"] = (df["price"] * 1e9).astype(pd.Int64Dtype())
+        df["size"] = (df["size"] * 1e9).round().astype(pd.UInt64Dtype())

         df["aggressor_side"] = df["aggressor_side"].map(_map_aggressor_side).astype(pd.UInt8Dtype())
         df["trade_id"] = df["trade_id"].astype(str)
@@ -488,23 +481,15 @@ def from_pandas(
         # Rename column
         df = df.rename(columns={"timestamp": "ts_event"})

-        # Check required columns
-        required_columns = {"open", "high", "low", "close", "ts_event"}
-        missing_columns = required_columns - set(df.columns)
-        if missing_columns:
-            raise ValueError(f"Missing columns: {missing_columns}")
-
-        # Scale OHLC
-        df["open"] = super().scale_column(df["open"], pd.Int64Dtype)
-        df["high"] = super().scale_column(df["high"], pd.Int64Dtype)
-        df["low"] = super().scale_column(df["low"], pd.Int64Dtype)
-        df["close"] = super().scale_column(df["close"], pd.Int64Dtype)
+        # Scale prices and quantities
+        df["open"] = (df["open"] * 1e9).astype(pd.Int64Dtype())
+        df["high"] = (df["high"] * 1e9).astype(pd.Int64Dtype())
+        df["low"] = (df["low"] * 1e9).astype(pd.Int64Dtype())
+        df["close"] = (df["close"] * 1e9).astype(pd.Int64Dtype())

         if "volume" not in df.columns:
             df["volume"] = pd.Series([default_volume * 1e9] * len(df), dtype=pd.UInt64Dtype())

-        df["volume"] = super().scale_column(df["volume"], pd.UInt64Dtype)
-
         # Process timestamps
         df["ts_event"] = (
             pd.to_datetime(df["ts_event"], utc=True, format="mixed")
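
For reference, the scaling these hunks inline converts floating-point prices and sizes into 64-bit integers carrying nine decimal places of precision: raw values are multiplied by 1e9 before the cast. A minimal, self-contained pandas sketch of that convention is below; the column names and sample values are illustrative only.

# Sketch of the 1e9 fixed-point scaling convention used by the wranglers above.
import pandas as pd

df = pd.DataFrame(
    {
        "price": [100.25, 100.50],  # floats as they might arrive from a CSV or feed
        "size": [1.5, 2.0],
    }
)

# Rounding before the cast guards against binary floating-point error,
# e.g. 1.005 * 1e9 evaluates to 1004999999.9999999 rather than 1005000000.0.
df["price"] = (df["price"] * 1e9).round().astype(pd.Int64Dtype())
df["size"] = (df["size"] * 1e9).round().astype(pd.UInt64Dtype())

print(df.dtypes)  # price -> Int64, size -> UInt64
print(df)         # price: 100250000000, 100500000000; size: 1500000000, 2000000000

Note that in the hunks above only the size columns keep an explicit .round(); the price columns are cast directly.
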
