19 changes: 16 additions & 3 deletions CHANGELOG.md
@@ -1,5 +1,19 @@
# Changelog

## 0.57.1 - 2025-06-17

#### Enhancements
- Changed the following Venue, Publisher, and Dataset descriptions:
  - "ICE Futures Europe (Financials)" renamed to "ICE Europe Financials"
  - "ICE Futures Europe (Commodities)" renamed to "ICE Europe Commodities"
- Upgraded `databento-dbn` to 0.36.1
  - Fixed setting of the `ts_out` property of `DbnFsm` based on decoded metadata, which
    was preventing `ts_out` from being correctly decoded in the Python `DBNDecoder`
  - Fixed decoding of `ts_out` for the first records in `DBNDecoder`

#### Bug fixes
- Fixed an issue where DBN records from the Live client did not have their `ts_out` populated

## 0.57.0 - 2025-06-10

#### Enhancements
@@ -121,7 +135,7 @@
## 0.50.0 - 2025-03-18

#### Enhancements
- Added new venues, datasets, and publishers for ICE Futures US, ICE Futures Europe (Financial products), Eurex, and European Energy Exchange (EEX)
- Added new venues, datasets, and publishers for ICE Futures US, ICE Europe Financials products, Eurex, and European Energy Exchange (EEX)
- Added export of the following enums from `databento_dbn` to the root `databento` package:
- `Action`
- `InstrumentClass`
@@ -148,8 +162,7 @@
## 0.49.0 - 2025-03-04

#### Enhancements
- Added new venues, datasets, and publishers for ICE Futures US and for ICE Futures
Europe (Financial products)
- Added new venues, datasets, and publishers for ICE Futures US and for ICE Europe Financials products
- Added a `keep_zip` parameter to `Historical.batch.download()`. When `True`, and downloading all files, the job's contents will be saved as a ZIP file
- Calling `Live.terminate()` will now attempt to write EOF before aborting the connection to help close the remote end

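For context on the `ts_out` bug fix in 0.57.1 above, here is a minimal sketch (not part of this PR) of how the fix can be observed from user code. The API key, dataset, and symbol selection are placeholders, and the `ts_out` attribute access on records is an assumption rather than something shown in this diff.

```python
import databento as db

# Request gateway send timestamps for the session.
live = db.Live(key="YOUR_API_KEY", ts_out=True)
live.subscribe(
    dataset="GLBX.MDP3",
    schema="mbo",
    stype_in="parent",
    symbols=["ES.FUT"],  # placeholder symbol selection
)

for record in live:
    # With 0.57.1, records decoded by the Live client should carry ts_out.
    print(type(record).__name__, getattr(record, "ts_out", None))
    break

live.stop()
```
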
2 changes: 1 addition & 1 deletion README.md
@@ -32,7 +32,7 @@ The library is fully compatible with the latest distribution of Anaconda 3.9 and
The minimum dependencies as found in the `pyproject.toml` are also listed below:
- python = "^3.9"
- aiohttp = "^3.8.3"
- databento-dbn = "0.35.1"
- databento-dbn = "0.36.1"
- numpy= ">=1.23.5"
- pandas = ">=1.5.3"
- pip-system-certs = ">=4.0" (Windows only)
8 changes: 6 additions & 2 deletions databento/common/dbnstore.py
@@ -328,6 +328,8 @@ class DBNStore:
Write the data to a file in JSON format.
to_ndarray : np.ndarray
The data as a numpy `ndarray`.
to_parquet
Write the data to a parquet file.

Raises
------
@@ -662,7 +664,7 @@ def from_bytes(cls, data: BytesIO | bytes | IO[bytes]) -> DBNStore:

Parameters
----------
data : BytesIO or bytes
data : BytesIO or bytes or IO[bytes]
The bytes to read from.

Returns
@@ -698,7 +700,7 @@ def insert_symbology_json(
self._instrument_map.clear()
self._instrument_map.insert_json(json_data)

def replay(self, callback: Callable[[Any], None]) -> None:
def replay(self, callback: Callable[[DBNRecord], None]) -> None:
"""
Replay data by passing records sequentially to the given callback.

@@ -983,6 +985,8 @@ def to_parquet(

Parameters
----------
path: PathLike[str] or str
The file path to write the data to.
price_type : str, default "float"
The price type to use for price fields.
If "fixed", prices will have a type of `int` in fixed decimal format; each unit representing 1e-9 or 0.000000001.
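A short sketch (not from the PR) exercising the `DBNStore` methods whose docstrings and annotations are touched above: `from_bytes()` accepting any `IO[bytes]`, `replay()` with a per-record callback, and `to_parquet()` with an explicit output path. File names are placeholders.

```python
from databento import DBNStore

# Any binary file-like object works; the path here is a placeholder.
with open("example.mbo.dbn.zst", "rb") as dbn_file:
    store = DBNStore.from_bytes(dbn_file)

def on_record(record) -> None:
    # replay() is now annotated as taking Callable[[DBNRecord], None]
    print(record.hd.rtype, record.hd.ts_event)

store.replay(callback=on_record)

# `path` is the first parameter documented for to_parquet()
store.to_parquet("example.parquet", price_type="float")
```
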
2 changes: 1 addition & 1 deletion databento/common/parsing.py
@@ -351,7 +351,7 @@ def datetime_to_unix_nanoseconds(


def optional_datetime_to_unix_nanoseconds(
value: pd.Timestamp | str | int | None,
value: pd.Timestamp | date | str | int | None,
) -> int | None:
"""
Return a valid UNIX nanosecond timestamp from the given value (if not
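The parsing change above widens the accepted input to include `datetime.date`. A tiny sketch, assuming the implementation accepts plain dates as the widened annotation suggests (this is an internal helper; the call is shown for illustration only):

```python
from datetime import date

import pandas as pd

from databento.common.parsing import optional_datetime_to_unix_nanoseconds

# A date is now accepted alongside pd.Timestamp, str, int, and None.
print(optional_datetime_to_unix_nanoseconds(date(2025, 6, 17)))
print(optional_datetime_to_unix_nanoseconds(pd.Timestamp("2025-06-17")))
print(optional_datetime_to_unix_nanoseconds(None))  # returns None
```
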
32 changes: 16 additions & 16 deletions databento/common/publishers.py
@@ -91,7 +91,7 @@ class Venue(StringyMixin, str, Enum):
MXOP
MEMX Options.
IFEU
ICE Futures Europe (Commodities).
ICE Europe Commodities.
NDEX
ICE Endex.
DBEQ
@@ -113,7 +113,7 @@ class Venue(StringyMixin, str, Enum):
IFUS
ICE Futures US.
IFLL
ICE Futures Europe (Financials).
ICE Europe Financials.
XEUR
Eurex Exchange.
XEER
@@ -470,7 +470,7 @@ def description(self) -> str:
if self == Venue.MXOP:
return "MEMX Options"
if self == Venue.IFEU:
return "ICE Futures Europe (Commodities)"
return "ICE Europe Commodities"
if self == Venue.NDEX:
return "ICE Endex"
if self == Venue.DBEQ:
@@ -492,7 +492,7 @@ def description(self) -> str:
if self == Venue.IFUS:
return "ICE Futures US"
if self == Venue.IFLL:
return "ICE Futures Europe (Financials)"
return "ICE Europe Financials"
if self == Venue.XEUR:
return "Eurex Exchange"
if self == Venue.XEER:
@@ -561,7 +561,7 @@ class Dataset(StringyMixin, str, Enum):
XNAS_NLS
Nasdaq NLS.
IFEU_IMPACT
ICE Futures Europe (Commodities) iMpact.
ICE Europe Commodities iMpact.
NDEX_IMPACT
ICE Endex iMpact.
EQUS_ALL
@@ -579,7 +579,7 @@ class Dataset(StringyMixin, str, Enum):
IFUS_IMPACT
ICE Futures US iMpact.
IFLL_IMPACT
ICE Futures Europe (Financials) iMpact.
ICE Europe Financials iMpact.
XEUR_EOBI
Eurex EOBI.
XEER_EOBI
@@ -856,7 +856,7 @@ def description(self) -> str:
if self == Dataset.XNAS_NLS:
return "Nasdaq NLS"
if self == Dataset.IFEU_IMPACT:
return "ICE Futures Europe (Commodities) iMpact"
return "ICE Europe Commodities iMpact"
if self == Dataset.NDEX_IMPACT:
return "ICE Endex iMpact"
if self == Dataset.EQUS_ALL:
@@ -874,7 +874,7 @@ def description(self) -> str:
if self == Dataset.IFUS_IMPACT:
return "ICE Futures US iMpact"
if self == Dataset.IFLL_IMPACT:
return "ICE Futures Europe (Financials) iMpact"
return "ICE Europe Financials iMpact"
if self == Dataset.XEUR_EOBI:
return "Eurex EOBI"
if self == Dataset.XEER_EOBI:
@@ -1001,7 +1001,7 @@ class Publisher(StringyMixin, str, Enum):
EQUS_PLUS_FINC
Databento US Equities Plus - FINRA/Nasdaq TRF Chicago.
IFEU_IMPACT_IFEU
ICE Futures Europe (Commodities).
ICE Europe Commodities.
NDEX_IMPACT_NDEX
ICE Endex.
DBEQ_BASIC_DBEQ
@@ -1055,7 +1055,7 @@ class Publisher(StringyMixin, str, Enum):
XNAS_BASIC_FINC
Nasdaq Basic - FINRA/Nasdaq TRF Chicago.
IFEU_IMPACT_XOFF
ICE Futures Europe - Off-Market Trades.
ICE Europe - Off-Market Trades.
NDEX_IMPACT_XOFF
ICE Endex - Off-Market Trades.
XNAS_NLS_XBOS
@@ -1085,9 +1085,9 @@ class Publisher(StringyMixin, str, Enum):
IFUS_IMPACT_XOFF
ICE Futures US - Off-Market Trades.
IFLL_IMPACT_IFLL
ICE Futures Europe (Financials).
ICE Europe Financials.
IFLL_IMPACT_XOFF
ICE Futures Europe (Financials) - Off-Market Trades.
ICE Europe Financials - Off-Market Trades.
XEUR_EOBI_XEUR
Eurex EOBI.
XEER_EOBI_XEER
@@ -2181,7 +2181,7 @@ def description(self) -> str:
if self == Publisher.EQUS_PLUS_FINC:
return "Databento US Equities Plus - FINRA/Nasdaq TRF Chicago"
if self == Publisher.IFEU_IMPACT_IFEU:
return "ICE Futures Europe (Commodities)"
return "ICE Europe Commodities"
if self == Publisher.NDEX_IMPACT_NDEX:
return "ICE Endex"
if self == Publisher.DBEQ_BASIC_DBEQ:
@@ -2235,7 +2235,7 @@ def description(self) -> str:
if self == Publisher.XNAS_BASIC_FINC:
return "Nasdaq Basic - FINRA/Nasdaq TRF Chicago"
if self == Publisher.IFEU_IMPACT_XOFF:
return "ICE Futures Europe - Off-Market Trades"
return "ICE Europe - Off-Market Trades"
if self == Publisher.NDEX_IMPACT_XOFF:
return "ICE Endex - Off-Market Trades"
if self == Publisher.XNAS_NLS_XBOS:
@@ -2265,9 +2265,9 @@ def description(self) -> str:
if self == Publisher.IFUS_IMPACT_XOFF:
return "ICE Futures US - Off-Market Trades"
if self == Publisher.IFLL_IMPACT_IFLL:
return "ICE Futures Europe (Financials)"
return "ICE Europe Financials"
if self == Publisher.IFLL_IMPACT_XOFF:
return "ICE Futures Europe (Financials) - Off-Market Trades"
return "ICE Europe Financials - Off-Market Trades"
if self == Publisher.XEUR_EOBI_XEUR:
return "Eurex EOBI"
if self == Publisher.XEER_EOBI_XEER:
6 changes: 3 additions & 3 deletions databento/historical/api/batch.py
@@ -184,7 +184,7 @@ def submit_job(

def list_jobs(
self,
states: list[str] | str = "received,queued,processing,done",
states: Iterable[str] | str = "received,queued,processing,done",
since: pd.Timestamp | datetime | date | str | int | None = None,
) -> list[dict[str, Any]]:
"""
@@ -196,8 +196,8 @@ def list_jobs(

Parameters
----------
states : list[str] or str, optional {'received', 'queued', 'processing', 'done', 'expired'} # noqa
The filter for jobs states as a list of comma separated values.
states : Iterable[str] or str, optional {'received', 'queued', 'processing', 'done', 'expired'} # noqa
The filter for jobs states as an iterable of comma separated values.
since : pd.Timestamp, datetime, date, str, or int, optional
The filter for timestamp submitted (will not include jobs prior to this).

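A hedged usage sketch for the widened `states` parameter above: any `Iterable[str]` is now accepted in addition to a comma-separated string. The API key is a placeholder, and the job dictionary keys are assumptions since the response format is not part of this diff.

```python
import databento as db

client = db.Historical("YOUR_API_KEY")

# Equivalent filters: a comma-separated string or any iterable of states.
jobs = client.batch.list_jobs(states="received,queued")
jobs = client.batch.list_jobs(states=("done", "expired"))

for job in jobs:
    print(job.get("id"), job.get("state"))  # assumed keys, for illustration only
```
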
12 changes: 6 additions & 6 deletions databento/historical/api/metadata.py
@@ -32,7 +32,7 @@ def __init__(self, key: str, gateway: str) -> None:
super().__init__(key=key, gateway=gateway)
self._base_url = gateway + f"/v{API_VERSION}/metadata"

def list_publishers(self) -> list[dict[str, Any]]:
def list_publishers(self) -> list[dict[str, int | str]]:
"""
Request all publishers from Databento.

Expand All @@ -42,7 +42,7 @@ def list_publishers(self) -> list[dict[str, Any]]:

Returns
-------
list[dict[str, Any]]
list[dict[str, int | str]]

"""
response: Response = self._get(
@@ -121,7 +121,7 @@ def list_fields(
self,
schema: Schema | str,
encoding: Encoding | str,
) -> list[dict[str, Any]]:
) -> list[dict[str, str]]:
"""
List all fields for a particular schema and encoding from Databento.

Expand All @@ -136,7 +136,7 @@ def list_fields(

Returns
-------
list[dict[str, Any]]
list[dict[str, str]]
A list of field details.

"""
@@ -189,7 +189,7 @@ def get_dataset_condition(
dataset: Dataset | str,
start_date: date | str | None = None,
end_date: date | str | None = None,
) -> list[dict[str, str]]:
) -> list[dict[str, str | None]]:
"""
Get the per date dataset conditions from Databento.

Expand All @@ -210,7 +210,7 @@ def get_dataset_condition(

Returns
-------
list[dict[str, str]]
list[dict[str, str | None]]

"""
params: list[tuple[str, str | None]] = [
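The metadata endpoints above only had their return annotations tightened; behavior is unchanged. A sketch of the calls with the new annotations noted inline (the API key, schema, encoding, and dates are placeholders):

```python
import databento as db

client = db.Historical("YOUR_API_KEY")

publishers = client.metadata.list_publishers()        # -> list[dict[str, int | str]]

fields = client.metadata.list_fields(                 # -> list[dict[str, str]]
    schema="trades",
    encoding="csv",
)

conditions = client.metadata.get_dataset_condition(   # -> list[dict[str, str | None]]
    dataset="GLBX.MDP3",
    start_date="2024-01-02",
    end_date="2024-01-05",
)
```
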
2 changes: 1 addition & 1 deletion databento/historical/api/symbology.py
@@ -44,7 +44,7 @@ def resolve(
----------
dataset : Dataset or str
The dataset code (string identifier) for the request.
symbols : Iterable[str | int] or str or int, optional
symbols : Iterable[str | int] or str or int
The symbols to resolve. Takes up to 2,000 symbols per request.
stype_in : SType or str, default 'raw_symbol'
The input symbology type to resolve from.
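The symbology change above only removes the stray ", optional" from the `symbols` docstring; `symbols` is required. A brief sketch of a resolve call (key, dataset, and dates are placeholders; the response structure is not shown in this diff):

```python
import databento as db

client = db.Historical("YOUR_API_KEY")

result = client.symbology.resolve(
    dataset="GLBX.MDP3",
    symbols=["ES.FUT"],        # required; up to 2,000 symbols per request
    stype_in="parent",
    stype_out="instrument_id",
    start_date="2024-01-02",
)
print(result)
```
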
2 changes: 1 addition & 1 deletion databento/version.py
@@ -1 +1 @@
__version__ = "0.57.0"
__version__ = "0.57.1"
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "databento"
version = "0.57.0"
version = "0.57.1"
description = "Official Python client library for Databento"
authors = [
"Databento <support@databento.com>",
@@ -32,7 +32,7 @@ aiohttp = [
{version = "^3.8.3", python = "<3.12"},
{version = "^3.9.0", python = "^3.12"}
]
databento-dbn = "0.36.0"
databento-dbn = "0.36.1"
numpy = [
{version = ">=1.23.5", python = "<3.12"},
{version = ">=1.26.0", python = "^3.12"}
5 changes: 3 additions & 2 deletions tests/test_historical_bento.py
@@ -435,13 +435,14 @@ def test_replay_with_stub_data_record_passes_to_callback(
stub_data = test_data(Dataset.GLBX_MDP3, Schema.MBO)
data = DBNStore.from_bytes(data=stub_data)

handler: list[MBOMsg] = []
handler: list[DBNRecord] = []

# Act
data.replay(callback=handler.append)
record: MBOMsg = handler[0]
record: DBNRecord = handler[0]

# Assert
assert isinstance(record, MBOMsg)
assert record.hd.length == 14
assert record.hd.rtype == 160
assert record.hd.rtype == 160
6 changes: 6 additions & 0 deletions tests/test_live_client.py
@@ -5,6 +5,7 @@
from __future__ import annotations

import pathlib
import platform
import random
import string
from io import BytesIO
@@ -32,6 +33,11 @@
from tests.mockliveserver.fixture import MockLiveServerInterface


# TODO(nm): Remove when stable
if platform.system() == "Windows":
pytest.skip(reason="Skip on Windows due to flakiness", allow_module_level=True)


def test_live_connection_refused(
test_api_key: str,
) -> None:
6 changes: 6 additions & 0 deletions tests/test_live_client_reconnect.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import asyncio
import platform
from unittest.mock import MagicMock

import pandas as pd
@@ -18,6 +19,11 @@
from tests.mockliveserver.fixture import MockLiveServerInterface


# TODO(nm): Remove when stable
if platform.system() == "Windows":
pytest.skip(reason="Skip on Windows due to flakiness", allow_module_level=True)


async def test_reconnect_policy_none(
test_live_api_key: str,
mock_live_server: MockLiveServerInterface,