# requires Python >=3.6
# pip install tardis-dev
from tardis_dev import datasets, get_exchange_details
import logging

# optionally enable debug logs
# logging.basicConfig(level=logging.DEBUG)

# Download every available CSV dataset for every symbol on the chosen exchange.
exchange = 'deribit'
exchange_details = get_exchange_details(exchange)

# iterate over and download all data for every symbol
for symbol in exchange_details["datasets"]["symbols"]:
    # alternatively specify datatypes explicitly ['trades', 'incremental_book_L2', 'quotes'] etc
    # see available options https://docs.tardis.dev/downloadable-csv-files#data-types
    data_types = symbol["dataTypes"]
    symbol_id = symbol["id"]
    from_date = symbol["availableSince"]
    to_date = symbol["availableTo"]

    # skip grouped symbols (aggregate entries, not real instruments)
    if symbol_id in ['PERPETUALS', 'SPOT', 'FUTURES']:
        continue

    print(f"Downloading {exchange} {data_types} for {symbol_id} from {from_date} to {to_date}")

    # each CSV dataset format is documented at https://docs.tardis.dev/downloadable-csv-files#data-types
    # see https://docs.tardis.dev/downloadable-csv-files#download-via-client-libraries for full options docs
    datasets.download(
        exchange=exchange,
        data_types=data_types,
        from_date=from_date,
        to_date=to_date,
        symbols=[symbol_id],
        # TODO set your API key here
        api_key="YOUR_API_KEY",
        # path where CSV data will be downloaded into
        download_dir="./datasets",
    )
See the docs for all available download options (download path customization, filename conventions, and more).
// npm install tardis-dev
// requires node version >=12
const { downloadDatasets, getExchangeDetails } = require('tardis-dev')

// Download every available CSV dataset for every symbol on the chosen exchange.
;(async () => {
  const exchange = 'deribit'
  const exchangeDetails = await getExchangeDetails(exchange)

  // iterate over and download all data for every symbol
  for (const symbol of exchangeDetails.datasets.symbols) {
    // alternatively specify dataTypes explicitly ['trades', 'incremental_book_L2', 'quotes'] etc
    // see available options https://docs.tardis.dev/downloadable-csv-files#data-types
    const { dataTypes, id: symbolId, availableSince: from, availableTo: to } = symbol

    // skip grouped symbols (aggregate entries, not real instruments)
    if (['PERPETUALS', 'SPOT', 'FUTURES'].includes(symbolId)) {
      continue
    }

    console.log(`Downloading ${exchange} ${dataTypes} for ${symbolId} from ${from} to ${to}`)

    // each CSV dataset format is documented at https://docs.tardis.dev/downloadable-csv-files#data-types
    // see https://docs.tardis.dev/downloadable-csv-files#download-via-client-libraries for full options docs
    await downloadDatasets({
      exchange,
      dataTypes,
      from,
      to,
      symbols: [symbolId],
      // TODO: set your API key here
      apiKey: 'YOUR_API_KEY',
      // path where CSV data will be downloaded into
      downloadDir: './datasets'
    })
  }
})()
See the docs for all available download options (download path customization, filename conventions, and more).
The historical data format is the same as provided by the real-time Deribit WebSocket v2 API, with the addition of local timestamps. If you'd like to work with a normalized data format instead (the same format for each exchange), see the downloadable CSV files or the official client libs that perform data normalization client-side.
# pip install tardis-client
import asyncio

from tardis_client import TardisClient, Channel

tardis_client = TardisClient(api_key="YOUR_API_KEY")


async def replay():
    """Stream historical Deribit 'book' messages for BTC-PERPETUAL over one day."""
    # replay method returns Async Generator
    messages = tardis_client.replay(
        exchange="deribit",
        from_date="2019-07-01",
        to_date="2019-07-02",
        filters=[Channel(name="book", symbols=["BTC-PERPETUAL"])],
    )

    # messages as provided by Deribit real-time stream
    async for local_timestamp, message in messages:
        print(message)


asyncio.run(replay())
Tardis-machine is a locally runnable server that exposes an API for efficiently requesting historical market data for whole time periods, in contrast to the HTTP API, which provides data only in minute-by-minute slices.
Click any channel below to see HTTP API response with historical data recorded for it.
book
During data collection, the integrity of order book incremental updates is validated using sequence numbers provided by Deribit's real-time feed (prev_change_id) — if a missed message is detected, the WebSocket connection is restarted.
Data for the book, perpetual, ticker, and trades channels was all collected at raw interval — no aggregation was applied.
Market data collection details
Market data collection infrastructure for Deribit is located in GCP europe-west2 region (London, UK).
Real-time market data is captured via multiple WebSocket connections.
Deribit servers are located in Equinix LD4 (Slough, UK).