# requires Python >=3.6
# pip install tardis-dev
from tardis_dev import datasets, get_exchange_details
import logging

# optionally enable debug logs
# logging.basicConfig(level=logging.DEBUG)

exchange = 'deribit'
exchange_details = get_exchange_details(exchange)

# iterate over and download all data for every symbol
for symbol in exchange_details["datasets"]["symbols"]:
    symbol_id = symbol["id"]

    # skip grouped symbols before touching any other per-symbol fields
    if symbol_id in ['PERPETUALS', 'SPOT', 'FUTURES']:
        continue

    # alternatively specify data types explicitly, e.g. ['trades', 'incremental_book_L2', 'quotes']
    # see available options https://docs.tardis.dev/downloadable-csv-files#data-types
    data_types = symbol["dataTypes"]
    from_date = symbol["availableSince"]
    to_date = symbol["availableTo"]

    print(f"Downloading {exchange} {data_types} for {symbol_id} from {from_date} to {to_date}")

    # each CSV dataset format is documented at https://docs.tardis.dev/downloadable-csv-files#data-types
    # see https://docs.tardis.dev/downloadable-csv-files#download-via-client-libraries for full options docs
    datasets.download(
        exchange=exchange,
        data_types=data_types,
        from_date=from_date,
        to_date=to_date,
        symbols=[symbol_id],
        # TODO set your API key here
        api_key="YOUR_API_KEY",
        # path where CSV data will be downloaded into
        download_dir="./datasets",
    )
See the docs that show all available download options (download path customization, filename conventions, and more).
// npm install tardis-dev
// requires node version >=12
const { downloadDatasets, getExchangeDetails } = require('tardis-dev');

(async () => {
  const exchange = 'deribit';
  const exchangeDetails = await getExchangeDetails(exchange);

  // iterate over and download all data for every symbol
  for (const symbol of exchangeDetails.datasets.symbols) {
    const symbolId = symbol.id;

    // skip grouped symbols before touching any other per-symbol fields
    if (['PERPETUALS', 'SPOT', 'FUTURES'].includes(symbolId)) {
      continue;
    }

    // alternatively specify dataTypes explicitly, e.g. ['trades', 'incremental_book_L2', 'quotes']
    // see available options https://docs.tardis.dev/downloadable-csv-files#data-types
    const dataTypes = symbol.dataTypes;
    const from = symbol.availableSince;
    const to = symbol.availableTo;

    console.log(`Downloading ${exchange} ${dataTypes} for ${symbolId} from ${from} to ${to}`);

    // each CSV dataset format is documented at https://docs.tardis.dev/downloadable-csv-files#data-types
    // see https://docs.tardis.dev/downloadable-csv-files#download-via-client-libraries for full options docs
    await downloadDatasets({
      exchange,
      dataTypes,
      from,
      to,
      symbols: [symbolId],
      // TODO: set your API key here
      apiKey: 'YOUR_API_KEY',
      // path where CSV data will be downloaded into
      downloadDir: './datasets',
    });
  }
})().catch((err) => {
  // don't leave the IIFE's promise floating — surface failures and set a non-zero exit code
  console.error('Download failed:', err);
  process.exitCode = 1;
});
See the docs that show all available download options (download path customization, filename conventions, and more).
Historical data format is the same as provided by real-time Deribit WebSocket v2 API with addition of local timestamps. If you'd like to work with normalized data format instead (same format for each exchange) see downloadable CSV files or official client libs that perform data normalization client-side.
Tardis-machine is a locally runnable server that exposes an API for efficiently requesting historical market data for whole time periods, in contrast to the HTTP API, which provides data only in minute-by-minute slices.
Click any channel below to see HTTP API response with historical data recorded for it.
book
During data collection, the integrity of order book incremental updates is validated using sequence numbers provided by Deribit's real-time feed (prev_change_id) — if a missed message is detected, the WebSocket connection is restarted.
Data for the book, perpetual, ticker, and trades channels was all collected at the raw interval — no aggregation was applied.
Market data collection details
Market data collection infrastructure for Deribit is located in GCP europe-west2 region (London, UK).
Real-time market data is captured via multiple WebSocket connections.
Deribit servers are located in Equinix LD4 (Slough, UK).