```python
# requires Python >=3.6
# pip install tardis-dev
from tardis_dev import datasets, get_exchange_details
import logging

# optionally enable debug logs
# logging.basicConfig(level=logging.DEBUG)

exchange = 'okex'
exchange_details = get_exchange_details(exchange)

# iterate over and download all data for every symbol
for symbol in exchange_details["datasets"]["symbols"]:
    # alternatively specify data types explicitly ['trades', 'incremental_book_L2', 'quotes'] etc
    # see available options https://docs.tardis.dev/downloadable-csv-files#data-types
    data_types = symbol["dataTypes"]
    symbol_id = symbol["id"]
    from_date = symbol["availableSince"]
    to_date = symbol["availableTo"]

    # skip grouped symbols
    if symbol_id in ['PERPETUALS', 'SPOT', 'FUTURES']:
        continue

    print(f"Downloading {exchange} {data_types} for {symbol_id} from {from_date} to {to_date}")

    # each CSV dataset format is documented at https://docs.tardis.dev/downloadable-csv-files#data-types
    # see https://docs.tardis.dev/downloadable-csv-files#download-via-client-libraries for full options docs
    datasets.download(
        exchange=exchange,
        data_types=data_types,
        from_date=from_date,
        to_date=to_date,
        symbols=[symbol_id],
        # TODO: set your API key here
        api_key="YOUR_API_KEY",
        # path where CSV data will be downloaded into
        download_dir="./datasets",
    )
```
See the docs for all available download options (download path customization, filename conventions, and more).
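For instance, the download path and generated file names can be customized. A minimal sketch, assuming the `get_filename` callback option accepted by `datasets.download`; the nested directory layout below is just an illustration:

```python
# a minimal sketch: customizing downloaded file names via the get_filename callback
# (assumes datasets.download accepts it as documented; the naming scheme is illustrative)
from tardis_dev import datasets

def file_name_nested(exchange, data_type, date, symbol, format):
    # e.g. okex/trades/BTC-USDT/2020-06-01.csv.gz
    return f"{exchange}/{data_type}/{symbol}/{date.strftime('%Y-%m-%d')}.{format}.gz"

datasets.download(
    exchange="okex",
    data_types=["trades"],
    from_date="2020-06-01",
    to_date="2020-06-02",
    symbols=["BTC-USDT"],
    api_key="YOUR_API_KEY",
    download_dir="./datasets",
    get_filename=file_name_nested,
)
```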
```js
// npm install tardis-dev
// requires node version >=12
const { downloadDatasets, getExchangeDetails } = require('tardis-dev');

(async () => {
  const exchange = 'okex'
  const exchangeDetails = await getExchangeDetails(exchange)

  // iterate over and download all data for every symbol
  for (const symbol of exchangeDetails.datasets.symbols) {
    // alternatively specify dataTypes explicitly ['trades', 'incremental_book_L2', 'quotes'] etc
    // see available options https://docs.tardis.dev/downloadable-csv-files#data-types
    const dataTypes = symbol.dataTypes
    const symbolId = symbol.id
    const from = symbol.availableSince
    const to = symbol.availableTo

    // skip grouped symbols
    if (['PERPETUALS', 'SPOT', 'FUTURES'].includes(symbolId)) {
      continue
    }

    console.log(`Downloading ${exchange} ${dataTypes} for ${symbolId} from ${from} to ${to}`)

    // each CSV dataset format is documented at https://docs.tardis.dev/downloadable-csv-files#data-types
    // see https://docs.tardis.dev/downloadable-csv-files#download-via-client-libraries for full options docs
    await downloadDatasets({
      exchange,
      dataTypes,
      from,
      to,
      symbols: [symbolId],
      // TODO: set your API key here
      apiKey: 'YOUR_API_KEY',
      // path where CSV data will be downloaded into
      downloadDir: './datasets'
    })
  }
})()
```
See the docs for all available download options (download path customization, filename conventions, and more).
The historical data format is the same as that provided by the real-time OKX WebSocket v3 API, with the addition of local timestamps. If you'd like to work with a normalized data format instead (the same format for every exchange), see downloadable CSV files or the official client libraries that can perform data normalization client-side.
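For example, these raw messages can be replayed with the `tardis-client` Python package (`pip install tardis-client`). A minimal sketch, assuming the `TardisClient.replay` async generator API; the channel and symbol names below are illustrative:

```python
# a minimal sketch: replaying raw OKX WebSocket messages with local timestamps
# (channel/symbol names are illustrative; see the exchange's channel list in the docs)
import asyncio
from tardis_client import TardisClient, Channel

async def replay_okex():
    tardis_client = TardisClient(api_key="YOUR_API_KEY")

    # async generator yielding (local_timestamp, message) tuples, where message is
    # the exact JSON payload as sent by the exchange's real-time WebSocket API
    messages = tardis_client.replay(
        exchange="okex",
        from_date="2020-06-01",
        to_date="2020-06-02",
        filters=[Channel(name="spot/trade", symbols=["BTC-USDT"])],
    )

    async for local_timestamp, message in messages:
        print(local_timestamp, message)

asyncio.run(replay_okex())
```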
Tardis-machine is a locally runnable server that exposes an API for efficiently requesting historical market data for whole time periods, in contrast to the HTTP API, which provides data only in minute-by-minute slices.
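A minimal sketch of requesting data from a locally running tardis-machine instance, assuming its default port 8000 and the `/replay` endpoint that takes URL-encoded JSON options and streams newline-delimited JSON; the channel and symbol names are again illustrative:

```python
# a minimal sketch: streaming raw messages for a whole date range from tardis-machine
# (assumes a local instance, e.g. started via `npx tardis-machine --api-key=YOUR_API_KEY`,
#  listening on its default port 8000; endpoint and option details may differ)
import json
import urllib.parse
import urllib.request

options = {
    "exchange": "okex",
    "from": "2020-06-01",
    "to": "2020-06-02",
    "filters": [{"channel": "spot/trade", "symbols": ["BTC-USDT"]}],
}

url = f"http://localhost:8000/replay?options={urllib.parse.quote(json.dumps(options))}"

# the response is streamed as newline-delimited JSON, one message per line
with urllib.request.urlopen(url) as response:
    for line in response:
        if line.strip():
            print(json.loads(line))
```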
Market data collection infrastructure for OKX Spot has been located in the AWS HK region (Hong Kong, China, VPC colo setup) since 2022-05-04T16:45. Before that, starting from 2020-05-15, it was located in GCP asia-northeast1 (Tokyo, Japan), and initially it was located in the GCP europe-west2 region (London, UK).
Real-time market data is captured via multiple WebSocket connections.
OKX servers are located in the Alibaba Cloud cn-hongkong region (Hong Kong, China).