Full Code of tardis-dev/tardis-machine for AI

master c6e9313358eb cached
35 files
30.6 MB
28.7k tokens
63 symbols
1 request
Download .txt
Repository: tardis-dev/tardis-machine
Branch: master
Commit: c6e9313358eb
Files: 35
Total size: 30.6 MB

Directory structure:
gitextract_yzu_3thd/

├── .dockerignore
├── .github/
│   └── workflows/
│       ├── ci.yaml
│       ├── npm_audit.yaml
│       └── publish.yaml
├── .gitignore
├── .npmrc
├── .prettierignore
├── .prettierrc
├── AGENTS.md
├── ARCHITECTURE.md
├── CLAUDE.md
├── Dockerfile
├── LICENSE
├── README.md
├── benchmark.js
├── bin/
│   └── tardis-machine.js
├── package.json
├── src/
│   ├── debug.ts
│   ├── helpers.ts
│   ├── http/
│   │   ├── healthCheck.ts
│   │   ├── index.ts
│   │   ├── replay.ts
│   │   └── replaynormalized.ts
│   ├── index.ts
│   ├── tardismachine.ts
│   └── ws/
│       ├── index.ts
│       ├── replay.ts
│       ├── replaynormalized.ts
│       ├── streamnormalized.ts
│       └── subscriptionsmappers.ts
├── test/
│   ├── __snapshots__/
│   │   └── tardismachine.test.ts.snap
│   ├── subscriptionsmappers.test.ts
│   ├── tardismachine.test.ts
│   └── tsconfig.json
└── tsconfig.json

================================================
FILE CONTENTS
================================================

================================================
FILE: .dockerignore
================================================
.git
dist
node_modules
.cache

================================================
FILE: .github/workflows/ci.yaml
================================================
name: CI

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  ci:
    name: CI
    runs-on: ${{ matrix.os }}
    env:
      NPM_CONFIG_MIN_RELEASE_AGE: 0

    strategy:
      matrix:
        node-version: ['25.8.2']
        os: [ubuntu-latest, ubuntu-24.04-arm]

    steps:
      - name: Checkout
        uses: actions/checkout@v5

      - name: Use Node.js v${{ matrix.node-version }}
        uses: actions/setup-node@v5
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Dependencies And Compile TS
        run: npm ci --ignore-scripts

      - name: Verify Registry Signatures
        run: npm audit signatures

      - name: Audit Production Dependencies
        run: npm audit --omit=dev --audit-level=critical

      - name: Check Code Format
        run: npm run check-format

      - name: Run Tests
        run: npm run test


================================================
FILE: .github/workflows/npm_audit.yaml
================================================
name: Full NPM Audit

on:
  schedule:
    - cron: '20 3 * * *'
  workflow_dispatch:

permissions:
  contents: read

jobs:
  audit:
    name: Full NPM Audit Report
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Use Node.js v25.8.2
        uses: actions/setup-node@v6
        with:
          node-version: 25.8.2

      - name: Generate Full Audit Report
        id: audit
        run: |
          set +e
          npm audit --package-lock-only --json > npm-audit.json
          exit_code=$?
          set -e
          if [ ! -f npm-audit.json ]; then
            echo '{}' > npm-audit.json
          fi
          echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"

      - name: Upload Audit Report
        uses: actions/upload-artifact@v4
        with:
          name: npm-audit-report
          path: npm-audit.json

      - name: Summarize Audit Report
        env:
          AUDIT_EXIT_CODE: ${{ steps.audit.outputs.exit_code }}
        run: |
          node --input-type=module <<'EOF'
          import fs from 'node:fs';

          const report = JSON.parse(fs.readFileSync('npm-audit.json', 'utf8'));
          const vulnerabilities = report.metadata?.vulnerabilities ?? {};
          const lines = [
            `Full npm audit exit code: ${process.env.AUDIT_EXIT_CODE}`,
            `info: ${vulnerabilities.info ?? 0}`,
            `low: ${vulnerabilities.low ?? 0}`,
            `moderate: ${vulnerabilities.moderate ?? 0}`,
            `high: ${vulnerabilities.high ?? 0}`,
            `critical: ${vulnerabilities.critical ?? 0}`,
            `total: ${vulnerabilities.total ?? 0}`,
            '',
            'Download the npm-audit-report artifact for the full JSON report.'
          ];

          fs.appendFileSync(process.env.GITHUB_STEP_SUMMARY, `${lines.join('\n')}\n`);
          EOF


================================================
FILE: .github/workflows/publish.yaml
================================================
name: Publish New Release To NPM And Image To Docker Hub

on:
  release:
    # This specifies that the build will be triggered when we publish a release
    types: [published]

permissions:
  id-token: write
  contents: write

jobs:
  publish:
    name: Publish New Release To NPM And Image To Docker Hub
    runs-on: ubuntu-latest
    env:
      IMAGE_NAME: tardisdev/tardis-machine
      NPM_CONFIG_MIN_RELEASE_AGE: 0

    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          ref: ${{ github.event.release.target_commitish }}

      - name: Use Node.js v25.8.2
        uses: actions/setup-node@v5
        with:
          node-version: 25.8.2
          registry-url: https://registry.npmjs.org/

      - name: Install Dependencies And Compile TS
        run: npm ci --ignore-scripts

      - name: Verify Registry Signatures
        run: npm audit signatures

      - name: Audit Production Dependencies
        run: npm audit --omit=dev --audit-level=critical

      - name: Configure Git
        run: git config --global user.name "GitHub Release Bot"

      - name: Update package version
        run: npm version ${{ github.event.release.tag_name }}

      - name: Run Tests
        run: npm run test

      - name: Publish Package
        run: npm publish

      - name: Push Version Changes To GitHub
        run: git push

      - name: Set Up QEMU
        uses: docker/setup-qemu-action@v4

      - name: Set Up Docker Buildx
        uses: docker/setup-buildx-action@v4

      - name: Login To Docker Hub
        uses: docker/login-action@v4
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build And Push Docker Image
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          build-args: VERSION_ARG=${{ github.event.release.tag_name }}
          tags: |
            ${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
            ${{ env.IMAGE_NAME }}:latest

      - name: Logout From Docker Hub
        run: docker logout
        if: ${{ always() }}
        continue-on-error: true


================================================
FILE: .gitignore
================================================
node_modules
/dist
/*.log
.tardis-cache
*.tsbuildinfo
bench
.cache
.DS_Store


================================================
FILE: .npmrc
================================================
min-release-age=1
allow-git=all


================================================
FILE: .prettierignore
================================================
package.json
package-lock.json
yarn.lock
dist

================================================
FILE: .prettierrc
================================================
{
  "printWidth": 140,
  "semi": false,
  "singleQuote": true,
  "trailingComma": "none",
  "endOfLine": "lf"
}


================================================
FILE: AGENTS.md
================================================
# tardis-machine

Public npm package and Docker image. Locally runnable server providing HTTP and WebSocket APIs for tick-level historical market data replay and consolidated real-time cryptocurrency market data streaming. Uses `tardis-dev` under the hood.

## Build & Test

```bash
npm run build          # tsc
npm test               # build + jest
npm run check-format   # prettier check
```

## Editing Rules

- Keep API behavior compatible with public docs — this is a published npm package
- Preserve backpressure handling in WebSocket paths
- Maintain mapper correctness in `src/ws/subscriptionsmappers.ts`
- Avoid heavy synchronous logic on request paths
- **Format after every edit** — run `npx prettier --write` on modified files after each change

## Validation

- `npm run build && npm test`
- `npm run check-format`

## Operational Docs

- [ARCHITECTURE.md](ARCHITECTURE.md) — dual-server design, HTTP/WS routing, session management

## Publishing

Published via GitHub Actions (`publish.yaml`). Do not publish manually unless explicitly requested.

## Keeping Docs Current

When you change code, check if any docs in this repo become stale as a result — if so, update them. When following a workflow doc, if the steps don't match reality, fix the doc so the next run is better.


================================================
FILE: ARCHITECTURE.md
================================================
# Architecture

tardis-machine is a local server that wraps `tardis-dev` library functionality in HTTP and WebSocket APIs.

## Dual-Server Design

The server runs two listeners: HTTP on port N and WebSocket on port N+1.

- **HTTP** — Node.js `http` module with `find-my-way` router. Endpoints for historical replay (exchange-native and normalized) and health check. Responses are streamed with batched buffering for throughput.
- **WebSocket** — `uWebSockets.js` for high-performance WebSocket handling with built-in backpressure. Endpoints for historical replay and real-time streaming (exchange-native and normalized).

## Key Concepts

**Replay sessions** — WebSocket replay supports multiple synchronized connections via session keys. Multiple clients can share a replay session and receive synchronized data.

**Subscription mapping** (`src/ws/subscriptionsmappers.ts`) — Translates exchange-native WebSocket subscribe messages into tardis-dev filter format. This enables existing exchange WebSocket clients to connect to tardis-machine and receive historical data as if it were the live exchange.

**Backpressure** — WebSocket paths monitor send buffer pressure and pause data production when a client can't keep up. This prevents memory growth from slow consumers.

**Caching** — Data is cached locally on disk in compressed format via tardis-dev. Subsequent requests for the same data range hit the cache.

## Design Decisions

- **Separate HTTP and WS ports** — Avoids complexity of protocol upgrade handling; uWebSockets.js runs its own event loop
- **Backpressure-first WS design** — Slow consumers don't cause memory growth; production pauses when send buffer fills
- **Exchange-native WS compatibility** — Subscription mappers allow existing exchange clients to work against historical data unchanged


================================================
FILE: CLAUDE.md
================================================
@AGENTS.md


================================================
FILE: Dockerfile
================================================
#
# uWebSockets.js v20.59.0 requires glibc >= 2.38 for the prebuilt Linux addon.
# Use the explicit trixie variant to keep the base image on glibc >= 2.38.
FROM node:25.8.2-trixie-slim
# version arg contains current git tag
ARG VERSION_ARG
# install git
RUN apt-get update && apt-get install -y git
# install tardis-machine globally (exposes tardis-machine command)
RUN npm install --global --unsafe-perm tardis-machine@$VERSION_ARG

ENV UWS_HTTP_MAX_HEADERS_SIZE=20000
# run it
CMD tardis-machine --cache-dir=/.cache


================================================
FILE: LICENSE
================================================
Mozilla Public License Version 2.0
==================================

1. Definitions
--------------

1.1. "Contributor"
    means each individual or legal entity that creates, contributes to
    the creation of, or owns Covered Software.

1.2. "Contributor Version"
    means the combination of the Contributions of others (if any) used
    by a Contributor and that particular Contributor's Contribution.

1.3. "Contribution"
    means Covered Software of a particular Contributor.

1.4. "Covered Software"
    means Source Code Form to which the initial Contributor has attached
    the notice in Exhibit A, the Executable Form of such Source Code
    Form, and Modifications of such Source Code Form, in each case
    including portions thereof.

1.5. "Incompatible With Secondary Licenses"
    means

    (a) that the initial Contributor has attached the notice described
        in Exhibit B to the Covered Software; or

    (b) that the Covered Software was made available under the terms of
        version 1.1 or earlier of the License, but not also under the
        terms of a Secondary License.

1.6. "Executable Form"
    means any form of the work other than Source Code Form.

1.7. "Larger Work"
    means a work that combines Covered Software with other material, in
    a separate file or files, that is not Covered Software.

1.8. "License"
    means this document.

1.9. "Licensable"
    means having the right to grant, to the maximum extent possible,
    whether at the time of the initial grant or subsequently, any and
    all of the rights conveyed by this License.

1.10. "Modifications"
    means any of the following:

    (a) any file in Source Code Form that results from an addition to,
        deletion from, or modification of the contents of Covered
        Software; or

    (b) any new file in Source Code Form that contains any Covered
        Software.

1.11. "Patent Claims" of a Contributor
    means any patent claim(s), including without limitation, method,
    process, and apparatus claims, in any patent Licensable by such
    Contributor that would be infringed, but for the grant of the
    License, by the making, using, selling, offering for sale, having
    made, import, or transfer of either its Contributions or its
    Contributor Version.

1.12. "Secondary License"
    means either the GNU General Public License, Version 2.0, the GNU
    Lesser General Public License, Version 2.1, the GNU Affero General
    Public License, Version 3.0, or any later versions of those
    licenses.

1.13. "Source Code Form"
    means the form of the work preferred for making modifications.

1.14. "You" (or "Your")
    means an individual or a legal entity exercising rights under this
    License. For legal entities, "You" includes any entity that
    controls, is controlled by, or is under common control with You. For
    purposes of this definition, "control" means (a) the power, direct
    or indirect, to cause the direction or management of such entity,
    whether by contract or otherwise, or (b) ownership of more than
    fifty percent (50%) of the outstanding shares or beneficial
    ownership of such entity.

2. License Grants and Conditions
--------------------------------

2.1. Grants

Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:

(a) under intellectual property rights (other than patent or trademark)
    Licensable by such Contributor to use, reproduce, make available,
    modify, display, perform, distribute, and otherwise exploit its
    Contributions, either on an unmodified basis, with Modifications, or
    as part of a Larger Work; and

(b) under Patent Claims of such Contributor to make, use, sell, offer
    for sale, have made, import, and otherwise transfer either its
    Contributions or its Contributor Version.

2.2. Effective Date

The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.

2.3. Limitations on Grant Scope

The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:

(a) for any code that a Contributor has removed from Covered Software;
    or

(b) for infringements caused by: (i) Your and any other third party's
    modifications of Covered Software, or (ii) the combination of its
    Contributions with other software (except as part of its Contributor
    Version); or

(c) under Patent Claims infringed by Covered Software in the absence of
    its Contributions.

This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).

2.4. Subsequent Licenses

No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).

2.5. Representation

Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.

2.6. Fair Use

This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.

2.7. Conditions

Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.

3. Responsibilities
-------------------

3.1. Distribution of Source Form

All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.

3.2. Distribution of Executable Form

If You distribute Covered Software in Executable Form then:

(a) such Covered Software must also be made available in Source Code
    Form, as described in Section 3.1, and You must inform recipients of
    the Executable Form how they can obtain a copy of such Source Code
    Form by reasonable means in a timely manner, at a charge no more
    than the cost of distribution to the recipient; and

(b) You may distribute such Executable Form under the terms of this
    License, or sublicense it under different terms, provided that the
    license for the Executable Form does not attempt to limit or alter
    the recipients' rights in the Source Code Form under this License.

3.3. Distribution of a Larger Work

You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).

3.4. Notices

You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.

3.5. Application of Additional Terms

You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.

4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------

If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.

5. Termination
--------------

5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.

5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.

5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.

************************************************************************
*                                                                      *
*  6. Disclaimer of Warranty                                           *
*  -------------------------                                           *
*                                                                      *
*  Covered Software is provided under this License on an "as is"       *
*  basis, without warranty of any kind, either expressed, implied, or  *
*  statutory, including, without limitation, warranties that the       *
*  Covered Software is free of defects, merchantable, fit for a        *
*  particular purpose or non-infringing. The entire risk as to the     *
*  quality and performance of the Covered Software is with You.        *
*  Should any Covered Software prove defective in any respect, You     *
*  (not any Contributor) assume the cost of any necessary servicing,   *
*  repair, or correction. This disclaimer of warranty constitutes an   *
*  essential part of this License. No use of any Covered Software is   *
*  authorized under this License except under this disclaimer.         *
*                                                                      *
************************************************************************

************************************************************************
*                                                                      *
*  7. Limitation of Liability                                          *
*  --------------------------                                          *
*                                                                      *
*  Under no circumstances and under no legal theory, whether tort      *
*  (including negligence), contract, or otherwise, shall any           *
*  Contributor, or anyone who distributes Covered Software as          *
*  permitted above, be liable to You for any direct, indirect,         *
*  special, incidental, or consequential damages of any character      *
*  including, without limitation, damages for lost profits, loss of    *
*  goodwill, work stoppage, computer failure or malfunction, or any    *
*  and all other commercial damages or losses, even if such party      *
*  shall have been informed of the possibility of such damages. This   *
*  limitation of liability shall not apply to liability for death or   *
*  personal injury resulting from such party's negligence to the       *
*  extent applicable law prohibits such limitation. Some               *
*  jurisdictions do not allow the exclusion or limitation of           *
*  incidental or consequential damages, so this exclusion and          *
*  limitation may not apply to You.                                    *
*                                                                      *
************************************************************************

8. Litigation
-------------

Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.

9. Miscellaneous
----------------

This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.

10. Versions of the License
---------------------------

10.1. New Versions

Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.

10.2. Effect of New Versions

You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.

10.3. Modified Versions

If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).

10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses

If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.

Exhibit A - Source Code Form License Notice
-------------------------------------------

  This Source Code Form is subject to the terms of the Mozilla Public
  License, v. 2.0. If a copy of the MPL was not distributed with this
  file, You can obtain one at http://mozilla.org/MPL/2.0/.

If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.

You may add additional accurate notices of copyright ownership.

Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------

  This Source Code Form is "Incompatible With Secondary Licenses", as
  defined by the Mozilla Public License, v. 2.0.

================================================
FILE: README.md
================================================
# Tardis Machine Server

[![Version](https://img.shields.io/npm/v/tardis-machine.svg)](https://www.npmjs.org/package/tardis-machine)

[Tardis Machine](https://docs.tardis.dev/tardis-machine/quickstart) is a locally runnable server with built-in data caching that uses the [Tardis.dev HTTP API](https://docs.tardis.dev/api/http-api-reference) under the hood. It provides both **tick-level historical** and **consolidated real-time cryptocurrency market data** via HTTP and WebSocket APIs. Available via [npm and Docker](https://docs.tardis.dev/tardis-machine/quickstart#installation).

<br/>

## Features

- efficient data replay API endpoints returning historical market data for entire time periods

- [exchange-native market data APIs](https://docs.tardis.dev/tardis-machine/replaying-historical-data#exchange-native-market-data-apis) — tick-by-tick historical replay in exchange-native format via HTTP and WebSocket endpoints. The WebSocket API replays data with the same format and subscribe logic as real-time exchange APIs — existing exchange WebSocket clients can connect to this endpoint.

- [normalized market data APIs](https://docs.tardis.dev/tardis-machine/replaying-historical-data#normalized-market-data-apis) — consistent format across all exchanges via HTTP and WebSocket endpoints. Includes synchronized multi-exchange replay, real-time streaming, customizable order book snapshots and trade bars.

- [seamless switching](https://docs.tardis.dev/tardis-machine/replaying-historical-data#normalized-market-data-apis) between real-time streaming and historical replay

- transparent local data caching (compressed on disk, decompressed on demand)

- support for many cryptocurrency exchanges — see [docs.tardis.dev](https://docs.tardis.dev) for the full list

<br/>

## Documentation

### [See official docs](https://docs.tardis.dev/tardis-machine/quickstart).

<br/>


================================================
FILE: benchmark.js
================================================
import { createRequire } from 'node:module'

const require = createRequire(import.meta.url)
const fetch = require('node-fetch')
const split2 = require('split2')
const WebSocket = require('ws')

const serialize = (options) => {
  return encodeURIComponent(JSON.stringify(options))
}

class SimpleWebsocketClient {
  constructor(url, onMessageCB, onOpen) {
    this._socket = new WebSocket(url)
    this._socket.on('open', onOpen)
    this._socket.on('message', onMessageCB)
    this._socket.on('error', (err) => {
      console.log('SimpleWebsocketClient error', err)
    })
  }

  send(payload) {
    this._socket.send(JSON.stringify(payload))
  }

  async closed() {
    await new Promise((resolve) => {
      this._socket.on('close', () => {
        resolve()
      })
    })
  }
}

const EXCHANGE = 'bitmex'
const SYMBOL = 'XBTUSD'

const TRADES_AND_BOOK_FILTERS = [
  {
    channel: 'trade',
    symbols: [SYMBOL]
  },
  {
    channel: 'orderBookL2',
    symbols: [SYMBOL]
  }
]
const TRADES_AND_BOOK_SUBSCRIPTION_MESSAGES = [
  {
    op: 'subscribe',
    args: [`trade:${SYMBOL}`, `orderBookL2:${SYMBOL}`]
  }
]

const FROM_DATE = '2020-02-01'
const TO_DATE = '2020-02-02'

/**
 * Benchmarks the HTTP /replay endpoint (raw exchange messages as NDJSON),
 * optionally JSON-parsing each received line to measure parsing overhead.
 * Logs messages/second and totals when the stream completes.
 */
async function httpReplayBenchmark({ JSONParseResponse }) {
  const options = {
    exchange: EXCHANGE,
    filters: TRADES_AND_BOOK_FILTERS,
    from: FROM_DATE,
    to: TO_DATE
  }

  const response = await fetch(`http://localhost:8000/replay?options=${serialize(options)}`)
  // split the streamed body into individual NDJSON lines
  const lines = response.body.pipe(split2())

  const startTime = new Date()
  let count = 0

  for await (const line of lines) {
    if (JSONParseResponse) {
      JSON.parse(line)
    }
    count++
  }

  const elapsedSeconds = (new Date() - startTime) / 1000

  console.log('HTTP /replay finished', {
    JSONParseResponse,
    messagesPerSecond: Math.round(count / elapsedSeconds),
    messagesCount: count,
    elapsedSeconds
  })
}

/**
 * Benchmarks the HTTP /replay-normalized endpoint, optionally adding
 * tick-by-tick book snapshot computation on top of trades and book changes.
 * Logs messages/second and totals when the stream completes.
 */
async function httpReplayNormalizedBenchmark({ computeTBTBookSnapshots }) {
  const dataTypes = ['trade', 'book_change']
  if (computeTBTBookSnapshots) {
    dataTypes.push('book_snapshot_50_0ms')
  }

  const options = {
    exchange: EXCHANGE,
    symbols: [SYMBOL],
    from: FROM_DATE,
    to: TO_DATE,
    dataTypes
  }

  const response = await fetch(`http://localhost:8000/replay-normalized?options=${serialize(options)}`)
  // split the streamed body into individual NDJSON lines
  const lines = response.body.pipe(split2())

  const startTime = new Date()
  let count = 0

  for await (const _line of lines) {
    count++
  }

  const elapsedSeconds = (new Date() - startTime) / 1000

  console.log('HTTP /replay-normalized finished', {
    computeTBTBookSnapshots,
    messagesPerSecond: Math.round(count / elapsedSeconds),
    messagesCount: count,
    elapsedSeconds
  })
}

/**
 * Benchmarks the WS /ws-replay endpoint using native exchange subscribe
 * messages. Timing starts when the first message arrives and the run ends
 * when the server closes the connection.
 */
async function wsReplayBenchmark({ JSONParseResponse }) {
  let count = 0
  let startTime

  const client = new SimpleWebsocketClient(
    `ws://localhost:8001/ws-replay?exchange=${EXCHANGE}&from=${FROM_DATE}&to=${TO_DATE}`,
    (message) => {
      // start the clock on the first received message
      startTime = startTime || new Date()
      if (JSONParseResponse) {
        JSON.parse(message)
      }
      count++
    },
    () => {
      TRADES_AND_BOOK_SUBSCRIPTION_MESSAGES.forEach((sub) => client.send(sub))
    }
  )

  await client.closed()

  const elapsedSeconds = (new Date() - startTime) / 1000

  console.log('WS /ws-replay finished', {
    JSONParseResponse,
    messagesPerSecond: Math.round(count / elapsedSeconds),
    messagesCount: count,
    elapsedSeconds
  })
}

/**
 * Benchmarks the WS /ws-replay-normalized endpoint, optionally adding
 * tick-by-tick book snapshot computation. Timing starts at the first
 * received message; the run ends when the server closes the connection.
 */
async function wsReplayNormalizedBenchmark({ computeTBTBookSnapshots }) {
  const dataTypes = ['trade', 'book_change']
  if (computeTBTBookSnapshots) {
    dataTypes.push('book_snapshot_50_0ms')
  }

  const options = {
    exchange: EXCHANGE,
    symbols: [SYMBOL],
    from: FROM_DATE,
    to: TO_DATE,
    dataTypes
  }

  let count = 0
  let startTime

  const client = new SimpleWebsocketClient(
    `ws://localhost:8001/ws-replay-normalized?options=${serialize(options)}`,
    () => {
      // start the clock on the first received message
      startTime = startTime || new Date()
      count++
    },
    () => {}
  )

  await client.closed()

  const elapsedSeconds = (new Date() - startTime) / 1000

  console.log('WS /ws-replay-normalized finished', {
    computeTBTBookSnapshots,
    messagesPerSecond: Math.round(count / elapsedSeconds),
    messagesCount: count,
    elapsedSeconds
  })
}

/**
 * Runs all benchmark scenarios sequentially against a locally running
 * tardis-machine server (HTTP on :8000, WS on :8001).
 */
async function runBenchmarks() {
  console.log(`tardis-machine benchmark for ${EXCHANGE} from ${FROM_DATE} to ${TO_DATE}`)
  console.log('\n')

  for (const JSONParseResponse of [false, true]) {
    await httpReplayBenchmark({ JSONParseResponse })
  }

  await wsReplayBenchmark({ JSONParseResponse: true })

  for (const computeTBTBookSnapshots of [false, true]) {
    await httpReplayNormalizedBenchmark({ computeTBTBookSnapshots })
  }

  for (const computeTBTBookSnapshots of [false, true]) {
    await wsReplayNormalizedBenchmark({ computeTBTBookSnapshots })
  }
}

// assumes tardis-machine server is running
runBenchmarks()


================================================
FILE: bin/tardis-machine.js
================================================
#!/usr/bin/env node
// CLI entry point for the tardis-machine server.
// Must be set before uWebSockets.js is loaded so larger request headers are accepted.
process.env.UWS_HTTP_MAX_HEADERS_SIZE = '20000'
import { createRequire } from 'node:module'

// this file is ESM; createRequire lets us load CommonJS-only dependencies
const require = createRequire(import.meta.url)
const yargs = require('yargs')
const os = require('node:os')
const path = require('node:path')
const cluster = require('node:cluster')
// one worker per CPU core when running in cluster mode
const numCPUs = os.cpus().length
const isDocker = require('is-docker')
const pkg = require('../package.json')

const DEFAULT_PORT = 8000
// CLI options; each can also be provided via a TM_* environment variable
const argv = yargs
  .scriptName('tardis-machine')
  .env('TM_')
  .strict()
  .option('api-key', {
    type: 'string',
    describe: 'API key for tardis.dev API access'
  })
  .option('cache-dir', {
    type: 'string',
    describe: 'Local cache dir path ',
    default: path.join(os.tmpdir(), '.tardis-cache')
  })
  .option('clear-cache', {
    type: 'boolean',
    describe: 'Clear cache dir on startup',
    default: false
  })
  .option('port', {
    type: 'number',
    describe: 'Port to bind server on',
    default: DEFAULT_PORT
  })
  .option('cluster-mode', {
    type: 'boolean',
    describe: 'Run tardis-machine as cluster of Node.js processes',
    default: false
  })
  .option('debug', {
    type: 'boolean',
    describe: 'Enable debug logs.',
    default: false
  })
  .help()
  .version()
  .usage('$0 [options]')
  .example('$0 --api-key=YOUR_API_KEY')
  .epilogue('See https://docs.tardis.dev/api/tardis-machine for more information.')
  .detectLocale(false).argv

// PORT env var takes precedence over the --port option
const port = process.env.PORT ? +process.env.PORT : argv['port']

if (argv['debug']) {
  // enable debug output of both tardis-machine and tardis-dev realtime namespaces
  process.env.DEBUG = 'tardis-dev:machine*,tardis-dev:realtime*'
}

// deferred dynamic import so UWS_HTTP_MAX_HEADERS_SIZE/DEBUG are set before the server code loads
const { TardisMachine } = await import('../dist/index.js')

/**
 * Creates the TardisMachine instance and starts it, either as a single
 * process or (with --cluster-mode) as one worker per CPU core using
 * round-robin scheduling. The startup banner is printed only by the primary.
 */
async function start() {
  const machine = new TardisMachine({
    apiKey: argv['api-key'],
    cacheDir: argv['cache-dir'],
    clearCache: argv['clear-cache']
  })

  const runAsCluster = argv['cluster-mode']
  let suffix = ''

  if (runAsCluster) {
    cluster.schedulingPolicy = cluster.SCHED_RR
    suffix = '(cluster mode)'

    if (cluster.isPrimary) {
      // the primary only forks workers, it never binds the ports itself
      for (let workerIndex = 0; workerIndex < numCPUs; workerIndex++) {
        cluster.fork()
      }
    } else {
      await machine.start(port)
    }
  } else {
    await machine.start(port)
  }

  // workers stay silent; only the primary prints the banner below
  if (!cluster.isPrimary) {
    return
  }

  if (isDocker() && !process.env.RUNKIT_HOST) {
    console.log(`tardis-machine server v${pkg.version} is running inside Docker container ${suffix}`)
  } else {
    console.log(`tardis-machine server v${pkg.version} is running ${suffix}`)
    console.log(`HTTP port: ${port}`)
    console.log(`WS port: ${port + 1}`)
  }

  console.log(`See https://docs.tardis.dev/api/tardis-machine for more information.`)
}

start()

// top-level safety nets: log unhandled rejections, exit on uncaught exceptions
process
  .on('unhandledRejection', (reason, p) => {
    console.error('Unhandled Rejection at Promise', reason, p)
  })
  .on('uncaughtException', (err) => {
    console.error('Uncaught Exception thrown', err)
    process.exit(1)
  })


================================================
FILE: package.json
================================================
{
  "name": "tardis-machine",
  "version": "16.1.0",
  "engines": {
    "node": ">=25"
  },
  "devEngines": {
    "runtime": {
      "name": "node",
      "version": ">=25"
    },
    "packageManager": {
      "name": "npm",
      "version": ">=11.11.1"
    }
  },
  "description": "Locally runnable server with built-in data caching, providing both tick-level historical and consolidated real-time cryptocurrency market data via HTTP and WebSocket APIs",
  "main": "dist/index.js",
  "source": "src/index.ts",
  "types": "dist/index.d.ts",
  "type": "module",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js",
      "default": "./dist/index.js"
    },
    "./package.json": "./package.json"
  },
  "repository": "tardis-dev/tardis-machine",
  "homepage": "https://github.com/tardis-dev/tardis-machine",
  "scripts": {
    "build": "tsc",
    "precommit": "lint-staged",
    "test": "npm run build && cross-env UWS_HTTP_MAX_HEADERS_SIZE=20000 node --experimental-vm-modules ./node_modules/jest/bin/jest.js --forceExit --runInBand",
    "prepare": "npm run build",
    "format": "prettier --write .",
    "check-format": "prettier --check .",
    "benchmark": "node ./benchmark.js"
  },
  "bin": {
    "tardis-machine": "./bin/tardis-machine.js"
  },
  "files": [
    "src",
    "dist",
    "bin",
    "benchmark.js"
  ],
  "keywords": [
    "cryptocurrency data feed",
    "market data",
    "api client",
    "crypto markets data replay",
    "historical data",
    "real-time cryptocurrency market data feed",
    "historical cryptocurrency prices",
    "cryptocurrency api",
    "real-time normalized WebSocket cryptocurrency markets data",
    "normalized cryptocurrency market data API",
    "order book reconstruction",
    "market data normalization",
    "cryptocurrency api",
    "cryptocurrency",
    "orderbook",
    "exchange",
    "websocket",
    "realtime",
    "bitmex",
    "binance",
    "trading",
    "high granularity order book data",
    "replay service",
    "historical cryptocurrency market data replay API"
  ],
  "license": "MPL-2.0",
  "dependencies": {
    "debug": "^4.4.1",
    "find-my-way": "^9.3.0",
    "is-docker": "^2.2.1",
    "tardis-dev": "^16.1.1",
    "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.59.0",
    "yargs": "^17.5.1"
  },
  "devDependencies": {
    "@types/debug": "^4.1.12",
    "@types/jest": "^30.0.0",
    "@types/node": "^25.5.2",
    "@types/node-fetch": "^2.6.13",
    "@types/split2": "^4.2.3",
    "@types/ws": "^8.18.1",
    "@types/yargs": "^17.0.33",
    "cross-env": "^10.1.0",
    "husky": "^9.1.7",
    "jest": "^30.0.5",
    "lint-staged": "^16.1.2",
    "node-fetch": "^2.6.1",
    "prettier": "^3.6.2",
    "split2": "^4.2.0",
    "ts-jest": "^29.4.0",
    "typescript": "^5.9.2",
    "ws": "^8.18.3"
  },
  "lint-staged": {
    "*.{ts}": [
      "prettier --write",
      "git add"
    ]
  },
  "jest": {
    "extensionsToTreatAsEsm": [
      ".ts"
    ],
    "moduleNameMapper": {
      "^(\\.{1,2}/.*)\\.js$": "$1"
    },
    "transform": {
      "\\.(ts|tsx)?$": [
        "ts-jest",
        {
          "useESM": true,
          "tsconfig": "./test/tsconfig.json"
        }
      ]
    },
    "testEnvironment": "node"
  }
}


================================================
FILE: src/debug.ts
================================================
import dbg from 'debug'
// shared namespaced debug logger for the server; enable with DEBUG=tardis-dev:machine*
export const debug = dbg('tardis-dev:machine')


================================================
FILE: src/helpers.ts
================================================
import {
  ComputableFactory,
  computeBookSnapshots,
  computeTradeBars,
  Disconnect,
  MapperFactory,
  normalizeBookChanges,
  NormalizedData,
  normalizeDerivativeTickers,
  normalizeLiquidations,
  normalizeTrades,
  normalizeOptionsSummary,
  ReplayNormalizedOptions,
  StreamNormalizedOptions,
  normalizeBookTickers
} from 'tardis-dev'

export type WithDataType = {
  dataTypes: string[]
}

export type ReplayNormalizedOptionsWithDataType = ReplayNormalizedOptions<any, any> & WithDataType

export type ReplayNormalizedRequestOptions = ReplayNormalizedOptionsWithDataType | ReplayNormalizedOptionsWithDataType[]

export type StreamNormalizedOptionsWithDataType = StreamNormalizedOptions<any, any> & WithDataType & { withErrorMessages?: boolean }

export type StreamNormalizedRequestOptions = StreamNormalizedOptionsWithDataType | StreamNormalizedOptionsWithDataType[]

/**
 * Yields the tardis-dev normalizer factories required to produce the
 * requested data types. Computed types (trade bars, snapshots, quotes)
 * pull in the normalizer for their underlying raw type.
 */
export function* getNormalizers(dataTypes: string[]): IterableIterator<MapperFactory<any, any>> {
  const has = (dataType: string) => dataTypes.includes(dataType)
  const hasWithPrefix = (prefix: string) => dataTypes.some((dataType) => dataType.startsWith(prefix))

  // trades are needed both directly and as input for trade bars
  if (has('trade') || hasWithPrefix('trade_bar_')) {
    yield normalizeTrades
  }

  // book changes are needed directly and as input for snapshots/quotes
  if (has('book_change') || hasWithPrefix('book_snapshot_') || hasWithPrefix('quote')) {
    yield normalizeBookChanges
  }

  if (has('derivative_ticker')) {
    yield normalizeDerivativeTickers
  }

  if (has('liquidation')) {
    yield normalizeLiquidations
  }

  if (has('option_summary')) {
    yield normalizeOptionsSummary
  }

  if (has('book_ticker')) {
    yield normalizeBookTickers
  }
}

/**
 * Maps requested data type strings to the message `type` values produced by
 * tardis-dev: parameterized types (trade_bar_10s, book_snapshot_50_0ms,
 * quote_100ms) collapse to their base message type; everything else passes
 * through unchanged.
 */
function getRequestedDataTypes(options: ReplayNormalizedOptionsWithDataType | StreamNormalizedOptionsWithDataType) {
  return options.dataTypes.map((dataType) => {
    if (dataType.startsWith('trade_bar_')) {
      return 'trade_bar'
    }

    // both book snapshots and quotes are emitted as book_snapshot messages
    if (dataType.startsWith('book_snapshot_') || dataType.startsWith('quote')) {
      return 'book_snapshot'
    }

    return dataType
  })
}

/**
 * Builds a predicate that keeps only messages whose type was explicitly
 * requested for their exchange. Disconnect messages pass through when any
 * options entry set withDisconnectMessages.
 */
export function constructDataTypeFilter(options: (ReplayNormalizedOptionsWithDataType | StreamNormalizedOptionsWithDataType)[]) {
  const requestedDataTypesPerExchange: Record<string, string[]> = {}

  for (const option of options) {
    const requested = getRequestedDataTypes(option)
    const existing = requestedDataTypesPerExchange[option.exchange]

    requestedDataTypesPerExchange[option.exchange] = existing === undefined ? requested : [...existing, ...requested]
  }

  const returnDisconnectMessages = options.some((o) => o.withDisconnectMessages)

  return (message: NormalizedData | Disconnect) => {
    if (message.type === 'disconnect' && returnDisconnectMessages) {
      return true
    }

    return requestedDataTypesPerExchange[message.exchange].includes(message.type)
  }
}

// Maps trade bar data type suffixes to bar kind and interval multiplier.
// NOTE: key order matters — suffix matching iterates in declaration order,
// so 'ms' must be checked before 's' ('100ms' ends with both).
const tradeBarSuffixToKindMap = {
  ticks: {
    kind: 'tick',
    multiplier: 1
  },
  ms: {
    kind: 'time',
    multiplier: 1
  },
  s: {
    kind: 'time',
    multiplier: 1000
  },
  m: {
    kind: 'time',
    multiplier: 60 * 1000
  },

  vol: {
    kind: 'volume',
    multiplier: 1
  }
} as const

// Maps snapshot/quote interval suffixes to milliseconds multipliers;
// same declaration-order caveat as above ('ms' before 's').
const bookSnapshotsToIntervalMultiplierMap = {
  ms: {
    multiplier: 1
  },
  s: {
    multiplier: 1000
  },
  m: {
    multiplier: 60 * 1000
  }
} as const

// Object.keys typed so indexing the const maps above stays type-safe
const getKeys = <T extends {}>(o: T): Array<keyof T> => <Array<keyof T>>Object.keys(o)

/**
 * Returns the computable factories (trade bars, book snapshots, quotes)
 * required to produce the requested derived data types. Plain data types
 * contribute nothing.
 */
export function getComputables(dataTypes: string[]): ComputableFactory<any>[] {
  const computables: ComputableFactory<any>[] = []

  for (const dataType of dataTypes) {
    if (dataType.startsWith('trade_bar')) {
      computables.push(parseAsTradeBarComputable(dataType))
    }
    if (dataType.startsWith('book_snapshot')) {
      computables.push(parseAsBookSnapshotComputable(dataType))
    }
    if (dataType.startsWith('quote')) {
      computables.push(parseAsQuoteComputable(dataType))
    }
  }

  return computables
}

/**
 * Parses a 'trade_bar_<interval><suffix>' data type (e.g. trade_bar_10s)
 * into a trade bars computable (tick / time / volume based).
 * Throws for unknown suffixes or non-numeric intervals.
 */
function parseAsTradeBarComputable(dataType: string) {
  // first declared suffix that matches wins, mirroring map declaration order
  const suffix = getKeys(tradeBarSuffixToKindMap).find((candidate) => dataType.endsWith(candidate))

  if (suffix === undefined) {
    throw new Error(`invalid data type: ${dataType}`)
  }

  const intervalString = dataType.replace('trade_bar_', '').replace(suffix, '')
  const interval = Number(intervalString)
  if (Number.isNaN(interval)) {
    throw new Error(`invalid interval: ${intervalString}, data type: ${dataType}`)
  }

  const { kind, multiplier } = tradeBarSuffixToKindMap[suffix]

  return computeTradeBars({
    interval: multiplier * interval,
    kind,
    name: dataType
  })
}

/**
 * Parses a 'book_snapshot_<depth>_<interval><suffix>' data type, optionally
 * with a grouping segment ('book_snapshot_<depth>_grouped<g>_<interval><suffix>'),
 * into a book snapshots computable. Throws when depth, grouping or interval
 * are not numeric, or when no known interval suffix matches.
 */
function parseAsBookSnapshotComputable(dataType: string) {
  for (const suffix of getKeys(bookSnapshotsToIntervalMultiplierMap)) {
    if (dataType.endsWith(suffix) === false) {
      continue
    }

    const parts = dataType.split('_')

    // parts[2] is the depth segment, e.g. '50' in book_snapshot_50_0ms
    const depthString = parts[2]
    const depth = Number(parts[2])
    if (Number.isNaN(depth)) {
      throw new Error(`invalid depth: ${depthString}, data type: ${dataType}`)
    }
    // the last segment is '<interval><suffix>', e.g. '100ms'
    const intervalString = parts[parts.length - 1].replace(suffix, '')

    const interval = Number(intervalString)
    if (Number.isNaN(interval)) {
      throw new Error(`invalid interval: ${intervalString}, data type: ${dataType}`)
    }

    // 5 segments means a 'groupedN' segment is present at parts[3]
    const isGrouped = parts.length === 5

    let grouping
    if (isGrouped) {
      const groupingString = parts[3].replace('grouped', '')

      grouping = Number(groupingString)
      if (Number.isNaN(grouping)) {
        // fixed: this error previously said 'invalid interval' for a bad grouping value
        throw new Error(`invalid grouping: ${groupingString}, data type: ${dataType}`)
      }
    }

    return computeBookSnapshots({
      interval: bookSnapshotsToIntervalMultiplierMap[suffix].multiplier * interval,
      grouping,
      depth,
      name: dataType,
      removeCrossedLevels: true
    })
  }

  throw new Error(`invalid data type: ${dataType}`)
}

/**
 * Parses a 'quote' or 'quote_<interval><suffix>' data type into a
 * top-of-book (depth 1) snapshot computable. Throws on unparsable
 * intervals or unknown suffixes.
 */
function parseAsQuoteComputable(dataType: string) {
  // bare 'quote' means real-time (0ms interval) top of book
  if (dataType === 'quote') {
    return computeBookSnapshots({
      interval: 0,
      depth: 1,
      name: dataType,
      removeCrossedLevels: true
    })
  }

  // first declared suffix that matches wins, mirroring map declaration order
  const suffix = getKeys(bookSnapshotsToIntervalMultiplierMap).find((candidate) => dataType.endsWith(candidate))

  if (suffix === undefined) {
    throw new Error(`invalid data type: ${dataType}`)
  }

  const intervalString = dataType.replace('quote_', '').replace(suffix, '')
  const interval = Number(intervalString)
  if (Number.isNaN(interval)) {
    throw new Error(`invalid interval: ${intervalString}, data type: ${dataType}`)
  }

  return computeBookSnapshots({
    interval: bookSnapshotsToIntervalMultiplierMap[suffix].multiplier * interval,
    depth: 1,
    name: dataType,
    removeCrossedLevels: true
  })
}

// Resolves after the given number of milliseconds (setTimeout-based sleep).
export const wait = (delayMS: number) =>
  new Promise((resolve) => {
    setTimeout(resolve, delayMS)
  })

// keep a reference to the built-in implementation before patching
const oldToISOString = Date.prototype.toISOString

// if Date provides microseconds add those to ISO date
// Global monkey-patch: when a Date instance carries a `μs` property
// (presumably attached by the data decoding layer — TODO confirm), the
// microseconds are zero-padded to 3 digits and inserted before the 'Z'
// so serialized timestamps keep microsecond precision.
Date.prototype.toISOString = function () {
  if (this.μs !== undefined) {
    const isoString = oldToISOString.apply(this)

    return isoString.slice(0, isoString.length - 1) + this.μs.toString().padStart(3, '0') + 'Z'
  }
  return oldToISOString.apply(this)
}


================================================
FILE: src/http/healthCheck.ts
================================================
import type { IncomingMessage, ServerResponse } from 'node:http'
const BYTES_IN_MB = 1024 * 1024

/**
 * GET /health-check handler.
 *
 * Responds with a JSON payload containing server status, uptime in hours
 * and process memory usage in MB. On any unexpected error responds with
 * 500 and an 'Unhealthy' message, unless the response already ended.
 */
export const healthCheck = (_: IncomingMessage, res: ServerResponse) => {
  res.setHeader('Content-Type', 'application/json')

  try {
    const memUsage = process.memoryUsage()

    const message = {
      status: 'Healthy',
      uptimeHours: Number((process.uptime() / (60 * 60)).toFixed(2)),
      timestampMs: Date.now(),
      memoryInfo: {
        rssMB: Number((memUsage.rss / BYTES_IN_MB).toFixed(1)),
        heapTotalMB: Number((memUsage.heapTotal / BYTES_IN_MB).toFixed(1)),
        heapUsedMB: Number((memUsage.heapUsed / BYTES_IN_MB).toFixed(1)),
        externalMB: Number((memUsage.external / BYTES_IN_MB).toFixed(1))
      }
    }

    res.end(JSON.stringify(message))
  } catch {
    // res.finished is deprecated; res.writableEnded is the supported equivalent
    if (!res.writableEnded) {
      res.statusCode = 500

      res.end(
        JSON.stringify({
          message: 'Unhealthy'
        })
      )
    }
  }
}


================================================
FILE: src/http/index.ts
================================================
export * from './replay.ts'
export * from './replaynormalized.ts'
export * from './healthCheck.ts'


================================================
FILE: src/http/replay.ts
================================================
import { once } from 'node:events'
import type { IncomingMessage, OutgoingMessage, ServerResponse } from 'node:http'
import { replay, ReplayOptions } from 'tardis-dev'
import { debug } from '../debug.ts'

/**
 * GET /replay handler.
 *
 * Parses ReplayOptions from the 'options' query string param and streams
 * raw (non-decoded) historical exchange messages back as NDJSON. On error
 * replies with the upstream status (or 500) and a JSON error descriptor.
 */
export const replayHttp = async (req: IncomingMessage, res: ServerResponse) => {
  try {
    const startTimestamp = new Date().getTime()
    const requestUrl = new URL(req.url!, 'http://localhost')
    const optionsString = requestUrl.searchParams.get('options') ?? undefined
    const replayOptions = JSON.parse(optionsString as string) as ReplayOptions<any, any, any>

    debug('GET /replay request started, options: %o', replayOptions)

    const streamedMessagesCount = await writeMessagesToResponse(res, replayOptions)
    const endTimestamp = new Date().getTime()

    debug(
      'GET /replay request finished, options: %o, time: %d seconds, total messages count:%d',
      replayOptions,
      (endTimestamp - startTimestamp) / 1000,
      streamedMessagesCount
    )
  } catch (e: any) {
    const errorInfo = {
      responseText: e.responseText,
      message: e.message,
      url: e.url
    }

    debug('GET /replay request error: %o', e)
    console.error('GET /replay request error:', e)

    // res.finished is deprecated; res.writableEnded is the supported equivalent
    if (!res.writableEnded) {
      res.statusCode = e.status || 500
      res.end(JSON.stringify(errorInfo))
    }
  }
}

/**
 * Streams raw replay messages to the response as NDJSON lines of the form
 * {"localTimestamp":"...","message":...}. Lines are stitched from raw
 * buffers (no decode/re-encode round trip), batched, and writes honor
 * backpressure. Returns the total number of messages processed.
 */
async function writeMessagesToResponse(res: OutgoingMessage, replayOptions: ReplayOptions<any, any, any>) {
  const responsePrefixBuffer = Buffer.from('{"localTimestamp":"')
  const responseMiddleBuffer = Buffer.from('","message":')
  const responseSuffixBuffer = Buffer.from('}\n')
  const newLineBuffer = Buffer.from('\n')
  const BATCH_SIZE = 32

  // not 100% sure that's necessary since we're returning ndjson in fact, not json
  res.setHeader('Content-Type', 'application/x-json-stream')

  let buffers: Buffer[] = []
  let totalMessagesCount = 0

  // skipDecoding keeps messages as raw buffers for fast pass-through
  const messages = replay({ ...replayOptions, skipDecoding: true })

  for await (let messageWithTimestamp of messages) {
    totalMessagesCount++

    if (messageWithTimestamp === undefined) {
      // if received message is undefined (disconnect)
      // return it as new line
      buffers.push(newLineBuffer)
    } else {
      // instead of writing each message directly to response,
      // let's batch them and send in BATCH_SIZE batches (each message is 5 buffers: prefix etc)
      // also instead of converting messages to string or parsing them let's manually stitch together desired json response using buffers which is faster
      buffers.push(
        responsePrefixBuffer,
        messageWithTimestamp.localTimestamp,
        responseMiddleBuffer,
        messageWithTimestamp.message,
        responseSuffixBuffer
      )

      if (buffers.length >= BATCH_SIZE * 5) {
        const ok = res.write(Buffer.concat(buffers))
        buffers = []

        if (!ok) {
          // wait for the client to drain before producing more data
          await once(res, 'drain')
        }
      }
    }
  }

  // flush the final partial batch
  if (buffers.length > 0) {
    res.write(Buffer.concat(buffers))
    buffers = []
  }

  res.end('')

  return totalMessagesCount
}


================================================
FILE: src/http/replaynormalized.ts
================================================
import { once } from 'node:events'
import type { IncomingMessage, OutgoingMessage, ServerResponse } from 'node:http'
import { combine, compute, replayNormalized } from 'tardis-dev'
import { debug } from '../debug.ts'
import { constructDataTypeFilter, getComputables, getNormalizers, ReplayNormalizedRequestOptions } from '../helpers.ts'

/**
 * GET /replay-normalized handler.
 *
 * Parses normalized replay options from the 'options' query string param
 * and streams normalized (and optionally computed) messages back as NDJSON.
 * On error replies with the upstream status (or 500) and a JSON error
 * descriptor.
 */
export const replayNormalizedHttp = async (req: IncomingMessage, res: ServerResponse) => {
  try {
    const startTimestamp = new Date().getTime()
    const requestUrl = new URL(req.url!, 'http://localhost')
    const optionsString = requestUrl.searchParams.get('options') ?? undefined
    const replayNormalizedOptions = JSON.parse(optionsString as string) as ReplayNormalizedRequestOptions

    debug('GET /replay-normalized request started, options: %o', replayNormalizedOptions)

    const streamedMessagesCount = await writeMessagesToResponse(res, replayNormalizedOptions)
    const endTimestamp = new Date().getTime()

    debug(
      'GET /replay-normalized request finished, options: %o, time: %d seconds, total messages count: %d',
      replayNormalizedOptions,
      (endTimestamp - startTimestamp) / 1000,
      streamedMessagesCount
    )
  } catch (e: any) {
    const errorInfo = {
      responseText: e.responseText,
      message: e.message,
      url: e.url
    }

    debug('GET /replay-normalized request error: %o', e)
    console.error('GET /replay-normalized request error:', e)

    // res.finished is deprecated; res.writableEnded is the supported equivalent
    if (!res.writableEnded) {
      res.statusCode = e.status || 500
      res.end(JSON.stringify(errorInfo))
    }
  }
}

/**
 * Streams normalized (and optionally computed) messages to the response as
 * NDJSON, filtered down to the explicitly requested data types, batching
 * writes and honoring backpressure. Returns the total messages written.
 */
async function writeMessagesToResponse(res: OutgoingMessage, options: ReplayNormalizedRequestOptions) {
  const BATCH_SIZE = 32

  res.setHeader('Content-Type', 'application/x-json-stream')

  const optionsList = Array.isArray(options) ? options : [options]

  // each options entry becomes its own normalized (and possibly computed) stream
  const messagesIterables = optionsList.map((option) => {
    // map dataTypes to the normalizers required to produce them
    const normalized = replayNormalized(option, ...getNormalizers(option.dataTypes))
    // separately check if any computables (bars/snapshots/quotes) are needed
    const computables = getComputables(option.dataTypes)

    return computables.length > 0 ? compute(normalized, ...computables) : normalized
  })

  const filterByDataType = constructDataTypeFilter(optionsList)

  // a single stream is consumed directly; multiple streams are merged by time
  const messages = messagesIterables.length === 1 ? messagesIterables[0] : combine(...messagesIterables)

  let pendingLines: string[] = []
  let totalMessagesCount = 0

  for await (const message of messages) {
    // drop message types that were only produced as inputs for computables
    // (e.g. book_change when only book snapshots were requested)
    if (filterByDataType(message) === false) {
      continue
    }

    totalMessagesCount++
    pendingLines.push(JSON.stringify(message))

    if (pendingLines.length === BATCH_SIZE) {
      const ok = res.write(`${pendingLines.join('\n')}\n`)
      pendingLines = []

      if (!ok) {
        // wait for the client to drain before producing more data
        await once(res, 'drain')
      }
    }
  }

  // flush the final partial batch
  if (pendingLines.length > 0) {
    res.write(`${pendingLines.join('\n')}\n`)
    pendingLines = []
  }

  res.end('')

  return totalMessagesCount
}


================================================
FILE: src/index.ts
================================================
export { TardisMachine } from './tardismachine.ts'


================================================
FILE: src/tardismachine.ts
================================================
import findMyWay from 'find-my-way'
import http from 'node:http'
import { createRequire } from 'module'
import { clearCache, init } from 'tardis-dev'
import { App, DISABLED, TemplatedApp } from 'uWebSockets.js'
import { healthCheck, replayHttp, replayNormalizedHttp } from './http/index.ts'
import { replayNormalizedWS, replayWS, streamNormalizedWS } from './ws/index.ts'
import { debug } from './debug.ts'

const require = createRequire(import.meta.url)
const packageJson = require('../package.json') as { version: string }

/**
 * tardis-machine server: an HTTP server (replay, replay-normalized,
 * health-check endpoints) on the given port plus a uWebSockets.js WS
 * server (ws-replay, ws-replay-normalized, ws-stream-normalized) on port + 1.
 */
export class TardisMachine {
  private readonly _httpServer: http.Server
  private readonly _wsServer: TemplatedApp
  // watchdog timer for event loop blockage detection; cleared in stop()
  private _eventLoopTimerId: NodeJS.Timeout | undefined = undefined

  constructor(private readonly options: Options) {
    // configure the tardis-dev client (API key, cache location, UA string)
    init({
      apiKey: options.apiKey,
      cacheDir: options.cacheDir,
      _userAgent: `tardis-machine/${packageJson.version} (+https://github.com/tardis-dev/tardis-machine)`
    })

    const router = findMyWay({ ignoreTrailingSlash: true })

    this._httpServer = http.createServer((req, res) => {
      router.lookup(req, res)
    })

    // set timeout to 0 meaning infinite http timeout - streaming may take some time especially for longer date ranges
    this._httpServer.timeout = 0

    router.on('GET', '/replay', replayHttp)
    router.on('GET', '/replay-normalized', replayNormalizedHttp)
    router.on('GET', '/health-check', healthCheck)

    // WS routes dispatched manually in the open handler below
    const wsRoutes = {
      '/ws-replay': replayWS,
      '/ws-replay-normalized': replayNormalizedWS,
      '/ws-stream-normalized': streamNormalizedWS
    } as any

    this._wsServer = App().ws('/*', {
      compression: DISABLED,
      maxPayloadLength: 512 * 1024,
      idleTimeout: 60,
      maxBackpressure: 5 * 1024 * 1024,
      closeOnBackpressureLimit: true,
      // keep the original request around so open() can read the URL/query
      upgrade: (res: any, req: any, context: any) => {
        res.upgrade(
          { req },
          req.getHeader('sec-websocket-key'),
          req.getHeader('sec-websocket-protocol'),
          req.getHeader('sec-websocket-extensions'),
          context
        )
      },
      open: (ws: any) => {
        const path = ws.req.getUrl().toLocaleLowerCase()
        ws.closed = false
        const matchingRoute = wsRoutes[path]

        if (matchingRoute !== undefined) {
          matchingRoute(ws, ws.req)
        } else {
          // 1008: policy violation close code for unknown WS routes
          ws.end(1008)
        }
      },

      // route handlers attach ws.onmessage / ws.onclose as needed
      message: (ws: any, message: ArrayBuffer) => {
        if (ws.onmessage !== undefined) {
          ws.onmessage(message)
        }
      },

      close: (ws: any) => {
        ws.closed = true
        if (ws.onclose !== undefined) {
          ws.onclose()
        }
      }
    } as any)
  }

  /**
   * Starts the HTTP server on `port` and the WS server on `port + 1`,
   * optionally clearing the local data cache first. Also starts an
   * event-loop-blockage watchdog that logs when the loop stalls > 2s.
   */
  public async start(port: number) {
    let start = process.hrtime()
    const interval = 500

    // based on https://github.com/tj/node-blocked/blob/master/index.js
    this._eventLoopTimerId = setInterval(() => {
      const delta = process.hrtime(start)
      const nanosec = delta[0] * 1e9 + delta[1]
      const ms = nanosec / 1e6
      const n = ms - interval

      if (n > 2000) {
        debug('Tardis-machine server event loop blocked for %d ms.', Math.round(n))
      }

      start = process.hrtime()
    }, interval)

    if (this.options.clearCache) {
      await clearCache()
    }

    await new Promise<void>((resolve, reject) => {
      try {
        this._httpServer.on('error', reject)
        this._httpServer.listen(port, () => {
          this._wsServer.listen(port + 1, (listenSocket) => {
            if (listenSocket) {
              resolve()
            } else {
              reject(new Error('ws server did not start'))
            }
          })
        })
      } catch (e) {
        reject(e)
      }
    })
  }

  /**
   * Stops the HTTP server and the watchdog timer.
   * NOTE(review): the uWS server is not explicitly closed here —
   * presumably relies on process exit; confirm if graceful WS shutdown is needed.
   */
  public async stop() {
    await new Promise<void>((resolve, reject) => {
      this._httpServer.close((err) => {
        err ? reject(err) : resolve()
      })
    })

    if (this._eventLoopTimerId !== undefined) {
      clearInterval(this._eventLoopTimerId)
    }
  }
}

// Configuration for TardisMachine
type Options = {
  apiKey?: string // optional tardis.dev API key passed to tardis-dev init
  cacheDir: string // local disk cache directory for downloaded data
  clearCache?: boolean // when true the cache dir is cleared on start()
}


================================================
FILE: src/ws/index.ts
================================================
export * from './replay.ts'
export * from './replaynormalized.ts'
export * from './streamnormalized.ts'


================================================
FILE: src/ws/replay.ts
================================================
import { decode } from 'node:querystring'
import { combine, Exchange, replay, ReplayOptions } from 'tardis-dev'
import type { HttpRequest } from 'uWebSockets.js'
import { debug } from '../debug.ts'
import { wait } from '../helpers.ts'
import { SubscriptionMapper, subscriptionsMappers } from './subscriptionsmappers.ts'

// active replay sessions keyed by session key; entries are cleared once a session finishes
const replaySessions: { [sessionKey: string]: ReplaySession | undefined } = {}
// generates unique session keys for connections without an explicit session param
let sessionsCounter = 0

/**
 * WS /ws-replay handler.
 *
 * Accepts exchange/from/to (and optional session) query params. Multiple
 * connections made with the same session key in a short time frame are
 * consolidated into one ReplaySession so messages sent over them are
 * synchronized by local timestamp; otherwise each connection gets its own
 * session (unique auto-generated key). Joining a session that has already
 * started closes the socket with code 1011.
 */
export function replayWS(ws: any, req: HttpRequest) {
  const parsedQuery = decode(req.getQuery())
  const from = parsedQuery['from'] as string
  const to = parsedQuery['to'] as string
  const exchange = parsedQuery['exchange'] as Exchange

  const replaySessionKey = (parsedQuery['session'] as string) || exchange + sessionsCounter++

  // fixed: was `replaySessions[key] && replaySessions[key]` — the redundant
  // `&&` was a no-op, a plain lookup is equivalent
  let matchingReplaySessionMeta = replaySessions[replaySessionKey]

  if (matchingReplaySessionMeta === undefined) {
    const newReplaySession = new ReplaySession()

    replaySessions[replaySessionKey] = newReplaySession
    matchingReplaySessionMeta = newReplaySession

    // release the session slot once replay finishes so the key can be reused
    newReplaySession.onFinished(() => {
      replaySessions[replaySessionKey] = undefined
    })
  }

  if (matchingReplaySessionMeta.hasStarted) {
    const message = 'trying to add new WS connection to replay session that already started'
    debug(message)
    ws.end(1011, message)
    return
  }

  matchingReplaySessionMeta.addToSession(new WebsocketConnection(ws, exchange, from, to))
}

/**
 * Replays historical data for one or more websocket connections that share a
 * session key, keeping messages across connections synchronized by local
 * timestamp. The session starts itself automatically shortly after creation;
 * connections must be added (and must have subscribed) before that.
 */
class ReplaySession {
  private readonly _connections: WebsocketConnection[] = []
  private _hasStarted: boolean = false

  constructor() {
    const SESSION_START_DELAY_MS = 2000

    debug('creating new ReplaySession')

    // give clients a short window to open connections and send subscribe
    // messages before the replay begins
    setTimeout(() => {
      this._start()
    }, SESSION_START_DELAY_MS)
  }

  // Registers a connection with this session; throws if the replay already began.
  public addToSession(websocketConnection: WebsocketConnection) {
    if (this._hasStarted) {
      throw new Error('Replay session already started')
    }

    this._connections.push(websocketConnection)
    debug('added new connection to ReplaySession, %s', websocketConnection)
  }

  public get hasStarted() {
    return this._hasStarted
  }

  private _onFinishedCallback: () => void = () => {}

  // Replays messages for every registered connection, closes all connections
  // when done (or on error), then fires the onFinished callback.
  private async _start() {
    try {
      debug('starting ReplaySession, %s', this._connections.join(', '))
      this._hasStarted = true

      const connectionsWithoutSubscriptions = this._connections.filter((c) => c.subscriptionsCount === 0)
      if (connectionsWithoutSubscriptions.length > 0) {
        throw new Error(`No subscriptions received for websocket connection ${connectionsWithoutSubscriptions[0]}`)
      }

      // fast path for case when there is only single WS connection for given replay session
      if (this._connections.length === 1) {
        const connection = this._connections[0]

        const messages = replay({
          ...connection.replayOptions,
          skipDecoding: true,
          withDisconnects: false
        })

        for await (const { message } of messages) {
          const success = connection.ws.send(message)
          // handle backpressure in case of slow clients
          if (!success) {
            while (connection.ws.getBufferedAmount() > 0) {
              await wait(1)
            }
          }
        }
      } else {
        // map connections to replay messages streams enhanced with additional ws field so
        // when we combine streams by localTimestamp we'll know which ws we should send given message via
        const messagesWithConnections = this._connections.map(async function* (connection) {
          const messages = replay({
            ...connection.replayOptions,
            skipDecoding: true,
            withDisconnects: false
          })

          for await (const { localTimestamp, message } of messages) {
            yield {
              ws: connection.ws,
              // localTimestamp is a raw (undecoded) buffer here — parse it into a Date
              localTimestamp: new Date(localTimestamp.toString()),
              message
            }
          }
        })

        // combine() merges the per-connection streams ordered by localTimestamp
        for await (const { ws, message } of combine(...messagesWithConnections)) {
          const success = ws.send(message)
          // handle backpressure in case of slow clients
          if (!success) {
            while (ws.getBufferedAmount() > 0) {
              await wait(1)
            }
          }
        }
      }

      await this._closeAllConnections()

      debug(
        'finished ReplaySession with %d connections, %s',
        this._connections.length,
        this._connections.map((c) => c.toString())
      )
    } catch (e: any) {
      debug('received error in ReplaySession, %o', e)
      await this._closeAllConnections(e)
    } finally {
      this._onFinishedCallback()
    }
  }

  // Closes every still-open connection; when closing normally (no error) waits
  // for each connection's send buffer to drain first.
  private async _closeAllConnections(error: Error | undefined = undefined) {
    for (let i = 0; i < this._connections.length; i++) {
      const connection = this._connections[i]
      if (connection.ws.closed) {
        continue
      }

      // let's wait until buffer is empty before closing normal connections
      while (!error && connection.ws.getBufferedAmount() > 0) {
        await wait(100)
      }

      connection.close(error)
    }
  }

  // Registers a callback invoked exactly once when the session ends (success or error).
  public onFinished(onFinishedCallback: () => void) {
    this._onFinishedCallback = onFinishedCallback
  }
}

/**
 * Wraps a single /ws-replay websocket connection: accumulates subscribe
 * messages into tardis-dev replay filters and exposes the resulting
 * ReplayOptions for the owning ReplaySession.
 */
class WebsocketConnection {
  public readonly replayOptions: ReplayOptions<any>
  private readonly _subscriptionsMapper: SubscriptionMapper
  public subscriptionsCount = 0

  constructor(
    public readonly ws: any,
    exchange: Exchange,
    from: string,
    to: string
  ) {
    this.replayOptions = { exchange, from, to, filters: [] }

    const mapper = subscriptionsMappers[exchange]
    if (mapper === undefined) {
      throw new Error(`Exchange ${exchange} is not supported via /ws-replay Websocket API, please use HTTP streaming API instead.`)
    }

    this._subscriptionsMapper = mapper
    // every incoming websocket message is treated as a potential subscribe request
    this.ws.onmessage = this._convertSubscribeRequestToFilter.bind(this)
  }

  // Closes the connection: code 1000 on normal completion, 1011 with the error text otherwise.
  public close(error: Error | undefined = undefined) {
    if (this.ws.closed) {
      return
    }

    if (error !== undefined) {
      debug('Closed websocket connection %s, error: %o', this, error)
      this.ws.end(1011, error.toString())
      return
    }

    debug('Closed websocket connection %s', this)
    this.ws.end(1000, 'WS replay finished')
  }

  public toString() {
    return JSON.stringify(this.replayOptions)
  }

  // Parses an incoming message and, when the exchange mapper recognizes it as a
  // subscribe request, appends the mapped filters to the replay options.
  // Unrecognized or malformed messages are logged and ignored.
  private _convertSubscribeRequestToFilter(messageRaw: ArrayBuffer) {
    const message = Buffer.from(messageRaw).toString()
    try {
      const messageDeserialized = JSON.parse(message)
      const replayFrom = new Date(this.replayOptions.from)

      if (this._subscriptionsMapper.canHandle(messageDeserialized, replayFrom) === false) {
        debug('Ignored websocket message %s', message)
        return
      }

      const filters = this._subscriptionsMapper.map(messageDeserialized, replayFrom)
      debug('Received subscribe websocket message: %s, mapped filters: %o', message, filters)
      this.replayOptions.filters.push(...filters)
      this.subscriptionsCount++
    } catch (e) {
      console.error('convertSubscribeRequestToFilter Error', e)
      debug('Ignored websocket message %s, error %o', message, e)
    }
  }
}


================================================
FILE: src/ws/replaynormalized.ts
================================================
import { decode } from 'node:querystring'
import { combine, compute, replayNormalized } from 'tardis-dev'
import type { HttpRequest } from 'uWebSockets.js'
import { debug } from '../debug.ts'
import { constructDataTypeFilter, getComputables, getNormalizers, ReplayNormalizedRequestOptions, wait } from '../helpers.ts'

/**
 * Handles /ws-replay-normalized websocket requests: replays historical
 * normalized data (optionally combined across multiple exchanges/options) to
 * the connected client.
 *
 * Expects an `options` query string parameter containing JSON-serialized
 * ReplayNormalizedRequestOptions (single object or array).
 */
export async function replayNormalizedWS(ws: any, req: HttpRequest) {
  let messages: AsyncIterableIterator<any> | undefined
  try {
    const startTimestamp = new Date().getTime()
    const parsedQuery = decode(req.getQuery())
    const optionsString = parsedQuery['options'] as string
    const replayNormalizedOptions = JSON.parse(optionsString) as ReplayNormalizedRequestOptions

    debug('WebSocket /ws-replay-normalized started, options: %o', replayNormalizedOptions)

    const options = Array.isArray(replayNormalizedOptions) ? replayNormalizedOptions : [replayNormalizedOptions]

    const messagesIterables = options.map((option) => {
      // let's map from provided options to options and normalizers that needs to be added for dataTypes provided in options
      const messages = replayNormalized(option, ...getNormalizers(option.dataTypes))
      // separately check if any computables are needed for given dataTypes
      const computables = getComputables(option.dataTypes)

      if (computables.length > 0) {
        return compute(messages, ...computables)
      }

      return messages
    })

    const filterByDataType = constructDataTypeFilter(options)

    messages = messagesIterables.length === 1 ? messagesIterables[0] : combine(...messagesIterables)

    for await (const message of messages) {
      if (!filterByDataType(message)) {
        continue
      }

      const success = ws.send(JSON.stringify(message))
      // handle backpressure in case of slow clients
      if (!success) {
        while (ws.getBufferedAmount() > 0) {
          await wait(1)
        }
      }
    }

    // drain remaining buffered data before closing the socket
    while (ws.getBufferedAmount() > 0) {
      await wait(100)
    }

    ws.end(1000, 'WS replay-normalized finished')

    const endTimestamp = new Date().getTime()

    debug(
      'WebSocket /ws-replay-normalized finished, options: %o, time: %d seconds',
      replayNormalizedOptions,
      (endTimestamp - startTimestamp) / 1000
    )
  } catch (e: any) {
    if (!ws.closed) {
      ws.end(1011, e.toString())
    }

    debug('WebSocket /ws-replay-normalized  error: %o', e)
    console.error('WebSocket /ws-replay-normalized error:', e)
  } finally {
    // fix: iterator cleanup moved from `catch` to `finally` for consistency with
    // streamNormalizedWS (a no-op when the iterator already completed normally);
    // closing the iterator releases its underlying resources
    if (messages !== undefined) {
      messages!.return!()
    }
  }
}


================================================
FILE: src/ws/streamnormalized.ts
================================================
import { decode } from 'node:querystring'
import { combine, compute, Exchange, streamNormalized } from 'tardis-dev'
import type { HttpRequest } from 'uWebSockets.js'
import { debug } from '../debug.ts'
import { constructDataTypeFilter, getComputables, getNormalizers, StreamNormalizedRequestOptions, wait } from '../helpers.ts'

/**
 * Handles /ws-stream-normalized websocket requests: streams live normalized
 * exchange data (optionally combined across multiple exchanges/options) to the
 * connected client, with backpressure handling and per-exchange error tracking.
 *
 * Expects an `options` query string parameter containing JSON-serialized
 * StreamNormalizedRequestOptions (single object or array).
 */
export async function streamNormalizedWS(ws: any, req: HttpRequest) {
  let messages: AsyncIterableIterator<any> | undefined

  try {
    const startTimestamp = new Date().getTime()
    const parsedQuery = decode(req.getQuery())
    const optionsString = parsedQuery['options'] as string
    const streamNormalizedOptions = JSON.parse(optionsString) as StreamNormalizedRequestOptions

    debug('WebSocket /ws-stream-normalized started, options: %o', streamNormalizedOptions)

    const options = Array.isArray(streamNormalizedOptions) ? streamNormalizedOptions : [streamNormalizedOptions]
    // tracks consecutive connection errors per exchange; reset on successful delivery
    let subSequentErrorsCount: { [key in Exchange]?: number } = {}

    let retries = 0
    let bufferedAmount = 0

    const messagesIterables = options.map((option) => {
      // let's map from provided options to options and normalizers that needs to be added for dataTypes provided in options
      const messages = streamNormalized(
        {
          ...option,
          withDisconnectMessages: true,
          onError: (error) => {
            const exchange = option.exchange as Exchange
            if (subSequentErrorsCount[exchange] === undefined) {
              subSequentErrorsCount[exchange] = 0
            }

            subSequentErrorsCount[exchange]!++

            // optionally forward errors to the client as 'error' messages
            if (option.withErrorMessages && !ws.closed) {
              ws.send(
                JSON.stringify({
                  type: 'error',
                  exchange,
                  localTimestamp: new Date(),
                  details: error.message,
                  subSequentErrorsCount: subSequentErrorsCount[exchange]
                })
              )
            }

            debug('WebSocket /ws-stream-normalized %s WS connection error: %o', exchange, error)
          }
        },
        ...getNormalizers(option.dataTypes)
      )
      // separately check if any computables are needed for given dataTypes
      const computables = getComputables(option.dataTypes)

      if (computables.length > 0) {
        return compute(messages, ...computables)
      }

      return messages
    })

    const filterByDataType = constructDataTypeFilter(options)
    messages = messagesIterables.length === 1 ? messagesIterables[0] : combine(...messagesIterables)

    for await (const message of messages) {
      if (ws.closed) {
        return
      }

      const exchange = message.exchange as Exchange

      // give up on exchanges that keep failing to connect
      if (subSequentErrorsCount[exchange] !== undefined && subSequentErrorsCount[exchange]! >= 50) {
        ws.end(1011, `Too many subsequent errors when connecting to  ${exchange} WS API`)
        return
      }

      if (!filterByDataType(message)) {
        continue
      }

      retries = 0
      bufferedAmount = 0
      // handle backpressure in case of slow clients
      while ((bufferedAmount = ws.getBufferedAmount()) > 0) {
        retries += 1
        // fix: was misleadingly named `isState` — it flags a *stale* message
        const isStale = new Date().valueOf() - message.localTimestamp.valueOf() >= 6

        // log stale messages, stale meaning message was not sent in 6 ms or more (2 retries)

        if (isStale) {
          debug('Slow client, waiting %d ms, buffered amount: %d', 3 * retries, bufferedAmount)
        }
        if (retries > 300) {
          ws.end(1008, 'Too much backpressure')
          return
        }

        await wait(3 * retries)
      }

      ws.send(JSON.stringify(message))

      // a delivered non-disconnect message resets the exchange's error streak
      if (message.type !== 'disconnect') {
        subSequentErrorsCount[exchange] = 0
      }
    }

    while (ws.getBufferedAmount() > 0) {
      await wait(100)
    }

    ws.end(1000, 'WS stream-normalized finished')

    const endTimestamp = new Date().getTime()

    debug(
      'WebSocket /ws-stream-normalized finished, options: %o, time: %d seconds',
      streamNormalizedOptions,
      (endTimestamp - startTimestamp) / 1000
    )
  } catch (e: any) {
    if (!ws.closed) {
      ws.end(1011, e.toString())
    }

    debug('WebSocket /ws-stream-normalized  error: %o', e)
    console.error('WebSocket /ws-stream-normalized error:', e)
  } finally {
    // this will close underlying open WS connections
    if (messages !== undefined) {
      messages!.return!()
    }
  }
}


================================================
FILE: src/ws/subscriptionsmappers.ts
================================================
import { Exchange, Filter } from 'tardis-dev'

// https://www.bitmex.com/app/wsAPI
const bitmexMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'subscribe'
  },

  map(message: any) {
    // args may be a single string or an array of strings — normalize to an array
    const args: string[] = typeof message.args === 'string' ? [message.args] : message.args

    return args.map((arg) => {
      // 'trade:XBTUSD' style is channel + symbol; plain names have no symbol part
      const [channel, symbol] = arg.split(':')
      if (symbol === undefined) {
        return { channel }
      }
      return { channel, symbols: [symbol] }
    })
  }
}

// https://docs.pro.coinbase.com/#protocol-overview
const coinbaseMaper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'subscribe'
  },

  map(message: any) {
    // one logical Coinbase channel fans out into several raw feed message types
    const channelMappings = {
      full: ['received', 'open', 'done', 'match', 'change', 'full_snapshot'],
      level2: ['snapshot', 'l2update'],
      matches: ['match', 'last_match'],
      ticker: ['ticker']
    }

    const topLevelSymbols = message.product_ids
    const finalChannels: Filter<any>[] = []

    for (const channel of message.channels) {
      // channels can be plain strings (symbols taken from top-level product_ids)
      // or objects carrying their own product_ids
      const isPlainName = typeof channel == 'string'
      const channelName = isPlainName ? channel : channel.name
      const symbols = isPlainName ? topLevelSymbols : channel.product_ids

      for (const mapped of (channelMappings as any)[channelName] as string[]) {
        finalChannels.push({ channel: mapped, symbols })
      }
    }

    return finalChannels
  }
}

// https://docs.deribit.com/v2/#subscription-management
const deribitMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method === 'public/subscribe'
  },

  map(message: any) {
    return message.params.channels.map((channel: string) => {
      // channels come in two shapes and we need channel name + symbol from both:
      // "deribit_price_ranking.btc_usd"   -> single '.' separator
      // "book.ETH-PERPETUAL.100.1.100ms"  -> symbol sits between first and last '.'
      const firstSeparator = channel.indexOf('.')
      const lastSeparator = channel.lastIndexOf('.')
      const symbolEnd = lastSeparator == firstSeparator ? undefined : lastSeparator

      return {
        channel: channel.slice(0, firstSeparator),
        symbols: [channel.slice(firstSeparator + 1, symbolEnd)]
      }
    })
  }
}

// https://www.cryptofacilities.com/resources/hc/en-us/sections/360000120914-Websocket-API-Public
const cryptofacilitiesMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.event == 'subscribe'
  },

  map(message: any) {
    const { feed, product_ids } = message
    return [{ channel: feed, symbols: product_ids }]
  }
}

// https://www.bitstamp.net/websocket/v2/
const bitstampMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.event == 'bts:subscribe'
  },

  map(message: any) {
    // channel names look like 'live_trades_btcusd' — symbol follows the last '_'
    const fullChannel: string = message.data.channel
    const separator = fullChannel.lastIndexOf('_')
    const channel = fullChannel.slice(0, separator)
    const symbol = fullChannel.slice(separator + 1)

    return [{ channel, symbols: [symbol] }]
  }
}

// https://www.okex.com/docs/en/#spot_ws-sub
const okexMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op == 'subscribe'
  },

  map(message: any) {
    return message.args.map((arg: string) => {
      // args look like '<channel>:<symbol>'
      const separator = arg.indexOf(':')
      const channel = arg.slice(0, separator)
      const symbol = arg.slice(separator + 1)
      return { channel, symbols: [symbol] }
    })
  }
}
// https://docs.ftx.com/#request-format
const ftxMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'subscribe'
  },

  map(message: any) {
    const { channel, market } = message
    return [{ channel, symbols: [market] }]
  }
}

// https://www.kraken.com/features/websocket-api#message-subscribe
const krakenMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.event === 'subscribe'
  },

  map(message: any) {
    const filter = {
      channel: message.subscription.name,
      symbols: message.pair
    }
    return [filter]
  }
}
// https://lightning.bitflyer.com/docs?lang=en#json-rpc-2.0-over-websocket
const bitflyerMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method === 'subscribe'
  },

  map(message: any) {
    const availableChannels = ['lightning_board_snapshot', 'lightning_board', 'lightning_ticker', 'lightning_executions']
    const inputChannel = message.params.channel as string

    // find which known channel name prefixes the requested one; what follows
    // (after the joining '_') is the symbol
    let matched: string | undefined
    for (const candidate of availableChannels) {
      if (inputChannel.startsWith(candidate)) {
        matched = candidate
        break
      }
    }

    const channel = matched!
    const symbol = inputChannel.slice(channel.length + 1)

    return [{ channel, symbols: [symbol] }]
  }
}

// https://docs.gemini.com/websocket-api/#market-data-version-2
const geminiMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'subscribe'
  },

  map(message: any) {
    // the single 'l2' subscription fans out into several raw feed message types
    const channelMappings = {
      l2: ['trade', 'l2_updates', 'auction_open', 'auction_indicative', 'auction_result']
    }

    const finalChannels: Filter<any>[] = []

    for (const sub of message.subscriptions) {
      const matchingChannels = (channelMappings as any)[sub.name] as string[]
      for (const channel of matchingChannels) {
        finalChannels.push({ channel, symbols: sub.symbols })
      }
    }

    return finalChannels
  }
}

// https://binance-docs.github.io/apidocs/futures/en/#live-subscribing-unsubscribing-to-streams
const binanceMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method === 'SUBSCRIBE'
  },

  map(message: any) {
    return (message.params as string[]).map((param) => {
      // stream names look like 'btcusdt@trade' or 'btcusdt@depth@100ms':
      // symbol precedes the first '@'; channel sits between the first and last
      // '@' (a trailing '@...' suffix such as update speed is dropped)
      const firstSeparator = param.indexOf('@')
      const lastSeparator = param.lastIndexOf('@')
      const channelEnd = lastSeparator == firstSeparator ? undefined : lastSeparator

      return {
        channel: param.slice(firstSeparator + 1, channelEnd),
        symbols: [param.slice(0, firstSeparator)]
      }
    })
  }
}

// https://docs.binance.org/api-reference/dex-api/ws-connection.html
const binanceDEXMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method === 'subscribe'
  },

  map(message: any) {
    const { topic, symbols } = message
    return [{ channel: topic, symbols }]
  }
}

// https://huobiapi.github.io/docs/spot/v1/en/#websocket-market-data
const huobiMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.sub !== undefined
  },

  map(message: any) {
    // sub topics look like 'market.<symbol>.<channel>...'
    const [, symbol, channel] = message.sub.split('.')
    return [{ channel, symbols: [symbol] }]
  }
}

// https://github.com/bybit-exchange/bybit-official-api-docs/blob/master/en/websocket.md
const BYBIT_V5_API_SWITCH_DATE = new Date('2023-04-05T00:00:00.000Z')

const bybitMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'subscribe'
  },

  map(message: any) {
    // args look like '<channel>.<symbol>' (optionally with params in between);
    // first piece is the channel, last piece is the symbol
    return (message.args as string[]).map((arg) => {
      const pieces = arg.split('.')
      const channel = pieces[0]
      const symbol = pieces[pieces.length - 1]
      return { channel, symbols: [symbol] }
    })
  }
}

const bybitSpotMapper: SubscriptionMapper = {
  canHandle(message: any, date: Date) {
    // bybit-spot switched to the v5 message format on BYBIT_V5_API_SWITCH_DATE
    const isV5 = date.valueOf() > BYBIT_V5_API_SWITCH_DATE.valueOf()
    return isV5 ? message.op === 'subscribe' : message.event === 'sub'
  },

  map(message: any, date: Date) {
    if (date.valueOf() > BYBIT_V5_API_SWITCH_DATE.valueOf()) {
      // v5 format: args like '<channel>.<symbol>' — last piece is the symbol
      return (message.args as string[]).map((arg) => {
        const pieces = arg.split('.')
        return { channel: pieces[0], symbols: [pieces[pieces.length - 1]] }
      })
    }

    // legacy format carries topic and symbol as separate fields
    const filter = {
      channel: message.topic,
      symbols: [message.symbol]
    }
    return [filter]
  }
}

const blockchainComMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.action === 'subscribe'
  },

  map(message: any) {
    const { channel, symbol } = message
    return [{ channel, symbols: [symbol] }]
  }
}

// https://api.hitbtc.com/#subscribe-to-trades
const hitBtcMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method !== undefined
  },

  map(message: any) {
    // each subscribe method fans out into its snapshot + update message types
    const channelMappings: { [method: string]: string[] } = {
      subscribeTrades: ['snapshotTrades', 'updateTrades'],
      subscribeOrderbook: ['snapshotOrderbook', 'updateOrderbook']
    }

    return channelMappings[message.method].map((channel) => ({
      channel,
      symbols: [message.params.symbol]
    }))
  }
}

// bitfinex: accept every message and produce no filters
const bitfinexMapper: SubscriptionMapper = {
  canHandle() {
    return true
  },
  map() {
    return []
  }
}

const coinflexMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'subscribe'
  },

  map(message: any) {
    // args look like '<channel>:<symbol>'
    return message.args.map((arg: string) => {
      const [channel, symbol] = arg.split(':')
      return { channel, symbols: [symbol] }
    })
  }
}

const phemexMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method !== undefined
  },

  map(message: any) {
    // translate phemex RPC method names to channel names
    const channelsMapping: { [method: string]: string } = {
      'orderbook.subscribe': 'book',
      'trade.subscribe': 'trades',
      'market24h.subscribe': 'market24h'
    }

    const filter = {
      channel: channelsMapping[message.method],
      symbols: message.params
    }
    return [filter]
  }
}

const deltaMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'subscribe'
  },

  map(message: any) {
    return message.payload.channels.map((channel: any) => {
      let symbols = channel.symbols
      // mark_price symbols carry a 'MARK:' prefix in the recorded data
      if (symbols !== undefined && channel.name === 'mark_price') {
        symbols = symbols.map((s: any) => `MARK:${s}`)
      }
      return { channel: channel.name, symbols }
    })
  }
}

const gateIOMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method !== undefined && message.method.endsWith('.subscribe')
  },

  map(message: any) {
    // method looks like '<channel>.subscribe'
    const channel = message.method.split('.')[0]
    // params entries are either plain symbol strings or [symbol, ...options] tuples
    const symbols = message.params.map((s: any) => (typeof s === 'string' ? s : (s[0] as string)))

    return [{ channel, symbols }]
  }
}

const gateIOFuturesMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.event === 'subscribe'
  },

  map(message: any) {
    // channel looks like '<prefix>.<channel>'
    const channel = message.channel.split('.')[1]
    // payload entries are either plain symbol strings or [symbol, ...options] tuples
    const symbols = message.payload.map((s: any) => (typeof s === 'string' ? s : (s[0] as string)))

    return [{ channel, symbols }]
  }
}

const poloniexMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.command === 'subscribe'
  },

  map(message: any) {
    // poloniex exposes a single aggregated book channel; the subscribed channel id is the symbol
    const filter = {
      channel: 'price_aggregated_book',
      symbols: [message.channel]
    }
    return [filter]
  }
}

const ascendexMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'sub' || message.op === 'req'
  },

  map(message: any) {
    // 'req' messages carry an action, 'sub' messages carry a 'ch' like '<channel>:<symbol>'
    const channel = message.action ? message.action : message.ch.split(':')[0]
    const argsSymbol = message.args && message.args.symbol
    const symbol = argsSymbol ? argsSymbol : message.ch.split(':')[1]

    return [{ channel, symbols: symbol ? [symbol] : [] }]
  }
}

const dydxMapper: SubscriptionMapper = {
  canHandle: (message: any) => {
    return message.type === 'subscribe'
  },

  map: (message: any) => {
    return [
      {
        channel: message.channel,
        symbols: message.id ? [message.id] : []
      }
    ]
  }
}

const upbitMapper: SubscriptionMapper = {
  canHandle: (message: any) => {
    return Array.isArray(message)
  },

  map: (message: any) => {
    return message
      .filter((m: any) => {
        return m.type !== undefined
      })
      .map((m: any) => {
        return {
          channel: m.type,
          symbols: m.codes
        }
      })
  }
}

const serumMaper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'subscribe'
  },

  map(message: any) {
    // each logical channel fans out into several raw feed message types
    const channelMappings = {
      trades: ['recent_trades', 'trade'],
      level1: ['quote'],
      level2: ['l2snapshot', 'l2update'],
      level3: ['l3snapshot', 'open', 'fill', 'change', 'done']
    }

    const symbols = message.markets
    const mappedChannels = (channelMappings as any)[message.channel] as string[]

    return mappedChannels.map((channel) => ({ channel, symbols }))
  }
}

const cryptoComMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method === 'subscribe'
  },

  map(message: any) {
    // channels look like '<symbol>.<channel>'
    return message.params.channels.map((channel: string) => {
      const [symbol, channelName] = channel.split('.')
      return { channel: channelName, symbols: [symbol] }
    })
  }
}

const kucoinMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'subscribe'
  },

  map(message: any) {
    //  "topic": "/market/ticker:BTC-USDT,ETH-USDT",
    const [rawChannel, rawSymbols] = message.topic.split(':') as string[]

    return [
      {
        // drop the leading '/' from the channel path
        channel: rawChannel.substring(1),
        symbols: rawSymbols.split(',')
      }
    ]
  }
}

const bitnomialMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'subscribe'
  },

  map(message: any) {
    // one logical channel fans out into one or more raw feed message types
    const channelMappings = {
      book: ['book', 'levels'],
      trade: ['trade'],
      block: ['block']
    }

    const topLevelSymbols = message.product_codes
    const finalChannels: Filter<any>[] = []

    for (const channel of message.channels) {
      // channels can be plain strings (symbols taken from top-level product_codes)
      // or objects carrying their own product_codes
      const isPlainName = typeof channel == 'string'
      const channelName = isPlainName ? channel : channel.name
      const symbols = isPlainName ? topLevelSymbols : channel.product_codes

      for (const mapped of (channelMappings as any)[channelName] as string[]) {
        finalChannels.push({ channel: mapped, symbols })
      }
    }

    return finalChannels
  }
}

const wooxMapper: SubscriptionMapper = {
  canHandle: (message: any) => {
    return message.event === 'subscribe'
  },

  map: (message: any) => {
    // topics look like '<symbol>@<channel>'
    const [symbol, channel] = message.topic.split('@')
    return [
      {
        channel,
        // fix: symbols must be an array of symbol strings (as in every other
        // mapper and the Filter type) — previously the bare string was assigned
        symbols: [symbol]
      }
    ]
  }
}

const bitgetMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.op === 'subscribe'
  },

  map(message: any) {
    return message.args.map(({ channel, instId }: any) => ({
      channel,
      symbols: [instId]
    }))
  }
}
const coinbaseInternationalMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'SUBSCRIBE'
  },

  map(message: any) {
    const symbols = message.product_ids
    return message.channels.map((channel: string) => ({ channel, symbols }))
  }
}

const hyperliquidMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.method === 'subscribe'
  },

  map(message: any) {
    const filter = {
      channel: message.type,
      symbols: [message.coin]
    }
    return [filter]
  }
}

const lighterMapper: SubscriptionMapper = {
  canHandle(message: any) {
    return message.type === 'subscribe'
  },

  map(message: any) {
    // channels look like 'order_book/0' or 'market_stats/all'
    const parts = message.channel.split('/')
    const channel = parts[0]
    const symbol = parts[1]
    // 'all' (or a missing symbol part) means no symbol filtering
    const symbols = symbol === undefined || symbol === 'all' ? [] : [symbol]

    return [{ channel, symbols }]
  }
}

// Registry of subscribe-message mappers keyed by tardis-dev exchange id.
// Several exchanges share a mapper because their subscribe wire formats match
// (e.g. the okex-* variants, the binance-* variants, the huobi-* variants).
export const subscriptionsMappers: { [key in Exchange]: SubscriptionMapper } = {
  bitmex: bitmexMapper,
  coinbase: coinbaseMaper,
  deribit: deribitMapper,
  cryptofacilities: cryptofacilitiesMapper,
  bitstamp: bitstampMapper,
  okex: okexMapper,
  'okex-futures': okexMapper,
  'okex-swap': okexMapper,
  'okex-options': okexMapper,
  ftx: ftxMapper,
  'ftx-us': ftxMapper,
  kraken: krakenMapper,
  bitflyer: bitflyerMapper,
  gemini: geminiMapper,
  binance: binanceMapper,
  'binance-futures': binanceMapper,
  'binance-delivery': binanceMapper,
  'binance-jersey': binanceMapper,
  'binance-us': binanceMapper,
  'binance-dex': binanceDEXMapper,
  huobi: huobiMapper,
  'huobi-dm': huobiMapper,
  'huobi-dm-swap': huobiMapper,
  'huobi-dm-linear-swap': huobiMapper,
  bybit: bybitMapper,
  bitfinex: bitfinexMapper,
  'bitfinex-derivatives': bitfinexMapper,
  okcoin: okexMapper,
  hitbtc: hitBtcMapper,
  coinflex: coinflexMapper,
  phemex: phemexMapper,
  delta: deltaMapper,
  'gate-io': gateIOMapper,
  'gate-io-futures': gateIOFuturesMapper,
  poloniex: poloniexMapper,
  ascendex: ascendexMapper,
  dydx: dydxMapper,
  'dydx-v4': dydxMapper,
  'huobi-dm-options': huobiMapper,
  upbit: upbitMapper,
  serum: serumMaper,
  'star-atlas': serumMaper,
  mango: serumMaper,
  'bybit-spot': bybitSpotMapper,
  'crypto-com': cryptoComMapper,
  kucoin: kucoinMapper,
  bitnomial: bitnomialMapper,
  'woo-x': wooxMapper,
  'blockchain-com': blockchainComMapper,
  'bybit-options': bybitMapper,
  'binance-european-options': binanceMapper,
  'okex-spreads': okexMapper,
  'kucoin-futures': kucoinMapper,
  bitget: bitgetMapper,
  'bitget-futures': bitgetMapper,
  'coinbase-international': coinbaseInternationalMapper,
  hyperliquid: hyperliquidMapper,
  lighter: lighterMapper
}

// Contract implemented by every per-exchange mapper above: `canHandle` decides
// whether a raw websocket message is a subscribe request for that exchange,
// and `map` converts it into tardis-dev replay filters. `date` is the replay
// start date, used by mappers whose wire format changed over time (bybit-spot).
export type SubscriptionMapper = {
  canHandle: (message: object, date: Date) => boolean
  map: (message: object, date: Date) => Filter<string>[]
}


================================================
FILE: test/__snapshots__/tardismachine.test.ts.snap
================================================
[File too large to display: 30.5 MB]

================================================
FILE: test/subscriptionsmappers.test.ts
================================================
import { subscriptionsMappers } from '../src/ws/subscriptionsmappers.ts'

// Exercises the lighter exchange subscription mapper, whose channels are
// encoded as '<channel>/<marketId>' (or '<channel>/all' for market-wide stats).
describe('subscriptions mappers', () => {
  test('maps lighter symbol-scoped subscriptions', () => {
    const lighter = subscriptionsMappers.lighter
    const now = new Date()

    expect(lighter.canHandle({ type: 'subscribe', channel: 'order_book/0' }, now)).toBe(true)

    // table-driven: the market id after '/' becomes the single symbol of the filter
    const symbolScopedCases: Array<{ channel: string; expectedChannel: string; expectedSymbol: string }> = [
      { channel: 'order_book/0', expectedChannel: 'order_book', expectedSymbol: '0' },
      { channel: 'trade/1', expectedChannel: 'trade', expectedSymbol: '1' },
      { channel: 'ticker/2048', expectedChannel: 'ticker', expectedSymbol: '2048' }
    ]

    for (const { channel, expectedChannel, expectedSymbol } of symbolScopedCases) {
      expect(lighter.map({ type: 'subscribe', channel }, now)).toEqual([{ channel: expectedChannel, symbols: [expectedSymbol] }])
    }
  })

  test('maps lighter all-market stats subscriptions', () => {
    const lighter = subscriptionsMappers.lighter
    const now = new Date()

    // '<channel>/all' maps to the bare channel with an empty symbols list
    const allMarketCases: Array<{ channel: string; expectedChannel: string }> = [
      { channel: 'market_stats/all', expectedChannel: 'market_stats' },
      { channel: 'spot_market_stats/all', expectedChannel: 'spot_market_stats' }
    ]

    for (const { channel, expectedChannel } of allMarketCases) {
      expect(lighter.map({ type: 'subscribe', channel }, now)).toEqual([{ channel: expectedChannel, symbols: [] }])
    }
  })
})


================================================
FILE: test/tardismachine.test.ts
================================================
import WebSocket from 'ws'
import fetch from 'node-fetch'
import split2 from 'split2'
import { EXCHANGES, type FilterForExchange, getExchangeDetails } from 'tardis-dev'
import { TardisMachine } from '../dist/index.js'

// Test server endpoints: HTTP routes are requested on PORT, WebSocket routes
// are addressed at PORT + 1 (the machine is started with start(PORT) below).
const PORT = 8072
const HTTP_REPLAY_DATA_FEEDS_URL = `http://localhost:${PORT}/replay`
const HTTP_REPLAY_NORMALIZED_URL = `http://localhost:${PORT}/replay-normalized`
const WS_REPLAY_NORMALIZED_URL = `ws://localhost:${PORT + 1}/ws-replay-normalized`
const WS_REPLAY_URL = `ws://localhost:${PORT + 1}/ws-replay`

// Serializes an options value as URL-safe JSON for use in a query string.
const serializeOptions = (options: any) => encodeURIComponent(JSON.stringify(options))
describe('tardis-machine', () => {
  let tardisMachine: TardisMachine

  // Boot a single TardisMachine instance (backed by a local disk cache) that is
  // shared by every test in this suite.
  beforeAll(async () => {
    tardisMachine = new TardisMachine({ cacheDir: './.cache' })
    await tardisMachine.start(PORT) // start server
  })

  // Shut the server down so Jest can exit cleanly.
  afterAll(async () => {
    await tardisMachine.stop()
  })

  describe('HTTP GET /replay-normalized', () => {
    // Requests the endpoint with the given options and collects every NDJSON
    // line, re-serialized so snapshots are stable regardless of whitespace.
    const fetchNormalizedMessages = async (options: any) => {
      const response = await fetch(`${HTTP_REPLAY_NORMALIZED_URL}?options=${serializeOptions(options)}`)

      expect(response.status).toBe(200)

      const messagesStream = response.body!.pipe(split2()) // split response body by new lines

      const messages: string[] = []
      for await (let line of messagesStream) {
        const message = JSON.parse(line)

        messages.push(JSON.stringify(message))
      }

      return messages
    }

    // NOTE(review): the first two tests were originally wrapped in a single comma
    // expression `;(test(...), test(...))` — a formatting artifact; each test is
    // now registered as its own statement (titles unchanged, snapshots still match).
    test(
      'replays Bitmex ETHUSD trades and order book changes',
      async () => {
        const options = {
          exchange: 'bitmex',
          symbols: ['ETHUSD'],
          from: '2019-06-01',
          to: '2019-06-01 00:01',
          dataTypes: ['trade', 'book_change']
        }

        expect(await fetchNormalizedMessages(options)).toMatchSnapshot()
      },
      1000 * 60 * 10
    )

    test(
      'replays Bitmex ETHUSD order book real time quotes and 6 second 5 levels snapshots',
      async () => {
        const options = {
          exchange: 'bitmex',
          symbols: ['ETHUSD'],
          from: '2019-06-01',
          to: '2019-06-01 00:01',
          dataTypes: ['quote', 'book_snapshot_5_6s']
        }

        expect(await fetchNormalizedMessages(options)).toMatchSnapshot()
      },
      1000 * 60 * 10
    )

    test(
      'replays Bitmex XBTUSD and Deribit BTC-PERPETUAL trade 1 second bars',
      async () => {
        // two replay option sets are consolidated into a single response stream
        const options = [
          {
            exchange: 'bitmex',
            symbols: ['ETHUSD'],
            from: '2019-06-01',
            to: '2019-06-01 00:01',
            dataTypes: ['trade_bar_1s']
          },
          {
            exchange: 'deribit',
            symbols: ['BTC-PERPETUAL'],
            from: '2019-06-01',
            to: '2019-06-01 00:01',
            dataTypes: ['trade_bar_1s']
          }
        ]

        expect(await fetchNormalizedMessages(options)).toMatchSnapshot()
      },
      1000 * 60 * 10
    )
  })

  describe('HTTP GET /replay', () => {
    test('invalid params', async () => {
      // unparseable from/to dates must be rejected
      let response = await fetch(
        `${HTTP_REPLAY_DATA_FEEDS_URL}?options=${serializeOptions({
          exchange: 'binance',
          from: 'sdf',
          to: 'ssd'
        })}`
      )
      expect(response.status).toBe(500)

      // 'to' date earlier than 'from' date must be rejected as well
      response = await fetch(
        `${HTTP_REPLAY_DATA_FEEDS_URL}?options=${serializeOptions({
          exchange: 'binance',
          from: '2019-06-05 00:00Z',
          to: '2019-05-05 00:05Z'
        })}`
      )

      expect(response.status).toBe(500)
    })

    test(
      // NOTE(review): the title previously said "April 2019" although the replayed
      // range below is May; this test asserts exact counts (no snapshots), so the
      // title is safe to correct.
      'replays Bitmex ETHUSD trades and order book updates for first of May 2019',
      async () => {
        const filters: FilterForExchange['bitmex'][] = [
          {
            channel: 'trade',
            symbols: ['ETHUSD']
          },
          {
            channel: 'orderBookL2',
            symbols: ['ETHUSD']
          }
        ]

        const options = {
          exchange: 'bitmex',
          from: '2019-05-01',
          to: '2019-05-02',
          filters
        }

        const response = await fetch(`${HTTP_REPLAY_DATA_FEEDS_URL}?options=${serializeOptions(options)}`)

        expect(response.status).toBe(200)

        const ethTradeMessages = response.body!.pipe(split2()) // split response body by new lines

        let receivedTradesCount = 0
        let receivedOrderBookUpdatesCount = 0

        // tally messages per source table (strict equality instead of loose '==')
        for await (let line of ethTradeMessages) {
          const { message } = JSON.parse(line)

          if (message.table === 'trade') {
            receivedTradesCount++
          }

          if (message.table === 'orderBookL2') {
            receivedOrderBookUpdatesCount++
          }
        }

        expect(receivedTradesCount).toBe(28629)
        expect(receivedOrderBookUpdatesCount).toBe(1328937)
      },
      1000 * 60 * 10
    )

    test(
      'unauthorizedAccess',
      async () => {
        // no API key is configured in this suite — NOTE(review): presumably this
        // range lies outside the anonymously accessible sample, hence the 401;
        // confirm against tardis.dev API access docs
        const options = {
          exchange: 'bitmex',
          from: '2019-05-02',
          to: '2019-05-03'
        }

        const response = await fetch(`${HTTP_REPLAY_DATA_FEEDS_URL}?options=${serializeOptions(options)}`)

        expect(response.status).toBe(401)
      },
      30 * 1000
    )
  })

  describe('WS /ws-replay', () => {
    // NOTE(review): the "subcribes" typo in the titles below is kept on purpose —
    // test titles key the stored Jest snapshots, so renaming them would orphan
    // the existing snapshot entries.
    test(
      'subcribes to and replays historical Coinbase data feed of 1st of Jun 2019 (ZEC-USDC trades)',
      async () => {
        let messages: string[] = []
        // the third constructor argument sends the native subscribe message on open
        const simpleCoinbaseClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=coinbase&from=2019-06-01&to=2019-06-02`,
          (message) => {
            messages.push(message as string)
          },
          () => {
            simpleCoinbaseClient.send({
              type: 'subscribe',
              channels: [
                {
                  name: 'matches',
                  product_ids: ['ZEC-USDC']
                }
              ]
            })
          }
        )

        // the server closes the socket once the replay range is exhausted
        await simpleCoinbaseClient.closed()
        expect(messages).toMatchSnapshot()
      },
      10 * 60 * 1000
    )

    test(
      'subcribes to and replays historical Cryptofacilities data feed of 1st of Jun 2019 (PI_XBTUSD trades)',
      async () => {
        let messages: string[] = []
        const simpleCFClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=cryptofacilities&from=2019-06-01&to=2019-06-02`,
          (message) => {
            messages.push(message as string)
          },
          () => {
            simpleCFClient.send({
              event: 'subscribe',
              feed: 'trade',
              product_ids: ['PI_XBTUSD']
            })
          }
        )

        await simpleCFClient.closed()
        expect(messages).toMatchSnapshot()
      },
      10 * 60 * 1000
    )

    test(
      'subcribes to and replays historical Bitstamp data feed of 1st of Jun 2019 (LTCUSD trades)',
      async () => {
        let messages: string[] = []
        const simpleBitstampClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=bitstamp&from=2019-06-01&to=2019-06-02`,
          (message) => {
            messages.push(message as string)
          },
          () => {
            simpleBitstampClient.send({
              event: 'bts:subscribe',
              data: {
                channel: 'live_trades_ltcusd'
              }
            })
          }
        )

        await simpleBitstampClient.closed()
        expect(messages).toMatchSnapshot()
      },
      10 * 60 * 1000
    )

    test(
      'subcribes to and replays historical OKEX data feed of 1st of Jun 2019 (BTC-USDT trades)',
      async () => {
        let messages: string[] = []
        const simpleOkexClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=okex&from=2019-06-01&to=2019-06-01T02:00Z`,
          (message) => {
            messages.push(message as string)
          },
          () => {
            simpleOkexClient.send({ op: 'subscribe', args: ['spot/trade:BTC-USDT'] })
          }
        )

        await simpleOkexClient.closed()
        expect(messages).toMatchSnapshot()
      },
      10 * 60 * 1000
    )

    test(
      'subcribes to and replays historical BitMEX data feed of 1st of Jun 2019 (ADAM19 trades) using simple and official BitMEX clients',
      async () => {
        let trades: string[] = []
        let wsURL = `${WS_REPLAY_URL}?exchange=bitmex&from=2019-06-01&to=2019-06-02`
        const simpleBitmexWSClient = new SimpleWebsocketClient(
          wsURL,
          (message) => {
            // collect only inserted trades for the ADAM19 symbol
            const parsedMessage = JSON.parse(message)
            if (parsedMessage.action != 'insert') return

            parsedMessage.data.forEach((trade: any) => {
              if (trade.symbol != 'ADAM19') return

              trades.push(JSON.stringify(trade))
            })
          },
          () => {
            simpleBitmexWSClient.send({
              op: 'subscribe',
              args: ['trade:ADAM19']
            })
          }
        )

        await simpleBitmexWSClient.closed()
        expect(trades).toMatchSnapshot('ADAM19Trades')
      },
      10 * 60 * 1000
    )

    test(
      'subcribes to and replays historical BitMEX data feed of 1st of Jun 2019 (XBTUSD trades and  orderBookL2 updates)',
      async () => {
        const startTimestamp = new Date().getTime()
        let messagesCount = 0
        let lastBitmexMessage

        const simpleBitmexWSClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=bitmex&from=2019-06-01&to=2019-06-02`,
          (message) => {
            messagesCount++
            lastBitmexMessage = message
          },
          () => {
            simpleBitmexWSClient.send({
              op: 'subscribe',
              args: ['trade:XBTUSD', 'orderBookL2:XBTUSD']
            })
          }
        )

        await simpleBitmexWSClient.closed()
        console.log(`WS received  for BitMEX ${messagesCount} in ${(new Date().getTime() - startTimestamp) / 1000} seconds`)
        // only the final message is snapshotted; the exact total count pins the rest
        expect(lastBitmexMessage).toMatchSnapshot()
        expect(messagesCount).toBe(7690673)
      },
      10 * 60 * 1000
    )
    test(
      'subcribes to and replays historical BitMEX and Deribit data feed of 1st of Jun 2019 (XBTUSD trades and book updates)',
      async () => {
        const startTimestamp = new Date().getTime()
        let bitmexMessagesCount = 0
        let deribitMessagesCount = 0
        let lastBitmexMessage
        let lastDeribitMessage

        // session=common synchronizes both replays into one shared session
        const simpleBitmexWSClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=bitmex&from=2019-06-01&to=2019-06-02&session=common`,
          (message) => {
            lastBitmexMessage = message
            bitmexMessagesCount++
          },
          () => {
            simpleBitmexWSClient.send({
              op: 'subscribe',
              args: ['trade:XBTUSD', 'orderBookL2:XBTUSD']
            })
          }
        )

        const simpleDeribitWSClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=deribit&from=2019-06-01&to=2019-06-02&session=common`,
          (message) => {
            lastDeribitMessage = message
            deribitMessagesCount++
          },
          () => {
            simpleDeribitWSClient.send({
              jsonrpc: '2.0',
              method: 'public/subscribe',
              params: {
                channels: ['book.BTC-PERPETUAL.raw']
              }
            })

            simpleDeribitWSClient.send({
              jsonrpc: '2.0',
              method: 'public/subscribe',
              params: {
                channels: ['trades.BTC-PERPETUAL.raw']
              }
            })
          }
        )

        await simpleBitmexWSClient.closed()

        const timestamp = new Date().getTime()

        await simpleDeribitWSClient.closed()
        // both clients should close in the same moment basically
        // NOTE(review): the matcher was previously referenced without being invoked
        // (`.toBeTruthy`), so this assertion never actually ran — fixed to call it
        expect(new Date().getTime() - timestamp < 100).toBeTruthy()

        console.log(
          `WS received for BitMEX ${bitmexMessagesCount} messages, for Deribit ${deribitMessagesCount} messages in ${
            (new Date().getTime() - startTimestamp) / 1000
          } seconds`
        )

        expect(bitmexMessagesCount).toBe(7690673)
        expect(deribitMessagesCount).toBe(7029393)

        expect(lastBitmexMessage).toMatchSnapshot()
        expect(lastDeribitMessage).toMatchSnapshot()
      },
      20 * 60 * 1000
    )

    test(
      // NOTE(review): the title says "April 2019" but the replayed range below is
      // June; kept as-is because the title keys the stored snapshots
      'subcribes to and replays historical BitMEX and Deribit data feed of first 5 minutes of 1st of April 2019 (XBTUSD trades and book updates)',
      async () => {
        let bitmexMessages: string[] = []
        let deribitMessages: string[] = []

        const simpleBitmexWSClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=bitmex&from=2019-06-01&to=2019-06-01T00:05Z&session=common`,
          (message) => {
            bitmexMessages.push(message)
          },
          () => {
            simpleBitmexWSClient.send({
              op: 'subscribe',
              args: ['trade:XBTUSD', 'orderBookL2:XBTUSD']
            })
          }
        )

        const simpleDeribitWSClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=deribit&from=2019-06-01&to=2019-06-01T00:05Z&session=common`,
          (message) => {
            deribitMessages.push(message)
          },
          () => {
            simpleDeribitWSClient.send({
              jsonrpc: '2.0',
              method: 'public/subscribe',
              params: {
                channels: ['book.BTC-PERPETUAL.raw']
              }
            })

            simpleDeribitWSClient.send({
              jsonrpc: '2.0',
              method: 'public/subscribe',
              params: {
                channels: ['trades.BTC-PERPETUAL.raw']
              }
            })
          }
        )

        await simpleBitmexWSClient.closed()

        const timestamp = new Date().getTime()

        await simpleDeribitWSClient.closed()
        // both clients should close in the same moment basically
        // NOTE(review): the matcher was previously referenced without being invoked
        // (`.toBeTruthy`), so this assertion never actually ran — fixed to call it
        expect(new Date().getTime() - timestamp < 100).toBeTruthy()

        expect(bitmexMessages).toMatchSnapshot()
        expect(deribitMessages).toMatchSnapshot()
      },
      20 * 60 * 1000
    )

    test(
      'subcribes to and replays historical Binance data feed of 1st of July 2019 5 minutes (btcusdt trades)',
      async () => {
        let messages: string[] = []
        const simpleBinanceClient = new SimpleWebsocketClient(
          `${WS_REPLAY_URL}?exchange=binance&from=2019-07-01&to=2019-07-01T00:05Z`,
          (message) => {
            messages.push(message as string)
          },
          () => {
            // Binance combined-stream style subscribe message
            simpleBinanceClient.send({ method: 'SUBSCRIBE', params: ['btcusdt@trade'] })
          }
        )

        await simpleBinanceClient.closed()
        expect(messages).toMatchSnapshot()
      },
      10 * 60 * 1000
    )
  })

  describe('WS /ws-replay-normalized', () => {
    // Connects to the ws-replay-normalized endpoint with the given options and
    // buffers every received message until the server closes the connection.
    const collectNormalizedMessages = async (options: any) => {
      let messages: string[] = []

      const simpleWSClient = new SimpleWebsocketClient(`${WS_REPLAY_NORMALIZED_URL}?options=${serializeOptions(options)}`, (message) => {
        messages.push(message)
      })

      await simpleWSClient.closed()

      return messages
    }

    // NOTE(review): the first two tests were originally wrapped in a single comma
    // expression `;(test(...), test(...))` — a formatting artifact; each test is
    // now registered as its own statement (titles unchanged, snapshots still match).
    test(
      'replays Bitmex ETHUSD trades and order book changes',
      async () => {
        const options = {
          exchange: 'bitmex',
          symbols: ['ETHUSD'],
          from: '2019-06-01',
          to: '2019-06-01T00:01Z',
          dataTypes: ['trade', 'book_change']
        }

        expect(await collectNormalizedMessages(options)).toMatchSnapshot()
      },
      1000 * 60 * 10
    )

    test(
      'replays Bitmex ETHUSD order book real time quotes and 6 second 5 levels snapshots',
      async () => {
        const options = {
          exchange: 'bitmex',
          symbols: ['ETHUSD'],
          from: '2019-06-01',
          to: '2019-06-01T00:01Z',
          dataTypes: ['quote', 'book_snapshot_5_6s']
        }

        expect(await collectNormalizedMessages(options)).toMatchSnapshot()
      },
      1000 * 60 * 10
    )

    test(
      'replays Bitmex XBTUSD and Deribit BTC-PERPETUAL trade 1 second bars',
      async () => {
        // two replay option sets consolidated into a single websocket stream
        const options = [
          {
            exchange: 'bitmex',
            symbols: ['ETHUSD'],
            from: '2019-06-01',
            to: '2019-06-01T00:01Z',
            dataTypes: ['trade_bar_1s']
          },
          {
            exchange: 'deribit',
            symbols: ['BTC-PERPETUAL'],
            from: '2019-06-01',
            to: '2019-06-01T00:01Z',
            dataTypes: ['trade_bar_1s']
          }
        ]

        expect(await collectNormalizedMessages(options)).toMatchSnapshot()
      },
      1000 * 60 * 10
    )
  })

  describe('WS /ws-stream-normalized', () => {
    test(
      'streams normalized real-time messages for each supported exchange as single consolidated stream',
      async () => {
        // exchanges that additionally get the 'derivative_ticker' data type requested
        const exchangesWithDerivativeInfo = [
          'bitmex',
          'binance-futures',
          'bitfinex-derivatives',
          'cryptofacilities',
          'deribit',
          'okex-futures',
          'okex-swap',
          'bybit',
          'phemex',
          'ftx',
          'delta',
          'binance-delivery',
          'huobi-dm',
          'huobi-dm-swap',
          'huobi-dm-linear-swap',
          'gate-io-futures',
          'coinflex'
        ]
        // exchanges this test deliberately skips
        const excludedExchanges = new Set([
          'binance-dex',
          'binance-jersey',
          'coinbase-international',
          'coinflex',
          'dydx',
          'ftx',
          'ftx-us',
          'huobi-dm-options',
          'mango',
          'okex-spreads',
          'okcoin',
          'serum',
          'star-atlas'
        ])

        // build per-exchange stream options: up to two symbols that are still
        // available today, raw + computed data types, disconnect/error messages on
        const options = (
          await Promise.all(
            EXCHANGES.filter((exchange) => excludedExchanges.has(exchange) === false).map(async (exchange) => {
              const exchangeDetails = await getExchangeDetails(exchange)
              const dataTypes: any[] = ['trade', 'trade_bar_10ms', 'book_change', 'book_snapshot_3_0ms']

              if (exchangesWithDerivativeInfo.includes(exchange)) {
                dataTypes.push('derivative_ticker')
              }

              var symbols = exchangeDetails.availableSymbols
                .filter((s) => s.id !== undefined)
                .filter((s) => s.availableTo === undefined || new Date(s.availableTo).valueOf() > new Date().valueOf())
                .slice(0, 2)
                .map((s) => s.id)

              return {
                exchange,
                symbols,
                withDisconnectMessages: true,
                withErrorMessages: true,
                timeoutIntervalMS: 30 * 1000,
                dataTypes: dataTypes
              }
            })
          )
        ).filter((option) => option.symbols.length > 0)

        let count = 0
        const countsByExchange: Record<string, number> = {}
        const errorCountsByExchange: Record<string, number> = {}
        const lastErrorByExchange: Record<string, string> = {}

        await new Promise<void>((resolve, reject) => {
          let settled = false

          // diagnostic summary shared by the progress log and the timeout error
          const summarize = () => {
            const exchangesWithNoMessages = options
              .map((option) => option.exchange)
              .filter((exchange) => (countsByExchange[exchange] ?? 0) === 0)

            const exchangesWithErrors = Object.entries(errorCountsByExchange)
              .sort((left, right) => right[1] - left[1])
              .map(([exchange, errorCount]) => ({
                exchange,
                errorCount,
                lastError: lastErrorByExchange[exchange]
              }))

            return {
              totalMessages: count,
              exchangesWithNoMessages,
              exchangesWithErrors
            }
          }

          const ws = new SimpleWebsocketClient(
            `ws://localhost:${PORT + 1}/ws-stream-normalized?options=${serializeOptions(options)}`,
            (message) => {
              const parsedMessage = JSON.parse(message)

              // error messages are tallied separately and don't count towards progress
              if (parsedMessage.type === 'error') {
                errorCountsByExchange[parsedMessage.exchange] = (errorCountsByExchange[parsedMessage.exchange] ?? 0) + 1
                lastErrorByExchange[parsedMessage.exchange] = parsedMessage.details
                return
              }

              count++
              countsByExchange[parsedMessage.exchange] = (countsByExchange[parsedMessage.exchange] ?? 0) + 1

              // success condition: enough consolidated messages received overall
              if (count > 20000 && !settled) {
                settled = true
                clearInterval(progressInterval)
                clearTimeout(diagnosticTimeout)
                ws.close()
                resolve()
              }
            },
            () => {},
            (error) => {
              if (settled) {
                return
              }

              settled = true
              clearInterval(progressInterval)
              clearTimeout(diagnosticTimeout)
              reject(error)
            }
          )

          // progressInterval/diagnosticTimeout are declared after the client, but are
          // only referenced from callbacks that fire once this synchronous block completes
          const progressInterval = setInterval(() => {
            console.log('WS /ws-stream-normalized progress', summarize())
          }, 30 * 1000)

          // fail with a diagnostic summary if the message quota isn't reached in time
          const diagnosticTimeout = setTimeout(
            () => {
              if (settled) {
                return
              }

              settled = true
              clearInterval(progressInterval)
              ws.close()
              reject(new Error(`WS /ws-stream-normalized diagnostic timeout: ${JSON.stringify(summarize())}`))
            },
            1000 * 60 * 3 + 30 * 1000
          )
        })
      },
      1000 * 60 * 4
    )
  })
})

// Minimal WebSocket test client: forwards each incoming message to a callback
// and exposes a promise-style closed() helper tests await for replay end.
class SimpleWebsocketClient {
  private readonly _socket: WebSocket
  // resolves once the underlying socket emits 'close'
  private readonly _closedPromise: Promise<void>

  constructor(
    url: string,
    onMessageCB: (message: string) => void,
    onOpen: () => void = () => {},
    onError: (error: Error) => void = () => {}
  ) {
    this._socket = new WebSocket(url)
    this._socket.on('message', function (message: Buffer) {
      onMessageCB(message.toString())
    })
    this._socket.on('open', onOpen)
    this._socket.on('error', (err) => {
      console.log('SimpleWebsocketClient Error', err)
      onError(err)
    })
    // NOTE(review): closed() previously busy-polled an isClosed flag every 10ms;
    // awaiting the 'close' event directly removes the polling loop and its latency
    this._closedPromise = new Promise<void>((resolve) => {
      this._socket.on('close', () => resolve())
    })
  }

  // Sends the payload JSON-serialized over the socket.
  public send(payload: any) {
    this._socket.send(JSON.stringify(payload))
  }

  // Initiates a client-side close of the socket.
  public close() {
    this._socket.close()
  }

  // Resolves once the socket has closed (server- or client-initiated).
  public async closed() {
    await this._closedPromise
  }
}


================================================
FILE: test/tsconfig.json
================================================
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "rootDir": "..",
    "isolatedModules": true,
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "types": ["node", "jest"],
    "noEmit": true
  },
  "include": ["."]
}


================================================
FILE: tsconfig.json
================================================
{
  "include": ["src"],
  "compilerOptions": {
    "rootDir": "src",
    "allowSyntheticDefaultImports": true,
    "sourceMap": true,
    "declaration": true,
    "declarationMap": true,
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "rewriteRelativeImportExtensions": true,
    "target": "ESNext",
    "pretty": true,
    "strict": true,
    "outDir": "dist",
    "noFallthroughCasesInSwitch": true,
    "noImplicitReturns": true,
    "noUnusedParameters": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "lib": ["ESNext", "DOM"],
    "types": ["node"]
  }
}
Download .txt
gitextract_yzu_3thd/

├── .dockerignore
├── .github/
│   └── workflows/
│       ├── ci.yaml
│       ├── npm_audit.yaml
│       └── publish.yaml
├── .gitignore
├── .npmrc
├── .prettierignore
├── .prettierrc
├── AGENTS.md
├── ARCHITECTURE.md
├── CLAUDE.md
├── Dockerfile
├── LICENSE
├── README.md
├── benchmark.js
├── bin/
│   └── tardis-machine.js
├── package.json
├── src/
│   ├── debug.ts
│   ├── helpers.ts
│   ├── http/
│   │   ├── healthCheck.ts
│   │   ├── index.ts
│   │   ├── replay.ts
│   │   └── replaynormalized.ts
│   ├── index.ts
│   ├── tardismachine.ts
│   └── ws/
│       ├── index.ts
│       ├── replay.ts
│       ├── replaynormalized.ts
│       ├── streamnormalized.ts
│       └── subscriptionsmappers.ts
├── test/
│   ├── __snapshots__/
│   │   └── tardismachine.test.ts.snap
│   ├── subscriptionsmappers.test.ts
│   ├── tardismachine.test.ts
│   └── tsconfig.json
└── tsconfig.json
Download .txt
SYMBOL INDEX (63 symbols across 12 files)

FILE: benchmark.js
  class SimpleWebsocketClient (line 12) | class SimpleWebsocketClient {
    method constructor (line 13) | constructor(url, onMessageCB, onOpen) {
    method send (line 22) | send(payload) {
    method closed (line 26) | async closed() {
  constant EXCHANGE (line 35) | const EXCHANGE = 'bitmex'
  constant SYMBOL (line 36) | const SYMBOL = 'XBTUSD'
  constant TRADES_AND_BOOK_FILTERS (line 38) | const TRADES_AND_BOOK_FILTERS = [
  constant TRADES_AND_BOOK_SUBSCRIPTION_MESSAGES (line 48) | const TRADES_AND_BOOK_SUBSCRIPTION_MESSAGES = [
  constant FROM_DATE (line 55) | const FROM_DATE = '2020-02-01'
  constant TO_DATE (line 56) | const TO_DATE = '2020-02-02'
  function httpReplayBenchmark (line 58) | async function httpReplayBenchmark({ JSONParseResponse }) {
  function httpReplayNormalizedBenchmark (line 90) | async function httpReplayNormalizedBenchmark({ computeTBTBookSnapshots }) {
  function wsReplayBenchmark (line 122) | async function wsReplayBenchmark({ JSONParseResponse }) {
  function wsReplayNormalizedBenchmark (line 156) | async function wsReplayNormalizedBenchmark({ computeTBTBookSnapshots }) {
  function runBenchmarks (line 195) | async function runBenchmarks() {

FILE: bin/tardis-machine.js
  constant DEFAULT_PORT (line 14) | const DEFAULT_PORT = 8000
  function start (line 63) | async function start() {

FILE: src/helpers.ts
  type WithDataType (line 18) | type WithDataType = {
  type ReplayNormalizedOptionsWithDataType (line 22) | type ReplayNormalizedOptionsWithDataType = ReplayNormalizedOptions<any, ...
  type ReplayNormalizedRequestOptions (line 24) | type ReplayNormalizedRequestOptions = ReplayNormalizedOptionsWithDataTyp...
  type StreamNormalizedOptionsWithDataType (line 26) | type StreamNormalizedOptionsWithDataType = StreamNormalizedOptions<any, ...
  type StreamNormalizedRequestOptions (line 28) | type StreamNormalizedRequestOptions = StreamNormalizedOptionsWithDataTyp...
  function getRequestedDataTypes (line 58) | function getRequestedDataTypes(options: ReplayNormalizedOptionsWithDataT...
  function constructDataTypeFilter (line 75) | function constructDataTypeFilter(options: (ReplayNormalizedOptionsWithDa...
  function getComputables (line 135) | function getComputables(dataTypes: string[]): ComputableFactory<any>[] {
  function parseAsTradeBarComputable (line 155) | function parseAsTradeBarComputable(dataType: string) {
  function parseAsBookSnapshotComputable (line 177) | function parseAsBookSnapshotComputable(dataType: string) {
  function parseAsQuoteComputable (line 221) | function parseAsQuoteComputable(dataType: string) {

FILE: src/http/healthCheck.ts
  constant BYTES_IN_MB (line 2) | const BYTES_IN_MB = 1024 * 1024

FILE: src/http/replay.ts
  function writeMessagesToResponse (line 41) | async function writeMessagesToResponse(res: OutgoingMessage, replayOptio...

FILE: src/http/replaynormalized.ts
  function writeMessagesToResponse (line 42) | async function writeMessagesToResponse(res: OutgoingMessage, options: Re...

FILE: src/tardismachine.ts
  class TardisMachine (line 13) | class TardisMachine {
    method constructor (line 18) | constructor(private readonly options: Options) {
    method start (line 86) | public async start(port: number) {
    method stop (line 126) | public async stop() {
  type Options (line 139) | type Options = {

FILE: src/ws/replay.ts
  function replayWS (line 11) | function replayWS(ws: any, req: HttpRequest) {
  class ReplaySession (line 47) | class ReplaySession {
    method constructor (line 51) | constructor() {
    method addToSession (line 61) | public addToSession(websocketConnection: WebsocketConnection) {
    method hasStarted (line 70) | public get hasStarted() {
    method _start (line 76) | private async _start() {
    method _closeAllConnections (line 150) | private async _closeAllConnections(error: Error | undefined = undefine...
    method onFinished (line 166) | public onFinished(onFinishedCallback: () => void) {
  class WebsocketConnection (line 171) | class WebsocketConnection {
    method constructor (line 176) | constructor(
    method close (line 197) | public close(error: Error | undefined = undefined) {
    method toString (line 211) | public toString() {
    method _convertSubscribeRequestToFilter (line 215) | private _convertSubscribeRequestToFilter(messageRaw: ArrayBuffer) {

FILE: src/ws/replaynormalized.ts
  function replayNormalizedWS (line 7) | async function replayNormalizedWS(ws: any, req: HttpRequest) {

FILE: src/ws/streamnormalized.ts
  function streamNormalizedWS (line 7) | async function streamNormalizedWS(ws: any, req: HttpRequest) {

FILE: src/ws/subscriptionsmappers.ts
  constant BYBIT_V5_API_SWITCH_DATE (line 265) | const BYBIT_V5_API_SWITCH_DATE = new Date('2023-04-05T00:00:00.000Z')
  type SubscriptionMapper (line 745) | type SubscriptionMapper = {

FILE: test/tardismachine.test.ts
  constant PORT (line 7) | const PORT = 8072
  constant HTTP_REPLAY_DATA_FEEDS_URL (line 8) | const HTTP_REPLAY_DATA_FEEDS_URL = `http://localhost:${PORT}/replay`
  constant HTTP_REPLAY_NORMALIZED_URL (line 9) | const HTTP_REPLAY_NORMALIZED_URL = `http://localhost:${PORT}/replay-norm...
  constant WS_REPLAY_NORMALIZED_URL (line 10) | const WS_REPLAY_NORMALIZED_URL = `ws://localhost:${PORT + 1}/ws-replay-n...
  constant WS_REPLAY_URL (line 11) | const WS_REPLAY_URL = `ws://localhost:${PORT + 1}/ws-replay`
  class SimpleWebsocketClient (line 758) | class SimpleWebsocketClient {
    method constructor (line 761) | constructor(
    method send (line 779) | public send(payload: any) {
    method close (line 783) | public close() {
    method closed (line 787) | public async closed() {
Condensed preview — 35 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (122K chars).
[
  {
    "path": ".dockerignore",
    "chars": 32,
    "preview": ".git\r\ndist\r\nnode_modules\r\n.cache"
  },
  {
    "path": ".github/workflows/ci.yaml",
    "chars": 914,
    "preview": "name: CI\n\non:\n  push:\n    branches:\n      - master\n  pull_request:\n    branches:\n      - master\n\njobs:\n  ci:\n    name: C"
  },
  {
    "path": ".github/workflows/npm_audit.yaml",
    "chars": 1866,
    "preview": "name: Full NPM Audit\n\non:\n  schedule:\n    - cron: '20 3 * * *'\n  workflow_dispatch:\n\npermissions:\n  contents: read\n\njobs"
  },
  {
    "path": ".github/workflows/publish.yaml",
    "chars": 2217,
    "preview": "name: Publish New Release To NPM And Image To Docker Hub\n\non:\n  release:\n    # This specifies that the build will be tri"
  },
  {
    "path": ".gitignore",
    "chars": 77,
    "preview": "node_modules\n/dist\n/*.log\n.tardis-cache\n*.tsbuildinfo\nbench\n.cache\n.DS_Store\n"
  },
  {
    "path": ".npmrc",
    "chars": 32,
    "preview": "min-release-age=1\nallow-git=all\n"
  },
  {
    "path": ".prettierignore",
    "chars": 45,
    "preview": "package.json\npackage-lock.json\nyarn.lock\ndist"
  },
  {
    "path": ".prettierrc",
    "chars": 112,
    "preview": "{\n  \"printWidth\": 140,\n  \"semi\": false,\n  \"singleQuote\": true,\n  \"trailingComma\": \"none\",\n  \"endOfLine\": \"lf\"\n}\n"
  },
  {
    "path": "AGENTS.md",
    "chars": 1291,
    "preview": "# tardis-machine\n\nPublic npm package and Docker image. Locally runnable server providing HTTP and WebSocket APIs for tic"
  },
  {
    "path": "ARCHITECTURE.md",
    "chars": 1814,
    "preview": "# Architecture\n\ntardis-machine is a local server that wraps `tardis-dev` library functionality in HTTP and WebSocket API"
  },
  {
    "path": "CLAUDE.md",
    "chars": 11,
    "preview": "@AGENTS.md\n"
  },
  {
    "path": "Dockerfile",
    "chars": 518,
    "preview": "#\n# uWebSockets.js v20.59.0 requires glibc >= 2.38 for the prebuilt Linux addon.\n# Use the explicit trixie variant to ke"
  },
  {
    "path": "LICENSE",
    "chars": 16724,
    "preview": "Mozilla Public License Version 2.0\n==================================\n\n1. Definitions\n--------------\n\n1.1. \"Contributor\""
  },
  {
    "path": "README.md",
    "chars": 1883,
    "preview": "# Tardis Machine Server\n\n[![Version](https://img.shields.io/npm/v/tardis-machine.svg)](https://www.npmjs.org/package/tar"
  },
  {
    "path": "benchmark.js",
    "chars": 5295,
    "preview": "import { createRequire } from 'node:module'\n\nconst require = createRequire(import.meta.url)\nconst fetch = require('node-"
  },
  {
    "path": "bin/tardis-machine.js",
    "chars": 2864,
    "preview": "#!/usr/bin/env node\nprocess.env.UWS_HTTP_MAX_HEADERS_SIZE = '20000'\nimport { createRequire } from 'node:module'\n\nconst r"
  },
  {
    "path": "package.json",
    "chars": 3260,
    "preview": "{\n  \"name\": \"tardis-machine\",\n  \"version\": \"16.1.0\",\n  \"engines\": {\n    \"node\": \">=25\"\n  },\n  \"devEngines\": {\n    \"runti"
  },
  {
    "path": "src/debug.ts",
    "chars": 71,
    "preview": "import dbg from 'debug'\nexport const debug = dbg('tardis-dev:machine')\n"
  },
  {
    "path": "src/helpers.ts",
    "chars": 7196,
    "preview": "import {\n  ComputableFactory,\n  computeBookSnapshots,\n  computeTradeBars,\n  Disconnect,\n  MapperFactory,\n  normalizeBook"
  },
  {
    "path": "src/http/healthCheck.ts",
    "chars": 951,
    "preview": "import type { IncomingMessage, ServerResponse } from 'node:http'\nconst BYTES_IN_MB = 1024 * 1024\n\nexport const healthChe"
  },
  {
    "path": "src/http/index.ts",
    "chars": 99,
    "preview": "export * from './replay.ts'\nexport * from './replaynormalized.ts'\nexport * from './healthCheck.ts'\n"
  },
  {
    "path": "src/http/replay.ts",
    "chars": 3064,
    "preview": "import { once } from 'node:events'\nimport type { IncomingMessage, OutgoingMessage, ServerResponse } from 'node:http'\nimp"
  },
  {
    "path": "src/http/replaynormalized.ts",
    "chars": 3302,
    "preview": "import { once } from 'node:events'\nimport type { IncomingMessage, OutgoingMessage, ServerResponse } from 'node:http'\nimp"
  },
  {
    "path": "src/index.ts",
    "chars": 51,
    "preview": "export { TardisMachine } from './tardismachine.ts'\n"
  },
  {
    "path": "src/tardismachine.ts",
    "chars": 4070,
    "preview": "import findMyWay from 'find-my-way'\nimport http from 'node:http'\nimport { createRequire } from 'module'\nimport { clearCa"
  },
  {
    "path": "src/ws/index.ts",
    "chars": 104,
    "preview": "export * from './replay.ts'\nexport * from './replaynormalized.ts'\nexport * from './streamnormalized.ts'\n"
  },
  {
    "path": "src/ws/replay.ts",
    "chars": 7576,
    "preview": "import { decode } from 'node:querystring'\nimport { combine, Exchange, replay, ReplayOptions } from 'tardis-dev'\nimport t"
  },
  {
    "path": "src/ws/replaynormalized.ts",
    "chars": 2601,
    "preview": "import { decode } from 'node:querystring'\nimport { combine, compute, replayNormalized } from 'tardis-dev'\nimport type { "
  },
  {
    "path": "src/ws/streamnormalized.ts",
    "chars": 4466,
    "preview": "import { decode } from 'node:querystring'\nimport { combine, compute, Exchange, streamNormalized } from 'tardis-dev'\nimpo"
  },
  {
    "path": "src/ws/subscriptionsmappers.ts",
    "chars": 17852,
    "preview": "import { Exchange, Filter } from 'tardis-dev'\n\n// https://www.bitmex.com/app/wsAPI\nconst bitmexMapper: SubscriptionMappe"
  },
  {
    "path": "test/subscriptionsmappers.test.ts",
    "chars": 1145,
    "preview": "import { subscriptionsMappers } from '../src/ws/subscriptionsmappers.ts'\n\ndescribe('subscriptions mappers', () => {\n  te"
  },
  {
    "path": "test/tardismachine.test.ts",
    "chars": 23849,
    "preview": "import WebSocket from 'ws'\nimport fetch from 'node-fetch'\nimport split2 from 'split2'\nimport { EXCHANGES, type FilterFor"
  },
  {
    "path": "test/tsconfig.json",
    "chars": 246,
    "preview": "{\n  \"extends\": \"../tsconfig.json\",\n  \"compilerOptions\": {\n    \"rootDir\": \"..\",\n    \"isolatedModules\": true,\n    \"module\""
  },
  {
    "path": "tsconfig.json",
    "chars": 616,
    "preview": "{\n  \"include\": [\"src\"],\n  \"compilerOptions\": {\n    \"rootDir\": \"src\",\n    \"allowSyntheticDefaultImports\": true,\n    \"sour"
  }
]

// ... and 1 more file (download for full content)

About this extraction

This page contains the full source code of the tardis-dev/tardis-machine GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 35 files (30.6 MB), approximately 28.7k tokens, and a symbol index with 63 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!