diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..8948dc9
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,13 @@
+# API Server
+PORT=8080
+HOSTNAME=localhost
+
+# Clickhouse Database
+HOST=http://127.0.0.1:8123
+DATABASE=default
+USERNAME=default
+PASSWORD=
+MAX_LIMIT=500
+
+# Logging
+VERBOSE=true
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..f7d86a1
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,28 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: bug
+assignees: 0237h
+
+---
+
+
+
+## Expected Behavior
+
+
+## Current Behavior
+
+
+## How to reproduce
+
+
+## Possible Solution
+
+
+## Screenshots
+
+
+## Environment
+
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..6ce1244
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,22 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: feature
+assignees: 0237h
+
+---
+
+
+
+## Detailed description
+
+
+## Context
+
+
+## Possible implementation
+
+
+## Other alternatives
+
diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml
new file mode 100644
index 0000000..cbcd107
--- /dev/null
+++ b/.github/release-drafter.yml
@@ -0,0 +1,56 @@
+name-template: 'v$RESOLVED_VERSION'
+tag-template: 'v$RESOLVED_VERSION'
+categories:
+ - title: '✨ Features'
+ labels:
+ - 'feature'
+ - title: '🐛 Bug Fixes'
+ labels:
+ - 'fix'
+ - 'bugfix'
+ - 'bug'
+ - title: '📝 Documentation'
+ labels:
+ - 'documentation'
+ - title: '🔧 Operations'
+ label: 'ops'
+change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
+change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
+version-resolver:
+ major:
+ labels:
+ - 'major'
+ minor:
+ labels:
+ - 'minor'
+ patch:
+ labels:
+ - 'patch'
+ default: patch
+template: |
+ ## Changes
+ $CHANGES
+
+ **Full Changelog**: https://github.com/pinax-network/antelope-transactions-api/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION
+
+autolabeler:
+ - label: 'documentation'
+ branch:
+ - '/docs\/.+/'
+ files:
+ - '*.md'
+ - label: 'bug'
+ branch:
+ - '/fix\/.+/'
+ - label: 'feature'
+ branch:
+ - '/feature\/.+/'
+ - label: 'ops'
+ branch:
+ - '/ops\/.+/'
+ files:
+ - '.github/*.yml'
+ - '.github/workflows/*.yml'
+ - '.gitignore'
+ - 'tsconfig.json'
+ - 'Dockerfile'
\ No newline at end of file
diff --git a/.github/workflows/bun-build.yml b/.github/workflows/bun-build.yml
new file mode 100644
index 0000000..e00623a
--- /dev/null
+++ b/.github/workflows/bun-build.yml
@@ -0,0 +1,27 @@
+name: Build
+on:
+ release:
+ types: [ published ]
+
+permissions:
+ contents: write
+
+jobs:
+ bun-build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: oven-sh/setup-bun@v1
+ with:
+ bun-version: latest
+
+ - name: 'Install Dependencies'
+ run: bun install
+
+ - name: 'Build app'
+ run: bun run build
+
+ - uses: softprops/action-gh-release@v1
+ with:
+ files: |
+ antelope-transactions-api
diff --git a/.github/workflows/bun-test.yml b/.github/workflows/bun-test.yml
new file mode 100644
index 0000000..7981b3b
--- /dev/null
+++ b/.github/workflows/bun-test.yml
@@ -0,0 +1,31 @@
+name: Test
+
+on: push
+
+jobs:
+ bun-test:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Install bun
+ uses: oven-sh/setup-bun@v1
+
+ - name: 'Install Dependencies'
+ run: |
+ bun install
+
+ - name: 'Run lint'
+ run: |
+ bun lint
+
+ - name: 'Run test'
+ run: |
+ bun test
+ env:
+ PORT: ${{ vars.PORT }}
+ HOSTNAME: ${{ vars.HOSTNAME }}
+ HOST: ${{ vars.HOST }}
+ USERNAME: ${{ secrets.USERNAME }}
+ PASSWORD: ${{ secrets.PASSWORD }}
diff --git a/.github/workflows/ghcr.yml b/.github/workflows/ghcr.yml
new file mode 100644
index 0000000..95a6b32
--- /dev/null
+++ b/.github/workflows/ghcr.yml
@@ -0,0 +1,47 @@
+name: GitHub Container Registry
+on:
+ push:
+ tags:
+ - "v*"
+ branches:
+ - "*"
+
+env:
+ REGISTRY: ghcr.io
+ IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+ ghcr:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ - name: Log in to the Container registry
+ uses: docker/login-action@v3
+ with:
+ registry: ${{ env.REGISTRY }}
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
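+        # Produces: '<sha>-<unix timestamp>' on every push, 'develop' on non-tag pushes, semver tags on releases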
+ tags: |
+ type=sha,prefix=,suffix=-{{date 'X'}}
+ type=raw,enable=${{ !startsWith(github.ref, 'refs/tags/') }},value=develop
+ type=semver,pattern={{raw}}
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
\ No newline at end of file
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
new file mode 100644
index 0000000..12b1f6c
--- /dev/null
+++ b/.github/workflows/release-drafter.yml
@@ -0,0 +1,22 @@
+name: Release Drafter
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+ types: [opened, reopened, synchronize]
+
+permissions:
+ contents: read
+
+jobs:
+ update_release_draft:
+ permissions:
+ contents: write
+ pull-requests: write
+ runs-on: ubuntu-latest
+ steps:
+ - uses: release-drafter/release-drafter@v5
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0278475
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,142 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+package-lock.json
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+.cache
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+# Sublime Text
+*.sublime*
+
+# Local clickhouse DB
+cursor.lock
+
+# CLI
+antelope-token-api
+
+*.db
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..faa2c5a
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,163 @@
+# Contributing to Antelope Token API
+
+Welcome to the Antelope Token API repository! Here you'll find guidelines on how the repository is set up and how to contribute to it.
+
+
+
+## Table of Contents
+
+- [Asking Questions](#asking-questions)
+- [Contributing](#contributing)
+ - [Reporting Bugs](#reporting-bugs)
+ - [Suggesting Enhancements](#suggesting-enhancements)
+ - [Submitting PRs](#submitting-prs)
+- [Style guides](#style-guides)
+ - [Code](#code)
+ - [Commit Messages](#commit-messages)
+
+## Asking Questions
+
+> [!NOTE]
+> Make sure you have read the [documentation](README.md) first!
+
+Before you ask a question, it is best to search for existing [Issues](https://github.com/pinax-network/antelope-token-api/issues) that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue. It is also advisable to search the internet for answers first.
+
+
+If you still feel the need to ask a question after that, we recommend the following:
+
+- Open an [Issue](https://github.com/pinax-network/antelope-token-api/issues/new).
+- Provide as much context as you can about what you're running into.
+- Provide project and platform versions depending on what seems relevant.
+
+## Contributing
+
+
+
+### Reporting Bugs
+
+#### Before Submitting a Bug Report
+
+A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help fix any potential bug as fast as possible.
+
+- Make sure that you are using the [latest version](https://github.com/pinax-network/antelope-token-api/releases). If you're using the binary, you can check with `antelope-token-api --version`.
+- Determine if your bug is really a bug and not an error on your side, e.g. using incompatible environment components/versions (make sure that you have read the [documentation](README.md); if you are looking for support, you might want to check [this section](#asking-questions)).
+- To see if other users have experienced (and potentially already solved) the same issue you are having, check whether a bug report already exists for your bug or error in the [bug tracker](https://github.com/pinax-network/antelope-token-api/issues?q=label%3Abug).
+- Also make sure to search the internet (including Stack Overflow) to see if users outside the GitHub community have discussed the issue.
+- Collect information about the bug:
+ - Stack trace if possible
+ - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
+ - Version of the [Bun](https://bun.sh/) binary, `bun --version`
+ - Possibly your environment variables and the output
+ - Can you reliably reproduce the issue? And can you also reproduce it with older versions?
+
+#### How Do I Submit a Good Bug Report?
+
+
+
+We use GitHub issues to track bugs and errors. If you run into an issue with the project:
+
+- Open an [Issue](https://github.com/pinax-network/antelope-token-api/issues/new?assignees=0237h&labels=bug&projects=&template=bug_report.md&title=).
+- Explain the behavior you would expect and the actual behavior.
+- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
+- Provide the information you collected in the previous section.
+
+### Suggesting Enhancements
+
+This section guides you through submitting an enhancement suggestion for Antelope Token API, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.
+
+#### Before Submitting an Enhancement
+
+- Make sure that you are using the [latest version](https://github.com/pinax-network/antelope-token-api/releases). If you're using the binary, you can check with `antelope-token-api --version`.
+- Read the [documentation](README.md) carefully and find out if the functionality is already covered, maybe by an individual configuration.
+- Perform a [search](https://github.com/pinax-network/antelope-token-api/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
+- Find out whether your idea fits with the scope and aims of the project. Keep in mind that features should be useful to the majority of users and not just a small subset.
+
+#### How Do I Submit a Good Enhancement Suggestion?
+
+Enhancement suggestions are tracked as [GitHub issues](https://github.com/pinax-network/antelope-token-api/issues).
+
+- Open an [Issue](https://github.com/pinax-network/antelope-token-api/issues/new?assignees=0237h&labels=feature&projects=&template=feature_request.md&title=).
+- Use a **clear and descriptive title** for the issue to identify the suggestion.
+- Provide a **step-by-step description of the suggested enhancement** in as much detail as possible.
+- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also explain which alternatives do not work for you.
+- **Explain why this enhancement would be useful** to most Antelope Token API users. You may also want to point out other projects that solved it better and which could serve as inspiration.
+
+### Submitting PRs
+
+You can follow the instructions from the `Quick Start` section of the [`README.md`](README.md/#quick-start) for setting up the environment.
+
+The repository contains one `main` branch. Any changes to `main` must go through a pull request of a branch with a specific naming pattern (see below).
+
+Any push to the `main` branch will be tagged with the commit hash, and the latest commit will additionally be tagged with `develop` to enable pulling the latest development image (this is done automatically). You can retrieve the latest stable version of the API by checking out the latest tagged version commit (following [*semver*](https://semver.org/)).
+
+PRs should be submitted from separate branches of the `main` branch. Ideally, your PR should fall into one of the following categories:
+- **Feature**: `feature/xxx`
+- **Bug fix**: `fix/xxx`, try to make separate PRs for different bug fixes unless the change solves multiple bugs at once.
+- **Documentation**: `docs/xxx`, adding comments to files counts as documentation and such changes should be made in a separate branch.
+- **Operations**: `ops/xxx`
+- **Others**: any other branching scheme (or no branch) will be counted as a miscellaneous change; avoid if possible.
+
+The reasoning behind these categories is to make it easier to track changes as well as drafting future releases (see [`release-drafter.yml`](.github/release-drafter.yml) action for more details).
+
+> [!WARNING]
+> Make sure to reference any issues associated with the PR, if one (or more) exists, in your commit message.
+
+## Style guides
+
+### Code
+
+If you're using a standard IDE like [VSCode](https://code.visualstudio.com/), [Sublime Text](https://www.sublimetext.com/), etc., there shouldn't be any formatting issues. The code is formatted according to what the [LSP Typescript](https://github.com/typescript-language-server/typescript-language-server) standard client is using. Details about the settings used can be found [here](https://github.com/sublimelsp/LSP-typescript/blob/00aef378fd99283ae8451fe8f3f2483fa62b7d8e/LSP-typescript.sublime-settings#L61).
+
+### Commit Messages
+
+Here's a helpful commit message template adapted from [cbeams' article](https://cbea.ms/git-commit/): *How to Write a Git Commit Message*.
+
+```
+# Summarize changes in about 50 characters or less
+# 50 characters limit ############################
+#
+# More detailed explanatory text, if necessary. Wrap it to about 72
+# characters or so. In some contexts, the first line is treated as the
+# subject of the commit and the rest of the text as the body. The
+# blank line separating the summary from the body is critical (unless
+# you omit the body entirely); various tools like `log`, `shortlog`
+# and `rebase` can get confused if you run the two together.
+# 72 characters limit ##################################################
+#
+# Explain the problem that this commit is solving. Focus on why you
+# are making this change as opposed to how (the code explains that).
+# Are there side effects or other unintuitive consequences of this
+# change? Here's the place to explain them.
+#
+# Further paragraphs come after blank lines.
+#
+# - Bullet points are okay, too
+# - Typically a hyphen or asterisk is used for the bullet, preceded
+# by a single space, with blank lines in between, but conventions
+# vary here
+#
+# Put references to relevant issues at the bottom, like this:
+#
+# Resolves: #123
+# See also: #456, #789
+```
+
+To use it, simply save it as a `.gitmessage` file and use the following command to make `git` use it:
+```console
+git config commit.template ~/.gitmessage # Make sure to have the right path to your message file
+```
+or to configure it globally
+```console
+git config --global commit.template ~/.gitmessage # Make sure to have the right path to your message file
+```
+
+
+
+## Attribution
+
+This guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..06b2a1f
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,6 @@
+FROM oven/bun
+COPY . .
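+# git is required so the 'start' script can stamp APP_VERSION via 'git rev-parse' (see package.json scripts)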
+RUN apt-get -y update
+RUN apt-get -y install git
+RUN bun install
+ENTRYPOINT [ "bun", "run", "start" ]
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..3f7af7b
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Pinax
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
index 01e522a..d576d8b 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,218 @@
-# antelope-transactions-api
+# Antelope Transactions API
+
+[![.github/workflows/bun-test.yml](https://github.com/pinax-network/antelope-token-api/actions/workflows/bun-test.yml/badge.svg)](https://github.com/pinax-network/antelope-token-api/actions/workflows/bun-test.yml)
+
+> Transactions information from the Antelope blockchains, powered by [Substreams](https://substreams.streamingfast.io/)
+
+
+
+
+## Swagger API
+
+### Usage
+
+| Method | Path | Query parameters<br>(* = **Required**) | Description |
+| :---: | --- | --- | --- |
+| GET<br>`text/html` | `/` | - | [Swagger](https://swagger.io/) API playground |
+| GET<br>`application/json` | `/balance` | **`account*`**<br>`contract`<br>`symcode`<br>`limit`<br>`page` | Balances of an account |
+| GET<br>`application/json` | `/balance/historical` | **`account*`**<br>`block_num`<br>`contract`<br>`symcode`<br>`limit`<br>`page` | Historical token balances |
+| GET<br>`application/json` | `/head` | `limit`<br>`page` | Head block information |
+| GET<br>`application/json` | `/holders` | **`contract*`**<br>**`symcode*`**<br>`limit`<br>`page` | List of holders of a token |
+| GET<br>`application/json` | `/supply` | `block_num`<br>`issuer`<br>**`contract*`**<br>**`symcode*`**<br>`limit`<br>`page` | Total supply for a token |
+| GET<br>`application/json` | `/tokens` | `limit`<br>`page` | List of available tokens |
+| GET<br>`application/json` | `/transfers` | `block_range`<br>**`contract*`**<br>**`symcode*`**<br>`limit`<br>`page` | All transfers related to a token |
+| GET<br>`application/json` | `/transfers/account` | **`account*`**<br>`block_range`<br>`from`<br>`to`<br>`contract`<br>`symcode`<br>`limit`<br>`page` | All transfers related to an account |
+| GET<br>`application/json` | `/transfers/id` | **`trx_id*`**<br>`limit`<br>`page` | Specific transfer related to a token |
+
+### Docs
+
+| Method | Path | Description |
+| :---: | --- | --- |
+| GET<br>`application/json` | `/openapi` | [OpenAPI](https://www.openapis.org/) specification |
+| GET<br>`application/json` | `/version` | API version and Git short commit hash |
+
+### Monitoring
+
+| Method | Path | Description |
+| :---: | --- | --- |
+| GET<br>`text/plain` | `/health` | Checks database connection |
+| GET<br>`text/plain` | `/metrics` | [Prometheus](https://prometheus.io/) metrics |
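+
+For example, with the API running locally on the default `localhost:8080`, a quick smoke test could look like this (the `/health` endpoint returns `OK` when the database is reachable):
+
+```console
+$ curl http://localhost:8080/health
+OK
+```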
+
+## GraphQL
+
+Go to `/graphql` for a GraphiQL interface.
+
+### `X-Api-Key`
+
+Use the `Variables` tab at the bottom to add your API key:
+```json
+{
+ "X-Api-Key": "changeme"
+}
+```
+
+### Additional notes
+
+- For the `block_range` parameter in `transfers`, you can pass a single integer value (low bound) or an array of two values (inclusive range).
+- Use the `from` and `to` fields for transfers of an account to further filter the results (i.e. incoming or outgoing transactions from/to another account).
+- Don't forget to request the `meta` fields in the response to get access to pagination and statistics! A query sketch follows below.
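+
+A minimal query sketch (operation and field names are illustrative; they depend on the generated GraphQL schema, so use the GraphiQL docs tab or `/openapi` for the exact shapes):
+
+```graphql
+{
+  # Hypothetical operation mapping to `GET /transfers`
+  transfers(contract: "eosio.token", symcode: "EOS", block_range: [300000000, 300001000]) {
+    data {
+      from
+      to
+      quantity
+    }
+    # Pagination and statistics
+    meta {
+      statistics {
+        rows_read
+        elapsed
+      }
+    }
+  }
+}
+```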
+
+## Requirements
+
+- [ClickHouse](https://clickhouse.com/), databases should follow a `{chain}_tokens_{version}` naming scheme. Database tables can be set up using the [`schema.sql`](./schema.sql) definitions created by the [`create_schema.sh`](./create_schema.sh) script.
+- A [Substreams sink](https://substreams.streamingfast.io/reference-and-specs/glossary#sink) for loading data into ClickHouse. We recommend [Substreams Sink ClickHouse](https://github.com/pinax-network/substreams-sink-clickhouse/) or [Substreams Sink SQL](https://github.com/pinax-network/substreams-sink-sql). You should use the generated [`protobuf` files](static/@typespec/protobuf) to build your substream. This Token API makes use of the [`substreams-antelope-tokens`](https://github.com/pinax-network/substreams-antelope-tokens/) substream.
+
+### API stack architecture
+
+![Token API architecture diagram](token_api_architecture_diagram.png)
+
+### Setting up the database backend (ClickHouse)
+
+#### Without a cluster
+
+Example on how to set up the ClickHouse backend for sinking [EOS](https://pinax.network/en/chain/eos) data.
+
+1. Start the ClickHouse server
+
+```console
+clickhouse server
+```
+
+2. Create the token database
+
+```console
+echo "CREATE DATABASE eos_tokens_v1" | clickhouse client -h --port 9000 -d -u --password
+```
+
+3. Run the [`create_schema.sh`](./create_schema.sh) script
+
+```console
+./create_schema.sh -o /tmp/schema.sql
+```
+
+4. Execute the schema
+
+```console
+cat /tmp/schema.sql | clickhouse client -h <host> --port 9000 -d <database> -u <user> --password <password>
+```
+
+5. Run the [sink](https://github.com/pinax-network/substreams-sink-sql)
+
+```console
+substreams-sink-sql run clickhouse://<user>:<password>@<host>:9000/eos_tokens_v1 \
+https://github.com/pinax-network/substreams-antelope-tokens/releases/download/v0.4.0/antelope-tokens-v0.4.0.spkg `#Substreams package` \
+-e eos.substreams.pinax.network:443 `#Substreams endpoint` \
+1: `#Block range <start>:<stop>` \
+--final-blocks-only --undo-buffer-size 1 --on-module-hash-mistmatch=warn --batch-block-flush-interval 100 --development-mode `#Additional flags`
+```
+
+6. Start the API
+
+```console
+# Will be available on localhost:8080 by default
+antelope-token-api --host <host> --database eos_tokens_v1 --username <user> --password <password> --verbose
+```
+
+#### With a cluster
+
+If you run ClickHouse in a [cluster](https://clickhouse.com/docs/en/architecture/cluster-deployment), change steps 2 & 3:
+
+2. Create the token database
+
+```console
+echo "CREATE DATABASE eos_tokens_v1 ON CLUSTER " | clickhouse client -h --port 9000 -d -u --password
+```
+
+3. Run the [`create_schema.sh`](./create_schema.sh) script
+
+```console
+./create_schema.sh -o /tmp/schema.sql -c <cluster>
+```
+
+
+## [`Bun` Binary Releases](https://github.com/pinax-network/antelope-token-api/releases)
+
+> [!WARNING]
+> Linux x86 only
+
+```console
+$ wget https://github.com/pinax-network/antelope-token-api/releases/download/v4.0.0/antelope-token-api
+$ chmod +x ./antelope-token-api
+$ ./antelope-token-api --help
+Usage: antelope-token-api [options]
+
+Token balances, supply and transfers from the Antelope blockchains
+
+Options:
+  -V, --version            output the version number
+  -p, --port <number>      HTTP port on which to attach the API (default: "8080", env: PORT)
+  --hostname <hostname>    Server listen on HTTP hostname (default: "localhost", env: HOSTNAME)
+  --host <hostname>        Database HTTP hostname (default: "http://localhost:8123", env: HOST)
+  --database <string>      The database to use inside ClickHouse (default: "default", env: DATABASE)
+  --username <string>      Database user (default: "default", env: USERNAME)
+  --password <string>      Password associated with the specified username (default: "", env: PASSWORD)
+  --max-limit <number>     Maximum LIMIT queries (default: 10000, env: MAX_LIMIT)
+  -v, --verbose [boolean]  Enable verbose logging (choices: "true", "false", default: false, env: VERBOSE)
+  -h, --help               display help for command
+```
+
+## `.env` Environment variables
+
+```env
+# API Server
+PORT=8080
+HOSTNAME=localhost
+
+# Clickhouse Database
+HOST=http://127.0.0.1:8123
+DATABASE=default
+USERNAME=default
+PASSWORD=
+MAX_LIMIT=500
+
+# Logging
+VERBOSE=true
+```
+
+## Docker environment
+
+- Pull from GitHub Container registry
+
+**For latest tagged release**
+```bash
+docker pull ghcr.io/pinax-network/antelope-token-api:latest
+```
+
+**For head of `main` branch**
+```bash
+docker pull ghcr.io/pinax-network/antelope-token-api:develop
+```
+
+- Build from source
+```bash
+docker build -t antelope-token-api .
+```
+
+- Run with `.env` file
+```bash
+docker run -it --rm --env-file .env ghcr.io/pinax-network/antelope-token-api
+```
+
+## Contributing
+
+See [`CONTRIBUTING.md`](CONTRIBUTING.md).
+
+### Quick start
+
+Install [Bun](https://bun.sh/)
+
+```console
+$ bun install
+$ bun dev
+```
+
+**Tests**
+```console
+$ bun lint
+$ bun test
+```
diff --git a/bun.lockb b/bun.lockb
new file mode 100755
index 0000000..13c6bde
Binary files /dev/null and b/bun.lockb differ
diff --git a/create_schema.sh b/create_schema.sh
new file mode 100755
index 0000000..ed0fe33
--- /dev/null
+++ b/create_schema.sh
@@ -0,0 +1,213 @@
+#!/usr/bin/env bash
+
+# Helper script for generating the `schema.sql` ClickHouse tables definition
+# Specify a cluster name to add `ON CLUSTER` directives
+
+show_usage() {
+    printf 'Usage: %s [(-o|--outfile) file (default: "schema.sql")] [(-c|--cluster) name (default: none)] [(-h|--help)]\n' "$(basename "$0")"
+    exit "${1:-0}"
+}
+
+SCHEMA_FILE="./schema.sql"
+CLUSTER_NAME=""
+while [[ "$#" -gt 0 ]]; do
+ case $1 in
+ -o|--outfile) SCHEMA_FILE="$2"; shift ;;
+ -c|--cluster) CLUSTER_NAME="$2"; shift ;;
+ -h|--help) show_usage ;;
+ *) echo "Unknown parameter passed: $1"; show_usage; exit 1 ;;
+ esac
+ shift
+done
+
+ON_CLUSTER_DIRECTIVE=""
+ENGINE_DEFAULT="ReplacingMergeTree()"
+ENGINE_VER="ReplacingMergeTree(ver)"
+ENGINE_VER_DELETE="ReplacingMergeTree(ver, has_null_balance)"
+if [ -n "$CLUSTER_NAME" ]; then
+ ON_CLUSTER_DIRECTIVE="ON CLUSTER \"$CLUSTER_NAME\""
+ ENGINE_DEFAULT="ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')"
+ ENGINE_VER="ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}', ver)"
+ ENGINE_VER_DELETE="ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}', ver, has_null_balance)"
+fi
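+
+# Example invocations (hypothetical cluster name):
+#   ./create_schema.sh                                   # standalone, writes ./schema.sql
+#   ./create_schema.sh -o /tmp/schema.sql -c my_cluster  # replicated engines + ON CLUSTER directives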
+
+cat > "$SCHEMA_FILE" <<- EOM
+--------------------------------------
+-- AUTO-GENERATED FILE, DO NOT EDIT --
+--------------------------------------
+-- This SQL file creates the required tables for a single Antelope chain.
+-- You can use the ClickHouse client command to execute it:
+-- $ cat schema.sql | clickhouse client -h <host> --port 9000 -d <database> -u <user> --password <password>
+
+-------------------------------------------------
+-- Meta tables to store Substreams information --
+-------------------------------------------------
+CREATE TABLE IF NOT EXISTS cursors $ON_CLUSTER_DIRECTIVE
+(
+ id String,
+ cursor String,
+ block_num Int64,
+ block_id String
+)
+ ENGINE = $ENGINE_DEFAULT
+ PRIMARY KEY (id)
+ ORDER BY (id);
+
+CREATE TABLE IF NOT EXISTS blocks $ON_CLUSTER_DIRECTIVE
+(
+ -- clock --
+ block_time DateTime64(3, 'UTC'),
+ block_number UInt64,
+ block_date Date,
+ block_hash String COMMENT 'Hash',
+
+ -- header --
+ parent_hash String COMMENT 'Hash',
+ producer String COMMENT 'Address',
+ confirmed UInt32,
+ schedule_version UInt32,
+
+ -- block --
+ version UInt32,
+ producer_signature String COMMENT 'Signature',
+ dpos_proposed_irreversible_blocknum UInt32,
+ dpos_irreversible_blocknum UInt32,
+
+ -- block roots --
+ transaction_mroot String COMMENT 'Hash',
+ action_mroot String COMMENT 'Hash',
+ -- blockroot_merkle_active_nodes Array(String) COMMENT 'A blockroot Merkle tree uses hashes to verify blockchain data integrity. Leaf nodes hash data blocks, non-leaf nodes hash child nodes. The root hash efficiently verifies all data.',
+ blockroot_merkle_node_count UInt32,
+
+ -- counters --
+ size UInt64 COMMENT 'Block size estimate in bytes',
+ total_transactions UInt64,
+ successful_transactions UInt64,
+ failed_transactions UInt64,
+ total_actions UInt64,
+ total_db_ops UInt64,
+)
+ ENGINE = $ENGINE_DEFAULT
+ PRIMARY KEY (block_date, block_number)
+ ORDER BY (block_date, block_number, block_hash)
+ COMMENT 'Antelope block header';
+
+CREATE TABLE IF NOT EXISTS transactions $ON_CLUSTER_DIRECTIVE
+(
+ -- clock --
+ block_time DateTime64(3, 'UTC'),
+ block_number UInt64,
+ block_hash String COMMENT 'Hash',
+ block_date Date,
+
+ -- transaction --
+ hash String COMMENT 'Hash',
+ \`index\` UInt64,
+ elapsed Int64,
+ net_usage UInt64,
+ scheduled Bool,
+
+ -- header --
+ cpu_usage_micro_seconds UInt32,
+ net_usage_words UInt32,
+ status LowCardinality(String) COMMENT 'Status',
+ status_code UInt8,
+ success Bool,
+
+ -- block roots --
+ transaction_mroot String COMMENT 'Hash',
+)
+ ENGINE = $ENGINE_DEFAULT
+ PRIMARY KEY (block_date, block_number)
+ ORDER BY (block_date, block_number, block_hash, hash)
+ COMMENT 'Antelope transactions';
+
+CREATE TABLE IF NOT EXISTS actions $ON_CLUSTER_DIRECTIVE
+(
+ -- clock --
+ block_time DateTime64(3, 'UTC'),
+ block_number UInt64,
+ block_hash String COMMENT 'Hash',
+ block_date Date,
+
+ -- transaction --
+ tx_hash String COMMENT 'Hash',
+ tx_index UInt64,
+ tx_status LowCardinality(String),
+ tx_status_code UInt8,
+ tx_success Bool,
+
+ -- receipt --
+ abi_sequence UInt64,
+ code_sequence UInt64,
+ digest String,
+ global_sequence UInt64,
+ receipt_receiver String COMMENT 'Address',
+ recv_sequence UInt64,
+
+ -- action --
+ account String COMMENT 'Address',
+ name String COMMENT 'Address',
+ json_data String COMMENT 'JSON',
+ raw_data String COMMENT 'Hex',
+
+ -- trace --
+ \`index\` UInt32 COMMENT 'Action Ordinal',
+ receiver String,
+ context_free Bool,
+ elapsed Int64,
+ console String,
+ raw_return_value String,
+ json_return_value String,
+ creator_action_ordinal UInt32,
+ closest_unnotified_ancestor_action_ordinal UInt32,
+ execution_index UInt32,
+
+ -- block roots --
+ action_mroot String COMMENT 'Hash',
+)
+ ENGINE = $ENGINE_DEFAULT
+ PRIMARY KEY (block_date, block_number)
+ ORDER BY (block_date, block_number, block_hash, tx_hash, tx_index, \`index\`)
+ COMMENT 'Antelope actions';
+
+CREATE TABLE IF NOT EXISTS db_ops $ON_CLUSTER_DIRECTIVE
+(
+ -- clock --
+ block_time DateTime64(3, 'UTC'),
+ block_number UInt64,
+    block_hash String COMMENT 'Hash',
+ block_date Date,
+
+ -- transaction --
+ tx_hash String COMMENT 'Hash',
+ tx_index UInt64,
+ tx_status LowCardinality(String),
+ tx_status_code UInt8,
+ tx_success Bool,
+
+ -- storage change --
+ \`index\` UInt32,
+ operation LowCardinality(String) COMMENT 'Operation',
+ operation_code UInt8,
+ action_index UInt32,
+ code String,
+ scope String,
+ table_name String,
+ primary_key String,
+ old_payer String,
+ new_payer String,
+ old_data String,
+ new_data String,
+ old_data_json String,
+ new_data_json String,
+)
+ ENGINE = $ENGINE_DEFAULT
+ PRIMARY KEY (block_date, block_number)
+ ORDER BY (block_date, block_number, block_hash, tx_hash, \`index\`)
+ COMMENT 'Antelope database operations';
+EOM
+
+echo "[+] Created '$SCHEMA_FILE'"
+echo "[*] Run the following command to apply:"
+echo "cat $SCHEMA_FILE | clickhouse client -h --port 9000 -d -u --password "
diff --git a/index.ts b/index.ts
new file mode 100644
index 0000000..b3ac5fb
--- /dev/null
+++ b/index.ts
@@ -0,0 +1,175 @@
+import { Hono, type Context } from "hono";
+import { type RootResolver, graphqlServer } from '@hono/graphql-server';
+import { buildSchema } from 'graphql';
+import { SafeParseSuccess, z } from 'zod';
+
+import client from './src/clickhouse/client.js';
+import openapi from "./static/@typespec/openapi3/openapi.json";
+import * as prometheus from './src/prometheus.js';
+import { APP_VERSION } from "./src/config.js";
+import { logger } from './src/logger.js';
+import { makeUsageQuery } from "./src/usage.js";
+import { APIErrorResponse } from "./src/utils.js";
+import { usageOperationsToEndpointsMap, type EndpointReturnTypes, type UsageEndpoints, type ValidPathParams, type ValidUserParams } from "./src/types/api.js";
+import { paths } from './src/types/zod.gen.js';
+
+async function AntelopeTokenAPI() {
+ const app = new Hono();
+
+ // Tracking all incoming requests
+ app.use(async (ctx: Context, next) => {
+ const pathname = ctx.req.path;
+ logger.trace(`Incoming request: [${pathname}]`);
+ prometheus.request.inc({ pathname });
+
+ await next();
+ });
+
+ // ---------------
+ // --- Swagger ---
+ // ---------------
+
+ app.get(
+ "/",
+ async (_) => new Response(Bun.file("./swagger/index.html"))
+ );
+
+ app.get(
+ "/favicon.ico",
+ async (_) => new Response(Bun.file("./swagger/favicon.ico"))
+ );
+
+ // ------------
+ // --- Docs ---
+ // ------------
+
+ app.get(
+ "/openapi",
+ async (ctx: Context) => ctx.json<{ [key: string]: EndpointReturnTypes<"/openapi">; }, 200>(openapi)
+ );
+
+ app.get(
+ "/version",
+        async (ctx: Context) => ctx.json<EndpointReturnTypes<"/version">, 200>(APP_VERSION)
+ );
+
+ // ------------------
+ // --- Monitoring ---
+ // ------------------
+
+ app.get(
+ "/health",
+ async (ctx: Context) => {
+ const response = await client.ping();
+
+ if (!response.success) {
+ return APIErrorResponse(ctx, 500, "bad_database_response", response.error.message);
+ }
+
+ return new Response("OK");
+ }
+ );
+
+ app.get(
+ "/metrics",
+ async () => new Response(await prometheus.registry.metrics())
+ );
+
+ // --------------------------
+ // --- REST API endpoints ---
+ // --------------------------
+
+ const createUsageEndpoint = (endpoint: UsageEndpoints) => app.get(
+        // Hono uses a different syntax than OpenAPI for path parameters:
+ // `/{path_param}` (OpenAPI) VS `/:path_param` (Hono)
+ endpoint.replace(/{([^}]+)}/g, ":$1"),
+ async (ctx: Context) => {
+            // Use `unknown` for undefined schema definitions in `zod.gen.ts`
+ const path_params_schema = paths[endpoint]["get"]["parameters"]["path"] ?? z.unknown();
+ const query_params_schema = paths[endpoint]["get"]["parameters"]["query"] ?? z.unknown();
+ const path_params = path_params_schema.safeParse(ctx.req.param());
+ const query_params = query_params_schema.safeParse(ctx.req.query());
+
+ if (path_params.success && query_params.success) {
+ return makeUsageQuery(
+ ctx,
+ endpoint,
+ {
+                    ...path_params.data as SafeParseSuccess<ValidPathParams<typeof endpoint>>,
+                    ...query_params.data
+                } as ValidUserParams<typeof endpoint>
+ );
+ } else {
+ return APIErrorResponse(ctx, 400, "bad_query_input", { ...path_params.error, ...query_params.error });
+ }
+ }
+ );
+
+ // Create all API endpoints interacting with DB
+ Object.values(usageOperationsToEndpointsMap).forEach(e => createUsageEndpoint(e));
+
+ // ------------------------
+ // --- GraphQL endpoint ---
+ // ------------------------
+
+    // TODO: Make GraphQL endpoint use the same $SERVER parameter as Swagger if set?
+ const schema = buildSchema(await Bun.file("./static/@openapi-to-graphql/graphql/schema.graphql").text());
+    const filterFields: Array<string> = ['metrics'];
+
+ // @ts-ignore Ignore private field warning for filtering out certain operations from the schema
+ filterFields.forEach(f => delete schema._queryType._fields[f]);
+
+ const rootResolver: RootResolver = async (ctx?: Context) => {
+ if (ctx) {
+ // GraphQL resolver uses the same SQL queries backend as the REST API (`makeUsageQuery`)
+ const createGraphQLUsageResolver = (endpoint: UsageEndpoints) =>
+                async (args: ValidUserParams<typeof endpoint>) => {
+ return await (await makeUsageQuery(ctx, endpoint, { ...args })).json();
+ };
+
+
+ return Object.keys(usageOperationsToEndpointsMap).reduce(
+ // SQL queries endpoints
+ (resolver, op) => Object.assign(
+ resolver,
+ {
+ [op]: createGraphQLUsageResolver(usageOperationsToEndpointsMap[op] as UsageEndpoints)
+ }
+ ),
+ // Other endpoints
+ {
+ health: async () => {
+ const response = await client.ping();
+ return response.success ? "OK" : `[500] bad_database_response: ${response.error.message}`;
+ },
+ openapi: () => openapi,
+ metrics: async () => await prometheus.registry.metrics(),
+ version: () => APP_VERSION
+ }
+ );
+ }
+ };
+
+ // TODO: Find way to log GraphQL queries (need to workaround middleware consuming Request)
+ // See: https://github.com/honojs/middleware/issues/81
+ //app.use('/graphql', async (ctx: Context) => logger.trace(await ctx.req.json()))
+
+ app.use(
+ '/graphql',
+ graphqlServer({
+ schema,
+ rootResolver,
+ graphiql: true, // if `true`, presents GraphiQL when the GraphQL endpoint is loaded in a browser.
+ })
+ );
+
+ // -------------
+ // --- Miscs ---
+ // -------------
+
+ app.notFound((ctx: Context) => APIErrorResponse(ctx, 404, "route_not_found", `Path not found: ${ctx.req.method} ${ctx.req.path}`));
+
+ return app;
+}
+
+export default await AntelopeTokenAPI();
\ No newline at end of file
diff --git a/kubb.config.ts b/kubb.config.ts
new file mode 100644
index 0000000..27d84bc
--- /dev/null
+++ b/kubb.config.ts
@@ -0,0 +1,27 @@
+import { defineConfig } from '@kubb/core';
+import { pluginOas } from '@kubb/plugin-oas';
+import { pluginZod } from '@kubb/swagger-zod';
+export default defineConfig(() => {
+ return {
+ root: '.',
+ input: {
+ path: './static/@typespec/openapi3/openapi.json',
+ },
+ output: {
+ path: './src/types'
+ },
+ plugins: [
+ pluginOas({
+ output: false,
+ validate: false,
+ }),
+ pluginZod({
+ output: {
+ path: './zod.gen.ts',
+ },
+ typedSchema: true,
+ coercion: true,
+ })
+ ],
+ };
+});
\ No newline at end of file
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..e555fa8
--- /dev/null
+++ b/package.json
@@ -0,0 +1,59 @@
+{
+ "name": "antelope-transactions-api",
+ "description": "Transactions information from the Antelope blockchains",
+ "version": "0.1.0",
+ "homepage": "https://github.com/pinax-network/antelope-transactions-api",
+ "license": "MIT",
+ "authors": [
+ {
+ "name": "Etienne Donneger",
+ "email": "etienne@pinax.network",
+ "url": "https://github.com/0237h"
+ },
+ {
+ "name": "Denis Carriere",
+ "email": "denis@pinax.network",
+ "url": "https://github.com/DenisCarriere/"
+ }
+ ],
+ "dependencies": {
+ "@clickhouse/client-web": "latest",
+ "@hono/graphql-server": "^0.5.0",
+ "@kubb/cli": "^2.23.3",
+ "@kubb/core": "^2.23.3",
+ "@kubb/plugin-oas": "^2.23.3",
+ "@kubb/swagger-zod": "^2.23.3",
+ "commander": "latest",
+ "dotenv": "latest",
+ "hono": "latest",
+ "prom-client": "latest",
+ "tslog": "latest",
+ "zod": "latest"
+ },
+ "private": true,
+ "scripts": {
+ "build": "export APP_VERSION=$(git rev-parse --short HEAD) && bun build --compile index.ts --outfile antelope-token-api",
+ "clean": "bun i --force",
+ "dev": "export APP_VERSION=$(git rev-parse --short HEAD) && bun --watch index.ts",
+ "lint": "export APP_VERSION=$(git rev-parse --short HEAD) && bun run tsc --noEmit --skipLibCheck --pretty",
+ "start": "export APP_VERSION=$(git rev-parse --short HEAD) && bun index.ts",
+ "test": "bun test --coverage",
+ "types": "bun run tsp compile ./src/typespec --output-dir static && bun run openapi-to-graphql ./static/@typespec/openapi3/openapi.json --save static/@openapi-to-graphql/graphql/schema.graphql --simpleNames --singularNames --no-viewer -H 'X-Api-Key:changeme' && bun run kubb",
+ "types:check": "bun run tsp compile ./src/typespec --no-emit --pretty --warn-as-error",
+ "types:format": "bun run tsp format src/typespec/**/*.tsp",
+ "types:watch": "bun run tsp compile ./src/typespec --watch --pretty --warn-as-error"
+ },
+ "type": "module",
+ "devDependencies": {
+ "@typespec/compiler": "latest",
+ "@typespec/openapi": "latest",
+ "@typespec/openapi3": "latest",
+ "@typespec/protobuf": "latest",
+ "bun-types": "latest",
+ "openapi-to-graphql-cli": "^3.0.7",
+ "typescript": "latest"
+ },
+ "prettier": {
+ "tabWidth": 4
+ }
+}
\ No newline at end of file
diff --git a/schema.sql b/schema.sql
new file mode 100644
index 0000000..c6fe3f2
--- /dev/null
+++ b/schema.sql
@@ -0,0 +1,594 @@
+--------------------------------------
+-- AUTO-GENERATED FILE, DO NOT EDIT --
+--------------------------------------
+-- This SQL file creates the required tables for a single Antelope chain.
+-- You can use the ClickHouse client command to execute it:
+-- $ cat schema.sql | clickhouse client -h <host> --port 9000 -d <database> -u <user> --password <password>
+
+-------------------------------------------------
+-- Meta tables to store Substreams information --
+-------------------------------------------------
+
+CREATE TABLE IF NOT EXISTS cursors ON CLUSTER "antelope"
+(
+ id String,
+ cursor String,
+ block_num Int64,
+ block_id String
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (id)
+ ORDER BY (id);
+
+-----------------------------------------------------------
+-- Tables to store the raw events without any processing --
+-----------------------------------------------------------
+
+-- The table to store all transfers. This uses the trx_id as first primary key so we can use this table to do
+-- transfer lookups based on a transaction id.
+CREATE TABLE IF NOT EXISTS transfer_events ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+ -- contract & scope --
+ contract String,
+ symcode String,
+ -- data payload --
+ from String,
+ to String,
+ quantity String,
+ memo String,
+ -- extras --
+ precision UInt32,
+ amount Int64,
+ value Float64,
+ -- meta --
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (trx_id, action_index)
+ ORDER BY (trx_id, action_index);
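+
+-- Example lookup by transaction id (hypothetical value):
+-- SELECT * FROM transfer_events WHERE trx_id = '<trx_id>';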
+
+-- The table to store all account balance changes from the database operations. This uses the account and block_num as
+-- first primary keys so we can use this table to lookup the account balance from a certain block number.
+CREATE TABLE IF NOT EXISTS balance_change_events ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+ -- contract & scope --
+ contract String,
+ symcode String,
+ -- data payload --
+ account String,
+ balance String,
+ balance_delta Int64,
+ -- extras --
+ precision UInt32,
+ amount Int64,
+ value Float64,
+ -- meta --
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (account, block_num, trx_id, action_index)
+ ORDER BY (account, block_num, trx_id, action_index);
+
+-- The table to store all token supply changes from the database operations. This uses the account and block_num as
+-- first primary keys so we can use this table to lookup token supplies from a certain block number.
+CREATE TABLE IF NOT EXISTS supply_change_events ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+ -- contract & scope --
+ contract String,
+ symcode String,
+ -- data payload --
+ issuer String,
+ max_supply String,
+ supply String,
+ supply_delta Int64,
+ -- extras --
+ precision UInt32,
+ amount Int64,
+ value Float64,
+ -- meta --
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (contract, block_num, trx_id, action_index)
+ ORDER BY (contract, block_num, trx_id, action_index);
+
+-- Table to contain all 'eosio.token:issue' transactions
+CREATE TABLE IF NOT EXISTS issue_events ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+ -- contract & scope --
+ contract String,
+ symcode String,
+ -- data payload --
+ issuer String,
+ to String,
+ quantity String,
+ memo String,
+ -- extras --
+ precision UInt32,
+ amount Int64,
+ value Float64,
+ -- meta --
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (contract, symcode, to, amount, trx_id, action_index)
+ ORDER BY (contract, symcode, to, amount, trx_id, action_index);
+
+-- Table to contain all 'eosio.token:retire' transactions --
+CREATE TABLE IF NOT EXISTS retire_events ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+ -- contract & scope --
+ contract String,
+ symcode String,
+ -- data payload --
+ from String,
+ quantity String,
+ memo String,
+ -- extras --
+ precision UInt32,
+ amount Int64,
+ value Float64,
+ -- meta --
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (contract, symcode, amount, trx_id, action_index)
+ ORDER BY (contract, symcode, amount, trx_id, action_index);
+
+-- Table to contain all 'eosio.token:create' transactions
+CREATE TABLE IF NOT EXISTS create_events ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+ -- contract & scope --
+ contract String,
+ symcode String,
+ -- data payload --
+ issuer String,
+ maximum_supply String,
+ -- extras --
+ precision UInt32,
+ amount Int64,
+ value Float64,
+ -- meta --
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (contract, symcode, trx_id, action_index)
+ ORDER BY (contract, symcode, trx_id, action_index);
+
+-----------------------------------------------
+-- Tables to store the extracted information --
+-----------------------------------------------
+
+-- Table to store up to date balances per account and token
+CREATE TABLE IF NOT EXISTS account_balances ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ account String,
+ balance String,
+ balance_delta Int64,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime,
+ ver UInt64
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}', ver)
+ PRIMARY KEY (account, contract, symcode)
+ ORDER BY (account, contract, symcode);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS account_balances_mv ON CLUSTER "antelope"
+ TO account_balances
+AS
+SELECT *,
+ (block_num + action_index) AS ver
+FROM balance_change_events;
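+
+-- ReplacingMergeTree deduplicates rows asynchronously at merge time, so a lookup of the
+-- latest balance should use FINAL to force deduplication at query time (hypothetical values):
+-- SELECT account, balance FROM account_balances FINAL
+-- WHERE account = '<account>' AND contract = 'eosio.token' AND symcode = 'EOS';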
+
+-- Table to store historical balances per account and token
+CREATE TABLE IF NOT EXISTS historical_account_balances ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ account String,
+ balance String,
+ balance_delta Int64,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime,
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (block_num, account, contract, symcode)
+ ORDER BY (block_num, account, contract, symcode);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS historical_account_balances_mv ON CLUSTER "antelope"
+ TO historical_account_balances
+AS
+SELECT *
+FROM balance_change_events;
+
+-- Table to store up to date positive balances per account and token for token holders
+CREATE TABLE IF NOT EXISTS token_holders ON CLUSTER "antelope"
+(
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ account String,
+ value Float64,
+
+ block_num UInt64,
+ has_null_balance UInt8,
+ ver UInt64
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}', ver, has_null_balance)
+ PRIMARY KEY (has_null_balance, contract, symcode, account)
+ ORDER BY (has_null_balance, contract, symcode, account);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS token_holders_mv ON CLUSTER "antelope"
+ TO token_holders
+AS
+SELECT action_index,
+ contract,
+ symcode,
+ account,
+ value,
+ block_num,
+ if(amount > 0, 0, 1) AS has_null_balance,
+ (block_num + action_index) AS ver
+FROM balance_change_events;
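+
+-- Rows with has_null_balance = 1 mark emptied balances; a holders query would typically
+-- filter them out (hypothetical values):
+-- SELECT account, value FROM token_holders FINAL
+-- WHERE has_null_balance = 0 AND contract = 'eosio.token' AND symcode = 'EOS';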
+
+-- Table to store up to date token supplies
+CREATE TABLE IF NOT EXISTS token_supplies ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ issuer String,
+ max_supply String,
+ supply String,
+ supply_delta Int64,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime,
+ ver UInt64
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}', ver)
+ PRIMARY KEY (contract, symcode, issuer)
+ ORDER BY (contract, symcode, issuer);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS token_supplies_mv ON CLUSTER "antelope"
+ TO token_supplies
+AS
+SELECT *,
+ (block_num + action_index) AS ver
+FROM supply_change_events;
+
+-- Table to store historical token supplies per token
+CREATE TABLE IF NOT EXISTS historical_token_supplies ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ issuer String,
+ max_supply String,
+ supply String,
+ supply_delta Int64,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime,
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (block_num, contract, symcode, issuer)
+ ORDER BY (block_num, contract, symcode, issuer);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS historical_token_supplies_mv ON CLUSTER "antelope"
+ TO historical_token_supplies
+AS
+SELECT *
+FROM supply_change_events;
+
+-- Table to store token transfers primarily indexed by the 'contract' field --
+CREATE TABLE IF NOT EXISTS transfers_contract ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ from String,
+ to String,
+ quantity String,
+ memo String,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (contract, symcode, trx_id, action_index)
+ ORDER BY (contract, symcode, trx_id, action_index);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS transfers_contract_mv ON CLUSTER "antelope"
+ TO transfers_contract
+AS
+SELECT trx_id,
+ action_index,
+ contract,
+ symcode,
+ from,
+ to,
+ quantity,
+ memo,
+ precision,
+ amount,
+ value,
+ block_num,
+ timestamp
+FROM transfer_events;
+
+-- Table to store token transfers primarily indexed by the 'from' field --
+CREATE TABLE IF NOT EXISTS transfers_from ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ from String,
+ to String,
+ quantity String,
+ memo String,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (from, to, contract, symcode, trx_id, action_index)
+ ORDER BY (from, to, contract, symcode, trx_id, action_index);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS transfers_from_mv ON CLUSTER "antelope"
+ TO transfers_from
+AS
+SELECT trx_id,
+ action_index,
+ contract,
+ symcode,
+ from,
+ to,
+ quantity,
+ memo,
+ precision,
+ amount,
+ value,
+ block_num,
+ timestamp
+FROM transfer_events;
+
+-- Table to store historical token transfers 'from' address --
+CREATE TABLE IF NOT EXISTS historical_transfers_from ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ from String,
+ to String,
+ quantity String,
+ memo String,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (block_num, from, to, contract, symcode, trx_id, action_index)
+ ORDER BY (block_num, from, to, contract, symcode, trx_id, action_index);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS historical_transfers_from_mv ON CLUSTER "antelope"
+ TO historical_transfers_from
+AS
+SELECT trx_id,
+ action_index,
+ contract,
+ symcode,
+ from,
+ to,
+ quantity,
+ memo,
+ precision,
+ amount,
+ value,
+ block_num,
+ timestamp
+FROM transfer_events;
+
+-- Table to store token transfers primarily indexed by the 'to' field --
+CREATE TABLE IF NOT EXISTS transfers_to ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ from String,
+ to String,
+ quantity String,
+ memo String,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (to, contract, symcode, trx_id, action_index)
+ ORDER BY (to, contract, symcode, trx_id, action_index);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS transfers_to_mv ON CLUSTER "antelope"
+ TO transfers_to
+AS
+SELECT trx_id,
+ action_index,
+ contract,
+ symcode,
+ from,
+ to,
+ quantity,
+ memo,
+ precision,
+ amount,
+ value,
+ block_num,
+ timestamp
+FROM transfer_events;
+
+-- Table to store historical token transfers 'to' address --
+CREATE TABLE IF NOT EXISTS historical_transfers_to ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ from String,
+ to String,
+ quantity String,
+ memo String,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (block_num, to, contract, symcode, trx_id, action_index)
+ ORDER BY (block_num, to, contract, symcode, trx_id, action_index);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS historical_transfers_to_mv ON CLUSTER "antelope"
+ TO historical_transfers_to
+AS
+SELECT trx_id,
+ action_index,
+ contract,
+ symcode,
+ from,
+ to,
+ quantity,
+ memo,
+ precision,
+ amount,
+ value,
+ block_num,
+ timestamp
+FROM transfer_events;
+
+-- Table to store token transfers primarily indexed by the 'block_num' field
+CREATE TABLE IF NOT EXISTS transfers_block_num ON CLUSTER "antelope"
+(
+ trx_id String,
+ action_index UInt32,
+
+ contract String,
+ symcode String,
+
+ from String,
+ to String,
+ quantity String,
+ memo String,
+
+ precision UInt32,
+ amount Int64,
+ value Float64,
+
+ block_num UInt64,
+ timestamp DateTime
+)
+ ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
+ PRIMARY KEY (block_num, contract, symcode, trx_id, action_index)
+ ORDER BY (block_num, contract, symcode, trx_id, action_index);
+
+CREATE MATERIALIZED VIEW IF NOT EXISTS transfers_block_num_mv ON CLUSTER "antelope"
+ TO transfers_block_num
+AS
+SELECT trx_id,
+ action_index,
+ contract,
+ symcode,
+ from,
+ to,
+ quantity,
+ memo,
+ precision,
+ amount,
+ value,
+ block_num,
+ timestamp
+FROM transfer_events;
diff --git a/src/clickhouse/client.ts b/src/clickhouse/client.ts
new file mode 100644
index 0000000..4552940
--- /dev/null
+++ b/src/clickhouse/client.ts
@@ -0,0 +1,23 @@
+import { createClient } from "@clickhouse/client-web";
+import { ping } from "./ping.js";
+import { APP_NAME, config } from "../config.js";
+
+// TODO: Check how to abort previous queries if haven't returned yet
+const client = createClient({
+ ...config,
+ clickhouse_settings: {
+ allow_experimental_object_type: 1,
+ exact_rows_before_limit: 1,
+ output_format_json_quote_64bit_integers: 0,
+ readonly: "1"
+ },
+ application: APP_NAME,
+});
+
+// These overrides should not be required but the @clickhouse/client-web instance
+// does not work well with Bun's implementation of Node streams.
+// https://github.com/oven-sh/bun/issues/5470
+client.command = client.exec;
+client.ping = ping;
+
+export default client;
\ No newline at end of file
diff --git a/src/clickhouse/makeQuery.ts b/src/clickhouse/makeQuery.ts
new file mode 100644
index 0000000..d30d1dc
--- /dev/null
+++ b/src/clickhouse/makeQuery.ts
@@ -0,0 +1,25 @@
+import client from "./client.js";
+
+import { logger } from "../logger.js";
+import * as prometheus from "../prometheus.js";
+
+import type { ResponseJSON } from "@clickhouse/client-web";
+import type { ValidQueryParams } from "../types/api.js";
+
+export async function makeQuery(query: string, query_params: ValidQueryParams) {
+ logger.trace({ query, query_params });
+
+ const response = await client.query({ query, query_params, format: "JSON" });
+ const data: ResponseJSON = await response.json();
+
+ prometheus.query.inc();
+ if ( data.statistics ) {
+ prometheus.bytes_read.inc(data.statistics.bytes_read);
+ prometheus.rows_read.inc(data.statistics.rows_read);
+ prometheus.elapsed.inc(data.statistics.elapsed);
+ }
+
+ logger.trace({ statistics: data.statistics, rows: data.rows, rows_before_limit_at_least: data.rows_before_limit_at_least });
+
+ return data;
+}
\ No newline at end of file
diff --git a/src/clickhouse/ping.ts b/src/clickhouse/ping.ts
new file mode 100644
index 0000000..5cb8720
--- /dev/null
+++ b/src/clickhouse/ping.ts
@@ -0,0 +1,15 @@
+import { PingResult } from "@clickhouse/client-web";
+import client from "./client.js";
+import { logger } from "../logger.js";
+
+// The native client `ping()` does not work with Bun's implementation of Node streams,
+// so it is reimplemented here with a simple query.
+export async function ping(): Promise<PingResult> {
+ try {
+ await client.exec({ query: "SELECT 1" });
+ logger.info("Successfully pinged database");
+ return { success: true };
+ } catch (err) {
+ const message = typeof err === "string" ? err : JSON.stringify(err);
+ return { success: false, error: new Error(message) };
+ }
+};
\ No newline at end of file
diff --git a/src/config.ts b/src/config.ts
new file mode 100644
index 0000000..53f1912
--- /dev/null
+++ b/src/config.ts
@@ -0,0 +1,49 @@
+import "dotenv/config";
+import { z } from 'zod';
+import { Option, program } from "commander";
+
+import pkg from "../package.json";
+
+export const DEFAULT_PORT = "8080";
+export const DEFAULT_HOSTNAME = "localhost";
+export const DEFAULT_HOST = "http://localhost:8123";
+export const DEFAULT_DATABASE = "default";
+export const DEFAULT_USERNAME = "default";
+export const DEFAULT_PASSWORD = "";
+export const DEFAULT_MAX_LIMIT = 10000;
+export const DEFAULT_VERBOSE = false;
+export const DEFAULT_SORT_BY = "DESC";
+export const APP_NAME = pkg.name;
+export const APP_VERSION = {
+ version: pkg.version,
+ commit: process.env.APP_VERSION || "unknown"
+};
+
+// parse command line options
+const opts = program
+ .name(pkg.name)
+ .version(`${APP_VERSION.version}+${APP_VERSION.commit}`)
+ .description(pkg.description)
+ .showHelpAfterError()
+ .addOption(new Option("-p, --port ", "HTTP port on which to attach the API").env("PORT").default(DEFAULT_PORT))
+ .addOption(new Option("--hostname ", "Server listen on HTTP hostname").env("HOSTNAME").default(DEFAULT_HOSTNAME))
+ .addOption(new Option("--host ", "Database HTTP hostname").env("HOST").default(DEFAULT_HOST))
+ .addOption(new Option("--database ", "The database to use inside ClickHouse").env("DATABASE").default(DEFAULT_DATABASE))
+ .addOption(new Option("--username ", "Database user").env("USERNAME").default(DEFAULT_USERNAME))
+ .addOption(new Option("--password ", "Password associated with the specified username").env("PASSWORD").default(DEFAULT_PASSWORD))
+ .addOption(new Option("--max-limit ", "Maximum LIMIT queries").env("MAX_LIMIT").default(DEFAULT_MAX_LIMIT))
+ .addOption(new Option("-v, --verbose ", "Enable verbose logging").choices(["true", "false"]).env("VERBOSE").default(DEFAULT_VERBOSE))
+ .parse()
+ .opts();
+
+export const config = z.object({
+ port: z.string(),
+ hostname: z.string(),
+ host: z.string(),
+ database: z.string(),
+ username: z.string(),
+ password: z.string(),
+ maxLimit: z.coerce.number(),
+ // `z.coerce.boolean` doesn't parse boolean string values as expected (see https://github.com/colinhacks/zod/issues/1630)
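+ // (e.g. `z.coerce.boolean().parse("false")` returns `true`, since any non-empty string is truthy)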
+ verbose: z.coerce.string().transform((val) => val.toLowerCase() === "true"),
+}).parse(opts);
diff --git a/src/logger.ts b/src/logger.ts
new file mode 100644
index 0000000..3f142c3
--- /dev/null
+++ b/src/logger.ts
@@ -0,0 +1,24 @@
+import { Logger, type ILogObj } from "tslog";
+import { APP_NAME, APP_VERSION, config } from "./config.js";
+
+class TsLogger extends Logger<ILogObj> {
+ constructor() {
+ super();
+ this.settings.minLevel = 5;
+ this.settings.name = `${APP_NAME}:${APP_VERSION.version}+${APP_VERSION.commit}`;
+ }
+
+ public enable(type: "pretty" | "json" = "pretty") {
+ this.settings.type = type;
+ this.settings.minLevel = 0;
+ this.info("Enabled logger");
+ }
+
+ public disable() {
+ this.settings.type = "hidden";
+ this.info("Disabled logger");
+ }
+}
+
+export const logger = new TsLogger();
+if (config.verbose) logger.enable();
\ No newline at end of file
diff --git a/src/prometheus.ts b/src/prometheus.ts
new file mode 100644
index 0000000..baa906f
--- /dev/null
+++ b/src/prometheus.ts
@@ -0,0 +1,42 @@
+// From https://github.com/pinax-network/substreams-sink-websockets/blob/main/src/prometheus.ts
+import client, { Counter, CounterConfiguration, Gauge, GaugeConfiguration } from 'prom-client';
+import { logger } from "./logger.js";
+
+export const registry = new client.Registry();
+
+// Metrics
+export function registerCounter(name: string, help = "help", labelNames: string[] = [], config?: CounterConfiguration<string>) {
+ try {
+ registry.registerMetric(new Counter({ name, help, labelNames, ...config }));
+ logger.debug(`Registered new counter metric: ${name}`);
+ return registry.getSingleMetric(name) as Counter;
+ } catch (e) {
+ logger.error("Error registering counter:", { name, e });
+ throw new Error(`${e}`);
+ }
+}
+
+export function registerGauge(name: string, help = "help", labelNames: string[] = [], config?: GaugeConfiguration<string>) {
+ try {
+ registry.registerMetric(new Gauge({ name, help, labelNames, ...config }));
+ logger.debug(`Registered new gauge metric: ${name}`);
+ return registry.getSingleMetric(name) as Gauge;
+ } catch (e) {
+ logger.error("Error registering gauge:", { name, e });
+ throw new Error(`${e}`);
+ }
+}
+
+export async function getSingleMetric(name: string) {
+ const metric = registry.getSingleMetric(name);
+ const get = await metric?.get();
+ return get?.values[0]?.value;
+}
+
+// REST API metrics
+export const request_error = registerCounter('request_error', 'Total Requests errors', ['pathname', 'status']);
+export const request = registerCounter('request', 'Total Requests', ['pathname']);
+export const query = registerCounter('query', 'Clickhouse DB queries made');
+export const bytes_read = registerCounter('bytes_read', 'Clickhouse DB Statistics bytes read');
+export const rows_read = registerCounter('rows_read', 'Clickhouse DB Statistics rows read');
+export const elapsed = registerCounter('elapsed', 'Clickhouse DB Statistics query elapsed time');
diff --git a/src/types/README.md b/src/types/README.md
new file mode 100644
index 0000000..c8695ca
--- /dev/null
+++ b/src/types/README.md
@@ -0,0 +1,9 @@
+### `zod.gen.ts`
+
+> [!CAUTION]
+> Auto-generated [Zod](https://zod.dev/) schema definitions from the [OpenAPI3](../static/@typespec/openapi3/openapi.json) specification using [`Kubb`](https://kubb.dev). **DO NOT EDIT MANUALLY**.
+> Use `bun run types` to run the code generation.
+
+### `api.ts`
+
+Utility types based on the generated Zod schemas.
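+
+For example, the generated schemas can validate raw request parameters at runtime (a minimal sketch; the input values are hypothetical):
+
+```ts
+import { usageBlocksDateQueryParamsSchema } from "./zod.gen.js";
+
+// Coerces string inputs (e.g. from a query string) into numbers, rejecting invalid values
+const params = usageBlocksDateQueryParamsSchema.parse({ limit: "10", page: "2" });
+console.log(params); // { limit: 10, page: 2 }
+```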
\ No newline at end of file
diff --git a/src/types/api.ts b/src/types/api.ts
new file mode 100644
index 0000000..73a9a3f
--- /dev/null
+++ b/src/types/api.ts
@@ -0,0 +1,35 @@
+import { z } from "zod";
+
+import { operations, paths } from './zod.gen.js';
+
+type GetEndpoints = typeof paths;
+export type EndpointReturnTypes<E extends keyof GetEndpoints> = z.infer<GetEndpoints[E]["get"]["responses"][200]>;
+export type EndpointParameters<E extends keyof GetEndpoints> = GetEndpoints[E]["get"]["parameters"];
+
+export type NonUsageEndpoints = "/health" | "/metrics" | "/version" | "/openapi";
+// Usage endpoints interact with the database
+export type UsageEndpoints = Exclude<keyof GetEndpoints, NonUsageEndpoints>;
+export type UsageResponse = EndpointReturnTypes<UsageEndpoints>["data"];
+export type UsageParameters = EndpointParameters<UsageEndpoints>;
+
+export type ValidPathParams<E extends UsageEndpoints> = EndpointParameters<E>["path"];
+export type ValidUserParams<E extends UsageEndpoints> = NonNullable<EndpointParameters<E>> extends { path: undefined; } ?
+ // Combine path and query parameters only if path exists to prevent "never" on intersection
+ z.infer<NonNullable<EndpointParameters<E>>["query"]>
+ :
+ z.infer<NonNullable<EndpointParameters<E>>["query"] & ValidPathParams<E>>;
+export type AdditionalQueryParams = { offset?: number; min_block?: number; max_block?: number; };
+// Allow any valid parameters from the endpoint to be used as SQL query parameters
+export type ValidQueryParams = ValidUserParams<UsageEndpoints> & AdditionalQueryParams;
+
+// Map stripped operation names (e.g. `Usage_blocksDate` stripped to `blocksDate`) to endpoint paths (e.g. `/blocks/{date}`)
+// This is used to map GraphQL operations to REST endpoints
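+// Resulting map for this API:
+// { blocksDate: "/blocks/{date}", blocksHash: "/blocks/{hash}", blocksNumber: "/blocks/{number}" }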
+export const usageOperationsToEndpointsMap = Object.entries(operations).filter(([k, _]) => k.startsWith("Usage")).reduce(
+ (o, [k, v]) => Object.assign(
+ o,
+ {
+ // Split once on first underscore to create keys (e.g. `Usage_transfersAccount` => `transfersAccount`)
+ [k.split('_')[1] as string]: Object.entries(paths).find(([_, v_]) => v_.get === v)?.[0]
+ }
+ ), {}
+) as { [key in string]: UsageEndpoints };
\ No newline at end of file
diff --git a/src/types/zod.gen.ts b/src/types/zod.gen.ts
new file mode 100644
index 0000000..7fdb642
--- /dev/null
+++ b/src/types/zod.gen.ts
@@ -0,0 +1,256 @@
+import { z } from "zod";
+
+
+export const apiErrorSchema = z.object({ "status": z.union([z.literal(500), z.literal(504), z.literal(400), z.literal(401), z.literal(403), z.literal(404), z.literal(405)]), "code": z.enum(["bad_database_response", "bad_header", "missing_required_header", "bad_query_input", "database_timeout", "forbidden", "internal_server_error", "method_not_allowed", "route_not_found", "unauthorized"]), "message": z.coerce.string() });
+export type ApiErrorSchema = z.infer<typeof apiErrorSchema>;
+
+
+export const modelsBlocksSchema = z.object({ "block_time": z.string().datetime(), "block_number": z.coerce.number(), "block_hash": z.coerce.string(), "block_date": z.string().date(), "parent_hash": z.coerce.string(), "producer": z.coerce.string(), "confirmed": z.coerce.number(), "schedule_version": z.coerce.number(), "version": z.coerce.number(), "producer_signature": z.coerce.string(), "dpos_proposed_irreversible_blocknum": z.coerce.number(), "dpos_irreversible_blocknum": z.coerce.number(), "transaction_mroot": z.coerce.string(), "action_mroot": z.coerce.string(), "blockroot_merkle_node_count": z.coerce.number(), "size": z.coerce.number(), "total_transactions": z.coerce.number(), "successful_transactions": z.coerce.number(), "failed_transactions": z.coerce.number(), "total_actions": z.coerce.number(), "total_db_ops": z.coerce.number() });
+export type ModelsBlocksSchema = z.infer<typeof modelsBlocksSchema>;
+
+
+export const paginationSchema = z.object({ "next_page": z.coerce.number(), "previous_page": z.coerce.number(), "total_pages": z.coerce.number(), "total_results": z.coerce.number() });
+export type PaginationSchema = z.infer<typeof paginationSchema>;
+
+
+export const queryStatisticsSchema = z.object({ "elapsed": z.coerce.number(), "rows_read": z.coerce.number(), "bytes_read": z.coerce.number() });
+export type QueryStatisticsSchema = z.infer<typeof queryStatisticsSchema>;
+
+
+export const responseMetadataSchema = z.object({ "statistics": z.lazy(() => queryStatisticsSchema).nullable(), "next_page": z.coerce.number(), "previous_page": z.coerce.number(), "total_pages": z.coerce.number(), "total_results": z.coerce.number() });
+export type ResponseMetadataSchema = z.infer<typeof responseMetadataSchema>;
+
+
+export const versionSchema = z.object({ "version": z.coerce.string().regex(new RegExp("^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$")), "commit": z.coerce.string().regex(new RegExp("^[0-9a-f]{7}$")) });
+export type VersionSchema = z.infer<typeof versionSchema>;
+
+
+export const usageBlocksDatePathParamsSchema = z.object({ "date": z.string().date() });
+export type UsageBlocksDatePathParamsSchema = z.infer<typeof usageBlocksDatePathParamsSchema>;
+
+ export const usageBlocksDateQueryParamsSchema = z.object({ "limit": z.coerce.number().optional(), "page": z.coerce.number().optional() }).optional();
+export type UsageBlocksDateQueryParamsSchema = z.infer<typeof usageBlocksDateQueryParamsSchema>;
+/**
+ * @description Array of blocks.
+ */
+export const usageBlocksDate200Schema = z.object({ "data": z.array(z.lazy(() => modelsBlocksSchema)), "meta": z.lazy(() => responseMetadataSchema) });
+export type UsageBlocksDate200Schema = z.infer<typeof usageBlocksDate200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const usageBlocksDateErrorSchema = z.lazy(() => apiErrorSchema);
+export type UsageBlocksDateErrorSchema = z.infer<typeof usageBlocksDateErrorSchema>;
+/**
+ * @description Array of blocks.
+ */
+export const usageBlocksDateQueryResponseSchema = z.object({ "data": z.array(z.lazy(() => modelsBlocksSchema)), "meta": z.lazy(() => responseMetadataSchema) });
+export type UsageBlocksDateQueryResponseSchema = z.infer<typeof usageBlocksDateQueryResponseSchema>;
+
+
+export const usageBlocksHashPathParamsSchema = z.object({ "hash": z.coerce.string() });
+export type UsageBlocksHashPathParamsSchema = z.infer<typeof usageBlocksHashPathParamsSchema>;
+
+ export const usageBlocksHashQueryParamsSchema = z.object({ "limit": z.coerce.number().optional(), "page": z.coerce.number().optional() }).optional();
+export type UsageBlocksHashQueryParamsSchema = z.infer<typeof usageBlocksHashQueryParamsSchema>;
+/**
+ * @description Array of blocks.
+ */
+export const usageBlocksHash200Schema = z.object({ "data": z.array(z.lazy(() => modelsBlocksSchema)), "meta": z.lazy(() => responseMetadataSchema) });
+export type UsageBlocksHash200Schema = z.infer<typeof usageBlocksHash200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const usageBlocksHashErrorSchema = z.lazy(() => apiErrorSchema);
+export type UsageBlocksHashErrorSchema = z.infer<typeof usageBlocksHashErrorSchema>;
+/**
+ * @description Array of blocks.
+ */
+export const usageBlocksHashQueryResponseSchema = z.object({ "data": z.array(z.lazy(() => modelsBlocksSchema)), "meta": z.lazy(() => responseMetadataSchema) });
+export type UsageBlocksHashQueryResponseSchema = z.infer<typeof usageBlocksHashQueryResponseSchema>;
+
+
+export const usageBlocksNumberPathParamsSchema = z.object({ "number": z.coerce.number() });
+export type UsageBlocksNumberPathParamsSchema = z.infer<typeof usageBlocksNumberPathParamsSchema>;
+
+ export const usageBlocksNumberQueryParamsSchema = z.object({ "limit": z.coerce.number().optional(), "page": z.coerce.number().optional() }).optional();
+export type UsageBlocksNumberQueryParamsSchema = z.infer<typeof usageBlocksNumberQueryParamsSchema>;
+/**
+ * @description Array of blocks.
+ */
+export const usageBlocksNumber200Schema = z.object({ "data": z.array(z.lazy(() => modelsBlocksSchema)), "meta": z.lazy(() => responseMetadataSchema) });
+export type UsageBlocksNumber200Schema = z.infer<typeof usageBlocksNumber200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const usageBlocksNumberErrorSchema = z.lazy(() => apiErrorSchema);
+export type UsageBlocksNumberErrorSchema = z.infer<typeof usageBlocksNumberErrorSchema>;
+/**
+ * @description Array of blocks.
+ */
+export const usageBlocksNumberQueryResponseSchema = z.object({ "data": z.array(z.lazy(() => modelsBlocksSchema)), "meta": z.lazy(() => responseMetadataSchema) });
+export type UsageBlocksNumberQueryResponseSchema = z.infer<typeof usageBlocksNumberQueryResponseSchema>;
+
+ /**
+ * @description OK or ApiError.
+ */
+export const monitoringHealth200Schema = z.coerce.string();
+export type MonitoringHealth200Schema = z.infer<typeof monitoringHealth200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const monitoringHealthErrorSchema = z.lazy(() => apiErrorSchema);
+export type MonitoringHealthErrorSchema = z.infer<typeof monitoringHealthErrorSchema>;
+/**
+ * @description OK or ApiError.
+ */
+export const monitoringHealthQueryResponseSchema = z.coerce.string();
+export type MonitoringHealthQueryResponseSchema = z.infer<typeof monitoringHealthQueryResponseSchema>;
+
+ /**
+ * @description Metrics as text.
+ */
+export const monitoringMetrics200Schema = z.coerce.string();
+export type MonitoringMetrics200Schema = z.infer<typeof monitoringMetrics200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const monitoringMetricsErrorSchema = z.lazy(() => apiErrorSchema);
+export type MonitoringMetricsErrorSchema = z.infer<typeof monitoringMetricsErrorSchema>;
+/**
+ * @description Metrics as text.
+ */
+export const monitoringMetricsQueryResponseSchema = z.coerce.string();
+export type MonitoringMetricsQueryResponseSchema = z.infer<typeof monitoringMetricsQueryResponseSchema>;
+
+ /**
+ * @description The OpenAPI JSON spec
+ */
+export const docsOpenapi200Schema = z.object({});
+export type DocsOpenapi200Schema = z.infer<typeof docsOpenapi200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const docsOpenapiErrorSchema = z.lazy(() => apiErrorSchema);
+export type DocsOpenapiErrorSchema = z.infer<typeof docsOpenapiErrorSchema>;
+/**
+ * @description The OpenAPI JSON spec
+ */
+export const docsOpenapiQueryResponseSchema = z.object({});
+export type DocsOpenapiQueryResponseSchema = z.infer<typeof docsOpenapiQueryResponseSchema>;
+
+ /**
+ * @description The Api version and commit hash.
+ */
+export const docsVersion200Schema = z.lazy(() => versionSchema);
+export type DocsVersion200Schema = z.infer<typeof docsVersion200Schema>;
+/**
+ * @description An unexpected error response.
+ */
+export const docsVersionErrorSchema = z.lazy(() => apiErrorSchema);
+export type DocsVersionErrorSchema = z.infer<typeof docsVersionErrorSchema>;
+/**
+ * @description The Api version and commit hash.
+ */
+export const docsVersionQueryResponseSchema = z.lazy(() => versionSchema);
+export type DocsVersionQueryResponseSchema = z.infer<typeof docsVersionQueryResponseSchema>;
+
+ export const operations = { "Usage_blocksDate": {
+ request: undefined,
+ parameters: {
+ path: usageBlocksDatePathParamsSchema,
+ query: usageBlocksDateQueryParamsSchema,
+ header: undefined
+ },
+ responses: {
+ 200: usageBlocksDateQueryResponseSchema,
+ default: usageBlocksDateQueryResponseSchema
+ },
+ errors: {}
+ }, "Usage_blocksHash": {
+ request: undefined,
+ parameters: {
+ path: usageBlocksHashPathParamsSchema,
+ query: usageBlocksHashQueryParamsSchema,
+ header: undefined
+ },
+ responses: {
+ 200: usageBlocksHashQueryResponseSchema,
+ default: usageBlocksHashQueryResponseSchema
+ },
+ errors: {}
+ }, "Usage_blocksNumber": {
+ request: undefined,
+ parameters: {
+ path: usageBlocksNumberPathParamsSchema,
+ query: usageBlocksNumberQueryParamsSchema,
+ header: undefined
+ },
+ responses: {
+ 200: usageBlocksNumberQueryResponseSchema,
+ default: usageBlocksNumberQueryResponseSchema
+ },
+ errors: {}
+ }, "Monitoring_health": {
+ request: undefined,
+ parameters: {
+ path: undefined,
+ query: undefined,
+ header: undefined
+ },
+ responses: {
+ 200: monitoringHealthQueryResponseSchema,
+ default: monitoringHealthQueryResponseSchema
+ },
+ errors: {}
+ }, "Monitoring_metrics": {
+ request: undefined,
+ parameters: {
+ path: undefined,
+ query: undefined,
+ header: undefined
+ },
+ responses: {
+ 200: monitoringMetricsQueryResponseSchema,
+ default: monitoringMetricsQueryResponseSchema
+ },
+ errors: {}
+ }, "Docs_openapi": {
+ request: undefined,
+ parameters: {
+ path: undefined,
+ query: undefined,
+ header: undefined
+ },
+ responses: {
+ 200: docsOpenapiQueryResponseSchema,
+ default: docsOpenapiQueryResponseSchema
+ },
+ errors: {}
+ }, "Docs_version": {
+ request: undefined,
+ parameters: {
+ path: undefined,
+ query: undefined,
+ header: undefined
+ },
+ responses: {
+ 200: docsVersionQueryResponseSchema,
+ default: docsVersionQueryResponseSchema
+ },
+ errors: {}
+ } } as const;
+export const paths = { "/blocks/{date}": {
+ get: operations["Usage_blocksDate"]
+ }, "/blocks/{hash}": {
+ get: operations["Usage_blocksHash"]
+ }, "/blocks/{number}": {
+ get: operations["Usage_blocksNumber"]
+ }, "/health": {
+ get: operations["Monitoring_health"]
+ }, "/metrics": {
+ get: operations["Monitoring_metrics"]
+ }, "/openapi": {
+ get: operations["Docs_openapi"]
+ }, "/version": {
+ get: operations["Docs_version"]
+ } } as const;
\ No newline at end of file
diff --git a/src/typespec/README.md b/src/typespec/README.md
new file mode 100644
index 0000000..fa0e934
--- /dev/null
+++ b/src/typespec/README.md
@@ -0,0 +1,19 @@
+# Typespec definitions
+
+*From https://github.com/microsoft/typespec*
+
+> TypeSpec is a language for defining cloud service APIs and shapes. TypeSpec is a highly extensible language with primitives that can describe API shapes common among REST, OpenAPI, gRPC, and other protocols.
+
+For Pinax's API projects, Typespec allows for both generating the [protobuf](./protobuf.tsp) definitions used at the *substreams* level **and** the [OpenAPI3](openapi3.tsp) specification, ensuring consistent data models for the whole pipeline.
+
+See https://typespec.io/docs to get started.
+
+## Common models
+
+The data models used for both outputs can be found in [`models.tsp`](./models.tsp).
+
+## Compiling definitions
+
+Use `bun run types:watch` to auto-compile the definitions on file changes. Generated outputs can be found in the [`static`](/static/) folder.
+
+TypeSpec compiler options can be found in [`tspconfig.yaml`](/tspconfig.yaml).
\ No newline at end of file
diff --git a/src/typespec/main.tsp b/src/typespec/main.tsp
new file mode 100644
index 0000000..daa3780
--- /dev/null
+++ b/src/typespec/main.tsp
@@ -0,0 +1,4 @@
+/**
+ * Main file to allow compiling for both protobuf and openapi3 specs with single command `tsp compile .`
+ */
+import "./openapi3.tsp";
diff --git a/src/typespec/models.tsp b/src/typespec/models.tsp
new file mode 100644
index 0000000..5d879f4
--- /dev/null
+++ b/src/typespec/models.tsp
@@ -0,0 +1,113 @@
+/**
+ * Common models used for protobuf and openapi3 outputs
+ */
+namespace Models {
+ model Clock {
+ block_time: utcDateTime,
+ block_number: uint64,
+ block_hash: string, // 'Hash'
+ block_date: plainDate,
+ }
+
+ model Transaction {
+ tx_hash: string, // 'Hash'
+ tx_index: uint64,
+ tx_status: string,
+ tx_status_code: uint8,
+ tx_success: boolean,
+ }
+
+ model Blocks {
+ ...Clock,
+ // Header
+ parent_hash: string, // 'Hash'
+ producer: string, // 'Address'
+ confirmed: uint32,
+ schedule_version: uint32,
+ // Block
+ version: uint32,
+ producer_signature: string, // 'Signature'
+ dpos_proposed_irreversible_blocknum: uint32,
+ dpos_irreversible_blocknum: uint32,
+ // Block roots
+ transaction_mroot: string, // 'Hash'
+ action_mroot: string, // 'Hash'
+ blockroot_merkle_node_count: uint32,
+ // Counters
+ size: uint64, // 'Block size estimate in bytes'
+ total_transactions: uint64,
+ successful_transactions: uint64,
+ failed_transactions: uint64,
+ total_actions: uint64,
+ total_db_ops: uint64,
+ }
+
+ model Transactions {
+ ...Clock,
+ // Transaction
+ hash: string, // 'Hash'
+ index: uint64,
+ elapsed: int64,
+ net_usage: uint64,
+ scheduled: boolean,
+ // Header
+ cpu_usage_micro_seconds: uint32,
+ net_usage_words: uint32,
+ status: string, // 'Status'
+ status_code: uint8,
+ success: boolean,
+ // Block roots
+ transaction_mroot: string, // 'Hash'
+ }
+
+
+ model Actions {
+ ...Clock,
+ ...Transaction,
+ // Receipt
+ abi_sequence: uint64,
+ code_sequence: uint64,
+ digest: string,
+ global_sequence: uint64,
+ receipt_receiver: string, // 'Address'
+ recv_sequence: uint64,
+ // Action
+ account: string, // 'Address'
+ name: string, // 'Address'
+ json_data: string, // 'JSON'
+ raw_data: string, // 'Hex'
+ // Trace
+ index: uint32, // 'Action Ordinal'
+ receiver: string,
+ context_free: boolean,
+ elapsed: int64,
+ console: string,
+ raw_return_value: string,
+ json_return_value: string,
+ creator_action_ordinal: uint32,
+ closest_unnotified_ancestor_action_ordinal: uint32,
+ execution_index: uint32,
+ // Block roots
+ action_mroot: string, // 'Hash'
+ }
+
+ model DBOps {
+ ...Clock,
+ ...Transaction,
+ // Storage changes
+ index: uint32,
+ operation: string, // 'Operation'
+ operation_code: uint8,
+ action_index: uint32,
+ code: string,
+ scope: string,
+ table_name: string,
+ primary_key: string,
+ old_payer: string,
+ new_payer: string,
+ old_data: string,
+ new_data: string,
+ old_data_json: string,
+ new_data_json: string,
+ }
+}
diff --git a/src/typespec/openapi3.tsp b/src/typespec/openapi3.tsp
new file mode 100644
index 0000000..790c322
--- /dev/null
+++ b/src/typespec/openapi3.tsp
@@ -0,0 +1,164 @@
+import "@typespec/http";
+import "@typespec/openapi";
+import "./models.tsp";
+
+using TypeSpec.Http;
+using TypeSpec.OpenAPI;
+
+@service({ title: "Antelope Transactions Api" })
+@info({
+ summary: "Transactions information from the Antelope blockchains, powered by Substreams",
+ license: {
+ name: "MIT",
+ url: "https://github.com/pinax-network/antelope-transactions-api/blob/75db1c61477ac7ea5e56f775cefe54875af953d4/LICENSE"
+ },
+ version: "0.1.0"
+}) // From @typespec/openapi
+//@server("https://eos.api.pinax.network/v1", "EOS V1 Api Endpoint")
+namespace AntelopeTransactionsApi;
+
+alias ApiKeyHeader = "X-Api-Key";
+// Error codes adapted from https://github.com/pinax-network/golang-base/blob/develop/response/errors.go
+alias ApiErrorCode =
+ | "bad_database_response" // invalid response from the database
+ | "bad_header" // invalid or malformed header given
+ | "missing_required_header" // request is missing a header
+ | "bad_query_input" // given query input is missing or malformed
+ | "database_timeout" // timeout while connecting to database
+ | "forbidden" // not allowed to access this endpoint
+ | "internal_server_error" // an unknown error occurred on the backend
+ | "method_not_allowed" // http method is not allowed on this endpoint
+ | "route_not_found" // the requested route was not found
+ | "unauthorized"; // invalid authorization information given
+
+alias ErrorStatusCode = 500 | 504 | 400 | 401 | 403 | 404 | 405;
+
+@error
+model ApiError {
+ status: ErrorStatusCode;
+ code: ApiErrorCode;
+ message: string;
+}
+
+model QueryStatistics {
+ elapsed: float;
+ rows_read: safeint;
+ bytes_read: safeint;
+}
+
+model Pagination {
+ next_page: safeint;
+ previous_page: safeint;
+ total_pages: safeint;
+ total_results: safeint;
+}
+
+model ResponseMetadata {
+ statistics: QueryStatistics | null;
+ ...Pagination;
+}
+
+model UsageResponse<T> {
+ data: T[];
+ meta: ResponseMetadata;
+}
+
+// Aliases will *not* be present in the OpenAPI components.
+// This also helps prevent self-references in the generated `components`, so that codegen works properly.
+alias ApiResponse<T> = T | ApiError;
+alias PaginationQueryParams = {
+ @query limit?: uint64 = 10;
+ @query page?: uint64 = 1;
+};
+
+@tag("Usage")
+interface Usage {
+ /**
+ Block by hash.
+ @returns Array of blocks.
+ */
+ @summary("Blocks")
+ @route("/blocks/{hash}")
+ @get
+ @useAuth(ApiKeyAuth<ApiKeyLocation.header, ApiKeyHeader>)
+ blocksHash(
+ @path hash: Models.Blocks.block_hash,
+ ...PaginationQueryParams,
+ ): ApiResponse<UsageResponse<Models.Blocks>>;
+
+ /**
+ Block by number.
+ @returns Array of blocks.
+ */
+ @summary("Blocks")
+ @route("/blocks/{number}")
+ @get
+ @useAuth(ApiKeyAuth<ApiKeyLocation.header, ApiKeyHeader>)
+ blocksNumber(
+ @path number: Models.Blocks.block_number,
+ ...PaginationQueryParams,
+ ): ApiResponse<UsageResponse<Models.Blocks>>;
+
+ /**
+ Block by date.
+ @returns Array of blocks.
+ */
+ @summary("Blocks")
+ @route("/blocks/{date}")
+ @get
+ @useAuth(ApiKeyAuth<ApiKeyLocation.header, ApiKeyHeader>)
+ blocksDate(
+ @path date: Models.Blocks.block_date,
+ ...PaginationQueryParams,
+ ): ApiResponse<UsageResponse<Models.Blocks>>;
+}
+
+model Version {
+ @pattern("^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$") // Adapted from https://semver.org/
+ version: string;
+
+ @pattern("^[0-9a-f]{7}$")
+ commit: string;
+}
+
+@tag("Docs")
+interface Docs {
+ /**
+ Reflection endpoint to return OpenAPI JSON spec. Also used by Swagger to generate the frontpage.
+ @returns The OpenAPI JSON spec
+ */
+ @summary("OpenAPI JSON spec")
+ @route("/openapi")
+ @get
+ openapi(): ApiResponse<Record<unknown>>;
+
+ /**
+ Api version and Git short commit hash.
+ @returns The Api version and commit hash.
+ */
+ @summary("Api version")
+ @route("/version")
+ @get
+ version(): ApiResponse<Version>;
+}
+
+@tag("Monitoring")
+interface Monitoring {
+ /**
+ Checks database connection.
+ @returns OK or ApiError.
+ */
+ @summary("Health check")
+ @route("/health")
+ @get
+ health(): ApiResponse<string>;
+
+ /**
+ Prometheus metrics.
+ @returns Metrics as text.
+ */
+ @summary("Prometheus metrics")
+ @route("/metrics")
+ @get
+ metrics(): ApiResponse<string>;
+}
diff --git a/src/usage.ts b/src/usage.ts
new file mode 100644
index 0000000..f0faf65
--- /dev/null
+++ b/src/usage.ts
@@ -0,0 +1,86 @@
+import { makeQuery } from "./clickhouse/makeQuery.js";
+import { APIErrorResponse } from "./utils.js";
+
+import type { Context } from "hono";
+import type { AdditionalQueryParams, UsageEndpoints, UsageResponse, ValidUserParams } from "./types/api.js";
+import type { ResponseMetadataSchema } from "./types/zod.gen.js";
+
+/**
+ * This function creates and sends the SQL queries to the ClickHouse database based on the endpoint requested.
+ *
+ * Both the REST API and the GraphQL endpoint use it.
+ * `endpoint` is a valid "Usage" endpoint (i.e. an actual data endpoint, not `/version`, `/metrics`, etc.).
+ * `user_params` is a key-value object created from the path and query parameters present in the request.
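+ *
+ * Example (hypothetical parameter values):
+ * ```ts
+ * // Second page of blocks produced on 2024-01-01, 10 rows per page
+ * await makeUsageQuery(ctx, "/blocks/{date}", { date: "2024-01-01", limit: 10, page: 2 });
+ * ```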
+ **/
+
+export async function makeUsageQuery(ctx: Context, endpoint: UsageEndpoints, user_params: ValidUserParams<typeof endpoint>) {
+ type UsageElementReturnType = UsageResponse[number];
+
+ let { page, ...query_params } = user_params;
+
+ if (!query_params.limit)
+ query_params.limit = 10;
+
+ if (!page)
+ page = 1;
+
+ let filters = "";
+ // Don't add `limit` and `block_range` to WHERE clause
+ for (const k of Object.keys(query_params).filter(k => k !== "limit")) {
+ const clickhouse_type = typeof query_params[k as keyof typeof query_params] === "number" ? "int" : "String";
+ filters += ` (${k} = {${k}: ${clickhouse_type}}) AND`;
+ }
+
+ filters = filters.substring(0, filters.lastIndexOf(' ')); // Remove last item ` AND`
+ if (filters.length)
+ filters = `WHERE ${filters}`;
+
+ let query = "";
+ let additional_query_params: AdditionalQueryParams = {};
+
+ // Parse the block range for endpoints that use it. Check for a single value or two comma-separated values.
+ if (endpoint == "/blocks/{date}" || endpoint == "/blocks/{hash}" || endpoint == "/blocks/{number}") {
+ // NB: Using `account_balances` seems to return the most results.
+ // Have to try with a fully synced chain to compare with `create_events` and others.
+ query += `SELECT * FROM blocks ${filters}`;
+ }
+
+ query += " LIMIT {limit: int}";
+ query += " OFFSET {offset: int}";
+
+ let query_results;
+ additional_query_params.offset = query_params.limit * (page - 1);
+ try {
+ query_results = await makeQuery(query, { ...query_params, ...additional_query_params });
+ } catch (err) {
+ return APIErrorResponse(ctx, 500, "bad_database_response", err);
+ }
+
+ // Always have at least one total page
+ const total_pages = Math.max(Math.ceil((query_results.rows_before_limit_at_least ?? 0) / query_params.limit), 1);
+
+ if (page > total_pages)
+ return APIErrorResponse(ctx, 400, "bad_query_input", `Requested page (${page}) exceeds total pages (${total_pages})`);
+
+ /* Solving the `data` type issue:
+ type A = string[] | number[]; // This is union of array types
+ type B = A[number][]; // This is array of elements of union type
+
+ let t: A;
+ let v: B;
+
+ t = v; // Error
+ */
+
+ return ctx.json<{ data: UsageElementReturnType[], meta: ResponseMetadataSchema }, 200>({
+ // @ts-ignore
+ data: query_results.data,
+ meta: {
+ statistics: query_results.statistics ?? null,
+ next_page: (page * query_params.limit >= (query_results.rows_before_limit_at_least ?? 0)) ? page : page + 1,
+ previous_page: (page <= 1) ? page : page - 1,
+ total_pages,
+ total_results: query_results.rows_before_limit_at_least ?? 0
+ }
+ });
+}
+
diff --git a/src/utils.ts b/src/utils.ts
new file mode 100644
index 0000000..8ad6085
--- /dev/null
+++ b/src/utils.ts
@@ -0,0 +1,29 @@
+import { ZodError } from "zod";
+
+import type { Context } from "hono";
+import type { ApiErrorSchema } from "./types/zod.gen.js";
+import { logger } from "./logger.js";
+import * as prometheus from "./prometheus.js";
+
+export function APIErrorResponse(ctx: Context, status: ApiErrorSchema["status"], code: ApiErrorSchema["code"], err: unknown) {
+ let message = "An unexpected error occured";
+
+ if (typeof err === "string") {
+ message = err;
+ } else if (err instanceof ZodError) {
+ message = err.issues.map(issue => `[${issue.code}] ${issue.path.join('/')}: ${issue.message}`).join('\n');
+ } else if (err instanceof Error) {
+ message = err.message;
+ }
+
+ const api_error = {
+ status,
+ code,
+ message
+ };
+
+ logger.error(api_error);
+ prometheus.request_error.inc({ pathname: ctx.req.path, status });
+
+ return ctx.json(api_error, status);
+}
\ No newline at end of file
diff --git a/static/@openapi-to-graphql/graphql/schema.graphql b/static/@openapi-to-graphql/graphql/schema.graphql
new file mode 100644
index 0000000..916e645
--- /dev/null
+++ b/static/@openapi-to-graphql/graphql/schema.graphql
@@ -0,0 +1,235 @@
+type Query {
+ """
+ Token balances of an account.
+
+ Equivalent to GET /balance
+ """
+ balance(account: String!, contract: String, limit: Int, page: Int, symcode: String): Balance
+
+ """
+ Historical token balances of an account.
+
+ Equivalent to GET /balance/historical
+ """
+ balanceHistorical(account: String!, block_num: Int!, contract: String, limit: Int, page: Int, symcode: String): BalanceHistorical
+
+ """
+ Current head block for which data is available (can be lower than head block of the chain).
+
+ Equivalent to GET /head
+ """
+ head(limit: Int, page: Int): Head
+
+ """
+ Checks database connection.
+
+ Equivalent to GET /health
+ """
+ health: String
+
+ """
+ List of holders of a token.
+
+ Equivalent to GET /holders
+ """
+ holders(contract: String!, limit: Int, page: Int, symcode: String!): Holders
+
+ """
+ Prometheus metrics.
+
+ Equivalent to GET /metrics
+ """
+ metrics: String
+
+ """
+ Reflection endpoint to return OpenAPI JSON spec. Also used by Swagger to generate the frontpage.
+
+ Equivalent to GET /openapi
+ """
+ openapi: JSON
+
+ """
+ Total supply for a token.
+
+ Equivalent to GET /supply
+ """
+ supply(block_num: Int, contract: String!, issuer: String, limit: Int, page: Int, symcode: String!): Supply
+
+ """
+ List of available tokens.
+
+ Equivalent to GET /tokens
+ """
+ tokens(limit: Int, page: Int): Tokens
+
+ """
+ All transfers related to a token.
+
+ Equivalent to GET /transfers
+ """
+ transfers(block_range: [Int], contract: String!, limit: Int, page: Int, symcode: String!): Transfers
+
+ """
+ All transfers related to an account.
+
+ Equivalent to GET /transfers/account
+ """
+ transfersAccount(account: String!, block_range: [Int], contract: String, from: String, limit: Int, page: Int, symcode: String, to: String): TransfersAccount
+
+ """
+ Specific transfer related to a token.
+
+ Equivalent to GET /transfers/id
+ """
+ transfersId(limit: Int, page: Int, trx_id: String!): TransfersId
+
+ """
+ Api version and Git short commit hash.
+
+ Equivalent to GET /version
+ """
+ version: Version
+}
+
+type Balance {
+ data: [Balance2]!
+ meta: ResponseMetadata!
+}
+
+type Balance2 {
+ balance: Float!
+ contract: String!
+ last_updated_block: Int!
+ symcode: String!
+}
+
+type ResponseMetadata {
+ next_page: BigInt!
+ previous_page: BigInt!
+ statistics: Statistics!
+ total_pages: BigInt!
+ total_results: BigInt!
+}
+
+"""
+The `BigInt` scalar type represents non-fractional signed whole numeric values.
+"""
+scalar BigInt
+
+type Statistics {
+ bytes_read: BigInt!
+ elapsed: Float!
+ rows_read: BigInt!
+}
+
+type BalanceHistorical {
+ data: [BalanceChange]!
+ meta: ResponseMetadata!
+}
+
+type BalanceChange {
+ account: String!
+ action_index: Int!
+ amount: BigInt!
+ balance: String!
+ balance_delta: BigInt!
+ block_num: Int!
+ contract: String!
+ precision: Int!
+ symcode: String!
+ timestamp: String!
+ trx_id: String!
+ value: Float!
+}
+
+type Head {
+ data: [Data3ListItem]!
+ meta: ResponseMetadata!
+}
+
+type Data3ListItem {
+ block_id: String!
+ block_num: Int!
+}
+
+type Holders {
+ data: [Holder]!
+ meta: ResponseMetadata!
+}
+
+type Holder {
+ account: String!
+ balance: Float!
+}
+
+"""
+The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
+"""
+scalar JSON @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf")
+
+type Supply {
+ data: [Supply2]!
+ meta: ResponseMetadata!
+}
+
+type Supply2 {
+ action_index: Int!
+ amount: BigInt!
+ block_num: Int!
+ contract: String!
+ issuer: String!
+ max_supply: String!
+ precision: Int!
+ supply: String!
+ supply_delta: BigInt!
+ symcode: String!
+ timestamp: String!
+ trx_id: String!
+ value: Float!
+}
+
+type Tokens {
+ data: [ModelsScope]!
+ meta: ResponseMetadata!
+}
+
+type ModelsScope {
+ contract: String!
+ symcode: String!
+}
+
+type Transfers {
+ data: [Transfer]!
+ meta: ResponseMetadata!
+}
+
+type Transfer {
+ action_index: Int!
+ amount: BigInt!
+ block_num: Int!
+ contract: String!
+ from: String!
+ memo: String!
+ precision: Int!
+ quantity: String!
+ symcode: String!
+ timestamp: String!
+ to: String!
+ trx_id: String!
+ value: Float!
+}
+
+type TransfersAccount {
+ data: [Transfer]!
+ meta: ResponseMetadata!
+}
+
+type TransfersId {
+ data: [Transfer]!
+ meta: ResponseMetadata!
+}
+
+type Version {
+ commit: String!
+ version: String!
+}
\ No newline at end of file
diff --git a/static/@typespec/openapi3/openapi.json b/static/@typespec/openapi3/openapi.json
new file mode 100644
index 0000000..160ec7f
--- /dev/null
+++ b/static/@typespec/openapi3/openapi.json
@@ -0,0 +1,665 @@
+{
+ "openapi": "3.0.0",
+ "info": {
+ "title": "Antelope Transactions Api",
+ "summary": "Transactions information from the Antelope blockchains, powered by Substreams",
+ "license": {
+ "name": "MIT",
+ "url": "https://github.com/pinax-network/antelope-transactions-api/blob/75db1c61477ac7ea5e56f775cefe54875af953d4/LICENSE"
+ },
+ "version": "0.1.0"
+ },
+ "tags": [
+ {
+ "name": "Usage"
+ },
+ {
+ "name": "Docs"
+ },
+ {
+ "name": "Monitoring"
+ }
+ ],
+ "paths": {
+ "/blocks/{date}": {
+ "get": {
+ "tags": [
+ "Usage"
+ ],
+ "operationId": "Usage_blocksDate",
+ "summary": "Blocks",
+ "description": "Block by date.",
+ "parameters": [
+ {
+ "name": "date",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "date"
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 10
+ }
+ },
+ {
+ "name": "page",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 1
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Array of blocks.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": [
+ "data",
+ "meta"
+ ],
+ "properties": {
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Models.Blocks"
+ }
+ },
+ "meta": {
+ "$ref": "#/components/schemas/ResponseMetadata"
+ }
+ }
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "ApiKeyAuth": []
+ }
+ ]
+ }
+ },
+ "/blocks/{hash}": {
+ "get": {
+ "tags": [
+ "Usage"
+ ],
+ "operationId": "Usage_blocksHash",
+ "summary": "Blocks",
+ "description": "Block by hash.",
+ "parameters": [
+ {
+ "name": "hash",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 10
+ }
+ },
+ {
+ "name": "page",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 1
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Array of blocks.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": [
+ "data",
+ "meta"
+ ],
+ "properties": {
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Models.Blocks"
+ }
+ },
+ "meta": {
+ "$ref": "#/components/schemas/ResponseMetadata"
+ }
+ }
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "ApiKeyAuth": []
+ }
+ ]
+ }
+ },
+ "/blocks/{number}": {
+ "get": {
+ "tags": [
+ "Usage"
+ ],
+ "operationId": "Usage_blocksNumber",
+ "summary": "Blocks",
+ "description": "Block by number.",
+ "parameters": [
+ {
+ "name": "number",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "integer",
+ "format": "uint64"
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 10
+ }
+ },
+ {
+ "name": "page",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 1
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Array of blocks.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": [
+ "data",
+ "meta"
+ ],
+ "properties": {
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Models.Blocks"
+ }
+ },
+ "meta": {
+ "$ref": "#/components/schemas/ResponseMetadata"
+ }
+ }
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "ApiKeyAuth": []
+ }
+ ]
+ }
+ },
+ "/health": {
+ "get": {
+ "tags": [
+ "Monitoring"
+ ],
+ "operationId": "Monitoring_health",
+ "summary": "Health check",
+ "description": "Checks database connection.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "OK or ApiError.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/metrics": {
+ "get": {
+ "tags": [
+ "Monitoring"
+ ],
+ "operationId": "Monitoring_metrics",
+ "summary": "Prometheus metrics",
+ "description": "Prometheus metrics.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "Metrics as text.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/openapi": {
+ "get": {
+ "tags": [
+ "Docs"
+ ],
+ "operationId": "Docs_openapi",
+ "summary": "OpenAPI JSON spec",
+ "description": "Reflection endpoint to return OpenAPI JSON spec. Also used by Swagger to generate the frontpage.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "The OpenAPI JSON spec",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "additionalProperties": {}
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/version": {
+ "get": {
+ "tags": [
+ "Docs"
+ ],
+ "operationId": "Docs_version",
+ "summary": "Api version",
+ "description": "Api version and Git short commit hash.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "The Api version and commit hash.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Version"
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "ApiError": {
+ "type": "object",
+ "required": [
+ "status",
+ "code",
+ "message"
+ ],
+ "properties": {
+ "status": {
+ "type": "number",
+ "enum": [
+ 500,
+ 504,
+ 400,
+ 401,
+ 403,
+ 404,
+ 405
+ ]
+ },
+ "code": {
+ "type": "string",
+ "enum": [
+ "bad_database_response",
+ "bad_header",
+ "missing_required_header",
+ "bad_query_input",
+ "database_timeout",
+ "forbidden",
+ "internal_server_error",
+ "method_not_allowed",
+ "route_not_found",
+ "unauthorized"
+ ]
+ },
+ "message": {
+ "type": "string"
+ }
+ }
+ },
+ "Models.Blocks": {
+ "type": "object",
+ "required": [
+ "block_time",
+ "block_number",
+ "block_hash",
+ "block_date",
+ "parent_hash",
+ "producer",
+ "confirmed",
+ "schedule_version",
+ "version",
+ "producer_signature",
+ "dpos_proposed_irreversible_blocknum",
+ "dpos_irreversible_blocknum",
+ "transaction_mroot",
+ "action_mroot",
+ "blockroot_merkle_node_count",
+ "size",
+ "total_transactions",
+ "successful_transactions",
+ "failed_transactions",
+ "total_actions",
+ "total_db_ops"
+ ],
+ "properties": {
+ "block_time": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "block_number": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "block_hash": {
+ "type": "string"
+ },
+ "block_date": {
+ "type": "string",
+ "format": "date"
+ },
+ "parent_hash": {
+ "type": "string"
+ },
+ "producer": {
+ "type": "string"
+ },
+ "confirmed": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "schedule_version": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "version": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "producer_signature": {
+ "type": "string"
+ },
+ "dpos_proposed_irreversible_blocknum": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "dpos_irreversible_blocknum": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "transaction_mroot": {
+ "type": "string"
+ },
+ "action_mroot": {
+ "type": "string"
+ },
+ "blockroot_merkle_node_count": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "size": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "total_transactions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "successful_transactions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "failed_transactions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "total_actions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "total_db_ops": {
+ "type": "integer",
+ "format": "uint64"
+ }
+ }
+ },
+ "Pagination": {
+ "type": "object",
+ "required": [
+ "next_page",
+ "previous_page",
+ "total_pages",
+ "total_results"
+ ],
+ "properties": {
+ "next_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "previous_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_pages": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_results": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ },
+ "QueryStatistics": {
+ "type": "object",
+ "required": [
+ "elapsed",
+ "rows_read",
+ "bytes_read"
+ ],
+ "properties": {
+ "elapsed": {
+ "type": "number"
+ },
+ "rows_read": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "bytes_read": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ },
+ "ResponseMetadata": {
+ "type": "object",
+ "required": [
+ "statistics",
+ "next_page",
+ "previous_page",
+ "total_pages",
+ "total_results"
+ ],
+ "properties": {
+ "statistics": {
+ "type": "object",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/QueryStatistics"
+ }
+ ],
+ "nullable": true
+ },
+ "next_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "previous_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_pages": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_results": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ },
+ "Version": {
+ "type": "object",
+ "required": [
+ "version",
+ "commit"
+ ],
+ "properties": {
+ "version": {
+ "type": "string",
+ "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$"
+ },
+ "commit": {
+ "type": "string",
+ "pattern": "^[0-9a-f]{7}$"
+ }
+ }
+ }
+ },
+ "securitySchemes": {
+ "ApiKeyAuth": {
+ "type": "apiKey",
+ "in": "header",
+ "name": "X-Api-Key"
+ }
+ }
+ }
+}
diff --git a/static/@typespec/protobuf/antelope/eosio/token/v1.proto b/static/@typespec/protobuf/antelope/eosio/token/v1.proto
new file mode 100644
index 0000000..76f028a
--- /dev/null
+++ b/static/@typespec/protobuf/antelope/eosio/token/v1.proto
@@ -0,0 +1,38 @@
+// Generated by Microsoft TypeSpec
+
+syntax = "proto3";
+
+package antelope.eosio.token.v1;
+
+import "google/protobuf/timestamp.proto";
+
+message Transfer {
+ string trx_id = 1;
+ uint32 action_index = 2;
+ string contract = 3;
+ string symcode = 4;
+ uint32 precision = 9;
+ int64 amount = 10;
+ double value = 11;
+ uint64 block_num = 12;
+ google.protobuf.Timestamp timestamp = 13;
+ string from = 5;
+ string to = 6;
+ string quantity = 7;
+ string memo = 8;
+}
+
+message BalanceChange {
+ string trx_id = 1;
+ uint32 action_index = 2;
+ string contract = 3;
+ string symcode = 4;
+ uint32 precision = 8;
+ int64 amount = 9;
+ double value = 10;
+ uint64 block_num = 11;
+ google.protobuf.Timestamp timestamp = 12;
+ string account = 5;
+ string balance = 6;
+ int64 balance_delta = 7;
+}
diff --git a/static/README.md b/static/README.md
new file mode 100644
index 0000000..5a651bc
--- /dev/null
+++ b/static/README.md
@@ -0,0 +1,12 @@
+> [!CAUTION]
+>
+> Static files generated at build time. **DO NOT EDIT MANUALLY**.
+> Use `bun run types` to run the static file generation.
+
+### `@openapi-to-graphql`
+
+GraphQL schema generated with [`openapi-to-graphql-cli`](https://www.npmjs.com/package/openapi-to-graphql-cli) from the [`openapi.json`](@typespec/openapi3/openapi.json) generated by Typespec.
+
+### `@typespec`
+
+Protobuf definitions and OpenAPI schemas generated with Typespec.
\ No newline at end of file
diff --git a/swagger/favicon.ico b/swagger/favicon.ico
new file mode 100644
index 0000000..647590e
Binary files /dev/null and b/swagger/favicon.ico differ
diff --git a/swagger/index.html b/swagger/index.html
new file mode 100644
index 0000000..9267022
--- /dev/null
+++ b/swagger/index.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+ <meta charset="utf-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <meta name="description" content="SwaggerUI" />
+ <title>Substreams Antelope Token API - SwaggerUI</title>
+ <link rel="icon" type="image/x-icon" href="./favicon.ico" />
+ <link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css" />
+</head>
+
+<body>
+ <div id="swagger-ui"></div>
+ <script src="https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js" crossorigin></script>
+ <script>
+ // Loads the OpenAPI spec from the API's own /openapi endpoint
+ window.onload = () => {
+ window.ui = SwaggerUIBundle({
+ url: '/openapi',
+ dom_id: '#swagger-ui',
+ });
+ };
+ </script>
+</body>
+
+</html>
\ No newline at end of file
diff --git a/token_api_architecture_diagram.png b/token_api_architecture_diagram.png
new file mode 100644
index 0000000..2c7fcf8
Binary files /dev/null and b/token_api_architecture_diagram.png differ
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..fbfdeeb
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,19 @@
+{
+ "compilerOptions": {
+ "target": "ESNext",
+ "module": "NodeNext",
+ "moduleResolution": "NodeNext",
+ "resolveJsonModule": true,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "outDir": "./dist/",
+ "strict": true,
+ "noImplicitAny": true,
+ "strictNullChecks": true,
+ "alwaysStrict": true,
+ "skipLibCheck": true,
+ "noUncheckedIndexedAccess": true,
+ "types": ["bun-types"]
+ }
+}
diff --git a/tsp-output/@typespec/openapi3/openapi.json b/tsp-output/@typespec/openapi3/openapi.json
new file mode 100644
index 0000000..160ec7f
--- /dev/null
+++ b/tsp-output/@typespec/openapi3/openapi.json
@@ -0,0 +1,665 @@
+{
+ "openapi": "3.0.0",
+ "info": {
+ "title": "Antelope Transactions Api",
+ "summary": "Transactions information from the Antelope blockchains, powered by Substreams",
+ "license": {
+ "name": "MIT",
+ "url": "https://github.com/pinax-network/antelope-transactions-api/blob/75db1c61477ac7ea5e56f775cefe54875af953d4/LICENSE"
+ },
+ "version": "0.1.0"
+ },
+ "tags": [
+ {
+ "name": "Usage"
+ },
+ {
+ "name": "Docs"
+ },
+ {
+ "name": "Monitoring"
+ }
+ ],
+ "paths": {
+ "/blocks/{date}": {
+ "get": {
+ "tags": [
+ "Usage"
+ ],
+ "operationId": "Usage_blocksDate",
+ "summary": "Blocks",
+ "description": "Block by date.",
+ "parameters": [
+ {
+ "name": "date",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "date"
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 10
+ }
+ },
+ {
+ "name": "page",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 1
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Array of blocks.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": [
+ "data",
+ "meta"
+ ],
+ "properties": {
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Models.Blocks"
+ }
+ },
+ "meta": {
+ "$ref": "#/components/schemas/ResponseMetadata"
+ }
+ }
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "ApiKeyAuth": []
+ }
+ ]
+ }
+ },
+ "/blocks/{hash}": {
+ "get": {
+ "tags": [
+ "Usage"
+ ],
+ "operationId": "Usage_blocksHash",
+ "summary": "Blocks",
+ "description": "Block by hash.",
+ "parameters": [
+ {
+ "name": "hash",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 10
+ }
+ },
+ {
+ "name": "page",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 1
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Array of blocks.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": [
+ "data",
+ "meta"
+ ],
+ "properties": {
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Models.Blocks"
+ }
+ },
+ "meta": {
+ "$ref": "#/components/schemas/ResponseMetadata"
+ }
+ }
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "ApiKeyAuth": []
+ }
+ ]
+ }
+ },
+ "/blocks/{number}": {
+ "get": {
+ "tags": [
+ "Usage"
+ ],
+ "operationId": "Usage_blocksNumber",
+ "summary": "Blocks",
+ "description": "Block by number.",
+ "parameters": [
+ {
+ "name": "number",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "integer",
+ "format": "uint64"
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 10
+ }
+ },
+ {
+ "name": "page",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "format": "uint64",
+ "default": 1
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Array of blocks.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": [
+ "data",
+ "meta"
+ ],
+ "properties": {
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Models.Blocks"
+ }
+ },
+ "meta": {
+ "$ref": "#/components/schemas/ResponseMetadata"
+ }
+ }
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "ApiKeyAuth": []
+ }
+ ]
+ }
+ },
+ "/health": {
+ "get": {
+ "tags": [
+ "Monitoring"
+ ],
+ "operationId": "Monitoring_health",
+ "summary": "Health check",
+ "description": "Checks database connection.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "OK or ApiError.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/metrics": {
+ "get": {
+ "tags": [
+ "Monitoring"
+ ],
+ "operationId": "Monitoring_metrics",
+ "summary": "Prometheus metrics",
+ "description": "Prometheus metrics.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "Metrics as text.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/openapi": {
+ "get": {
+ "tags": [
+ "Docs"
+ ],
+ "operationId": "Docs_openapi",
+ "summary": "OpenAPI JSON spec",
+ "description": "Reflection endpoint to return OpenAPI JSON spec. Also used by Swagger to generate the frontpage.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "The OpenAPI JSON spec",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "additionalProperties": {}
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/version": {
+ "get": {
+ "tags": [
+ "Docs"
+ ],
+ "operationId": "Docs_version",
+ "summary": "Api version",
+ "description": "Api version and Git short commit hash.",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "The Api version and commit hash.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Version"
+ }
+ }
+ }
+ },
+ "default": {
+ "description": "An unexpected error response.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ApiError"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "ApiError": {
+ "type": "object",
+ "required": [
+ "status",
+ "code",
+ "message"
+ ],
+ "properties": {
+ "status": {
+ "type": "number",
+ "enum": [
+ 500,
+ 504,
+ 400,
+ 401,
+ 403,
+ 404,
+ 405
+ ]
+ },
+ "code": {
+ "type": "string",
+ "enum": [
+ "bad_database_response",
+ "bad_header",
+ "missing_required_header",
+ "bad_query_input",
+ "database_timeout",
+ "forbidden",
+ "internal_server_error",
+ "method_not_allowed",
+ "route_not_found",
+ "unauthorized"
+ ]
+ },
+ "message": {
+ "type": "string"
+ }
+ }
+ },
+ "Models.Blocks": {
+ "type": "object",
+ "required": [
+ "block_time",
+ "block_number",
+ "block_hash",
+ "block_date",
+ "parent_hash",
+ "producer",
+ "confirmed",
+ "schedule_version",
+ "version",
+ "producer_signature",
+ "dpos_proposed_irreversible_blocknum",
+ "dpos_irreversible_blocknum",
+ "transaction_mroot",
+ "action_mroot",
+ "blockroot_merkle_node_count",
+ "size",
+ "total_transactions",
+ "successful_transactions",
+ "failed_transactions",
+ "total_actions",
+ "total_db_ops"
+ ],
+ "properties": {
+ "block_time": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "block_number": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "block_hash": {
+ "type": "string"
+ },
+ "block_date": {
+ "type": "string",
+ "format": "date"
+ },
+ "parent_hash": {
+ "type": "string"
+ },
+ "producer": {
+ "type": "string"
+ },
+ "confirmed": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "schedule_version": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "version": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "producer_signature": {
+ "type": "string"
+ },
+ "dpos_proposed_irreversible_blocknum": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "dpos_irreversible_blocknum": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "transaction_mroot": {
+ "type": "string"
+ },
+ "action_mroot": {
+ "type": "string"
+ },
+ "blockroot_merkle_node_count": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "size": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "total_transactions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "successful_transactions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "failed_transactions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "total_actions": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "total_db_ops": {
+ "type": "integer",
+ "format": "uint64"
+ }
+ }
+ },
+ "Pagination": {
+ "type": "object",
+ "required": [
+ "next_page",
+ "previous_page",
+ "total_pages",
+ "total_results"
+ ],
+ "properties": {
+ "next_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "previous_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_pages": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_results": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ },
+ "QueryStatistics": {
+ "type": "object",
+ "required": [
+ "elapsed",
+ "rows_read",
+ "bytes_read"
+ ],
+ "properties": {
+ "elapsed": {
+ "type": "number"
+ },
+ "rows_read": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "bytes_read": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ },
+ "ResponseMetadata": {
+ "type": "object",
+ "required": [
+ "statistics",
+ "next_page",
+ "previous_page",
+ "total_pages",
+ "total_results"
+ ],
+ "properties": {
+ "statistics": {
+ "type": "object",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/QueryStatistics"
+ }
+ ],
+ "nullable": true
+ },
+ "next_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "previous_page": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_pages": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "total_results": {
+ "type": "integer",
+ "format": "int64"
+ }
+ }
+ },
+ "Version": {
+ "type": "object",
+ "required": [
+ "version",
+ "commit"
+ ],
+ "properties": {
+ "version": {
+ "type": "string",
+ "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$"
+ },
+ "commit": {
+ "type": "string",
+ "pattern": "^[0-9a-f]{7}$"
+ }
+ }
+ }
+ },
+ "securitySchemes": {
+ "ApiKeyAuth": {
+ "type": "apiKey",
+ "in": "header",
+ "name": "X-Api-Key"
+ }
+ }
+ }
+}
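
As a quick orientation to the spec above, the sketch below shows a typed client call against the `/blocks/{number}` endpoint, following the `data`/`meta` response envelope and the `X-Api-Key` header declared under `securitySchemes`. The base URL and key are placeholders, and the `Block` interface lists only a few of the `Models.Blocks` fields; treat this as an illustration of the contract, not generated output.

```typescript
// Trimmed-down view of Models.Blocks; the full schema has many more required fields.
interface Block {
    block_time: string;            // date-time
    block_number: number;          // uint64
    block_hash: string;
    producer: string;
    total_transactions: number;    // uint64
}

// Mirrors the ResponseMetadata schema (statistics is nullable).
interface ResponseMetadata {
    statistics: { elapsed: number; rows_read: number; bytes_read: number } | null;
    next_page: number;
    previous_page: number;
    total_pages: number;
    total_results: number;
}

interface BlocksResponse {
    data: Block[];
    meta: ResponseMetadata;
}

// Base URL and API key are deployment-specific placeholders.
async function getBlock(number: number): Promise<BlocksResponse> {
    const response = await fetch(`http://localhost:8080/blocks/${number}?limit=10&page=1`, {
        headers: { "X-Api-Key": "<your-api-key>" }, // ApiKeyAuth security scheme
    });
    if (!response.ok) {
        // Error responses follow the ApiError schema: { status, code, message }.
        throw new Error(`API error: ${await response.text()}`);
    }
    return response.json() as Promise<BlocksResponse>;
}
```
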
diff --git a/tsp-output/@typespec/protobuf/antelope/eosio/token/v1.proto b/tsp-output/@typespec/protobuf/antelope/eosio/token/v1.proto
new file mode 100644
index 0000000..76f028a
--- /dev/null
+++ b/tsp-output/@typespec/protobuf/antelope/eosio/token/v1.proto
@@ -0,0 +1,38 @@
+// Generated by Microsoft TypeSpec
+
+syntax = "proto3";
+
+package antelope.eosio.token.v1;
+
+import "google/protobuf/timestamp.proto";
+
+message Transfer {
+ string trx_id = 1;
+ uint32 action_index = 2;
+ string contract = 3;
+ string symcode = 4;
+ uint32 precision = 9;
+ int64 amount = 10;
+ double value = 11;
+ uint64 block_num = 12;
+ google.protobuf.Timestamp timestamp = 13;
+ string from = 5;
+ string to = 6;
+ string quantity = 7;
+ string memo = 8;
+}
+
+message BalanceChange {
+ string trx_id = 1;
+ uint32 action_index = 2;
+ string contract = 3;
+ string symcode = 4;
+ uint32 precision = 8;
+ int64 amount = 9;
+ double value = 10;
+ uint64 block_num = 11;
+ google.protobuf.Timestamp timestamp = 12;
+ string account = 5;
+ string balance = 6;
+ int64 balance_delta = 7;
+}
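
One detail worth noting in the generated messages above: the field numbers are non-sequential (e.g. `precision = 9` directly after `symcode = 4`) because the TypeSpec protobuf emitter carries over the field numbers assigned in the source models (presumably via `@field` decorators) rather than renumbering by declaration order. That is valid proto3 and keeps the wire format stable when shared fields are reused across messages. For a rough sense of what language bindings look like, below is a hand-written TypeScript mirror of `Transfer`; actual output from a protobuf code generator would differ in detail.

```typescript
// Hand-written mirror of antelope.eosio.token.v1.Transfer, for illustration only.
// 64-bit integers map to bigint to avoid precision loss past Number.MAX_SAFE_INTEGER.
interface Transfer {
    trx_id: string;
    action_index: number;    // uint32
    contract: string;
    symcode: string;
    precision: number;       // uint32
    amount: bigint;          // int64
    value: number;           // double
    block_num: bigint;       // uint64
    timestamp: Date;         // google.protobuf.Timestamp; representation varies by codegen
    from: string;
    to: string;
    quantity: string;
    memo: string;
}
```
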
diff --git a/tspconfig.yaml b/tspconfig.yaml
new file mode 100644
index 0000000..c669899
--- /dev/null
+++ b/tspconfig.yaml
@@ -0,0 +1,25 @@
+# TypeSpec compiler configuration file
+# See https://typespec.io/docs/handbook/configuration
+
+# extends: ../tspconfig.yaml # Extend another config file
+# emit: # Emitter name
+#   - "<emitter-name>"
+# options: # Emitter options
+#   <emitter-name>:
+#     "<option-name>": "<option-value>"
+# environment-variables: # Environment variables which can be used to interpolate emitter options
+#   <variable-name>:
+#     default: "<variable-default>"
+# parameters: # Parameters which can be used to interpolate emitter options
+#   <param-name>:
+#     default: "<param-default>"
+# trace: # Trace areas to enable tracing
+#   - "<trace-name>"
+# warn-as-error: true # Treat warnings as errors
+# output-dir: "{project-root}/_generated" # Configure the base output directory for all emitters
+warn-as-error: true
+emit:
+ - "@typespec/openapi3"
+options:
+ "@typespec/openapi3":
+ "file-type": "json"